Skip to content

Commit

Permalink
Merge pull request #77 from melexis/snapshots
Browse files Browse the repository at this point in the history
Use snapshot ID if defined
  • Loading branch information
JasperCraeghs authored Sep 9, 2024
2 parents 939ff87 + e15dd57 commit b36afde
Show file tree
Hide file tree
Showing 8 changed files with 152 additions and 42 deletions.
1 change: 1 addition & 0 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ env:
COVERITY_PASSWORD: dummy
COVERITY_STREAM: dummy
COVERITY_USERNAME: dummy
COVERITY_SNAPSHOT: dummy

jobs:
test:
Expand Down
2 changes: 1 addition & 1 deletion example/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ BUILDDIR ?= _build

# logging variables
DEBUG ?= 0
LOGLEVEL =? WARNING
LOGLEVEL ?= WARNING

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
Expand Down
7 changes: 3 additions & 4 deletions example/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,10 @@
import sys

import mlx.coverity
from mlx.coverity import __version__
from mlx.coverity import __version__, coverity_logging
import mlx.traceability
from decouple import config
import logging
from sphinx.util.logging import getLogger

pkg_version = __version__

Expand Down Expand Up @@ -315,6 +314,7 @@
"username": config("COVERITY_USERNAME"),
"password": config("COVERITY_PASSWORD"),
"stream": config("COVERITY_STREAM"),
"snapshot": config("COVERITY_SNAPSHOT"),
}

TRACEABILITY_ITEM_ID_REGEX = r"([A-Z_]+-[A-Z0-9_]+)"
Expand All @@ -324,8 +324,7 @@
if log_level:
try:
numeric_level = getattr(logging, log_level.upper(), None)
logger = getLogger("mlx.coverity_logging")
logger.setLevel(level=numeric_level)
coverity_logging.LOGGER.setLevel(level=numeric_level)
except:
raise ValueError(f"Invalid log level: {log_level}")

27 changes: 19 additions & 8 deletions mlx/coverity/coverity.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def initialize_environment(self, app):
\\makeatother"""

self.stream = app.config.coverity_credentials["stream"]

self.snapshot = app.config.coverity_credentials.get("snapshot", "")
# Login to Coverity and obtain stream information
try:
self.input_credentials(app.config.coverity_credentials)
Expand All @@ -58,15 +58,21 @@ def initialize_environment(self, app):
app.config.coverity_credentials["username"], app.config.coverity_credentials["password"]
)
report_info("done")
report_info("Verify the given stream name... ", True)
report_info("Verify the given stream name... ")
self.coverity_service.validate_stream(self.stream)
report_info("done")
if self.snapshot:
report_info("Verify the given snapshot ID and obtain all enabled checkers... ")
self.snapshot = self.coverity_service.validate_snapshot(self.snapshot)
report_info("done")
else:
self.snapshot = "last()"
# Get all column keys
report_info("obtaining all column keys... ", True)
report_info("obtaining all column keys... ")
self.coverity_service.retrieve_column_keys()
report_info("done")
# Get all checkers
report_info("obtaining all checkers... ", True)
report_info("obtaining all checkers... ")
self.coverity_service.retrieve_checkers()
report_info("done")
except (URLError, HTTPError, Exception, ValueError) as error_info: # pylint: disable=broad-except
Expand Down Expand Up @@ -100,7 +106,13 @@ def process_coverity_nodes(self, app, doctree, fromdocname):
# Get items from server
try:
defects = self.get_filtered_defects(node)
node.perform_replacement(defects, self, app, fromdocname)
if defects["totalRows"] == -1:
error_message = "There are no defects with the specified filters"
report_warning(error_message, fromdocname, lineno=node["line"])
else:
report_info("building defects table and/or chart... ", True)
node.perform_replacement(defects, self, app, fromdocname)
report_info("done")
except (URLError, AttributeError, Exception) as err: # pylint: disable=broad-except
error_message = f"failed to process coverity-list with {err!r}"
report_warning(error_message, fromdocname, lineno=node["line"])
Expand Down Expand Up @@ -138,13 +150,12 @@ def get_filtered_defects(self, node):
"rows": [list of dictionaries {"key": <key>, "value": <value>}]
}
"""
report_info("obtaining defects... ", True)
report_info("obtaining defects... ")
column_names = set(node["col"])
if "chart_attribute" in node and node["chart_attribute"].upper() in node.column_map:
column_names.add(node["chart_attribute"])
defects = self.coverity_service.get_defects(self.stream, node["filters"], column_names)
defects = self.coverity_service.get_defects(self.stream, node["filters"], column_names, self.snapshot)
report_info("%d received" % (defects["totalRows"]))
report_info("building defects table and/or chart... ", True)
return defects


Expand Down
1 change: 0 additions & 1 deletion mlx/coverity/coverity_directives/coverity_defect_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ def perform_replacement(self, defects, connector, app, fromdocname):
self._prepare_labels_and_values(combined_labels, defects["totalRows"])
top_node += self.build_pie_chart(env)

report_info("done")
self.replace_self(top_node)

def initialize_table(self):
Expand Down
50 changes: 35 additions & 15 deletions mlx/coverity/coverity_services.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import requests
from sphinx.util.logging import getLogger

from mlx.coverity import report_info
from mlx.coverity import report_info, report_warning

# Coverity built in Impact statuses
IMPACT_LIST = ["High", "Medium", "Low"]
Expand Down Expand Up @@ -125,6 +125,25 @@ def validate_stream(self, stream):
url = f"{self.api_endpoint}/streams/{stream}"
self._request(url)

def validate_snapshot(self, snapshot):
    """Validate the given snapshot ID by requesting it from the Coverity server.

    The lookup is best-effort: when the request fails, the snapshot either does not
    exist or the user does not have access to it. In that case a warning is logged
    and the latest snapshot scope (``last()``) is used instead of raising an error.

    Args:
        snapshot (str): The snapshot ID to validate.

    Returns:
        str: The snapshot ID itself when valid; the literal string ``"last()"`` otherwise.
    """
    url = f"{self.api_endpoint}/snapshots/{snapshot}"
    # Uses session.get directly (not self._request) so a failed lookup does not raise;
    # we only need the status to decide between the given ID and the fallback.
    response = self.session.get(url)
    if response.ok:
        report_info(f"Snapshot ID {snapshot} is valid")
        valid_snapshot = snapshot
    else:
        report_warning(f"No snapshot found for ID {snapshot}; Continue with using the latest snapshot.", "")
        valid_snapshot = "last()"

    return valid_snapshot

def retrieve_issues(self, filters):
"""Retrieve issues from the server (Coverity Connect).
Expand Down Expand Up @@ -200,7 +219,7 @@ def _request(self, url, data=None):
err_msg = response.json()["message"]
except (requests.exceptions.JSONDecodeError, KeyError):
err_msg = response.content.decode()
self.logger.warning(err_msg)
self.logger.error(err_msg)
return response.raise_for_status()

def assemble_query_filter(self, column_name, filter_values, matcher_type):
Expand Down Expand Up @@ -236,8 +255,10 @@ def assemble_query_filter(self, column_name, filter_values, matcher_type):
"matchers": matchers
}

def get_defects(self, stream, filters, column_names):
"""Gets a list of defects for given stream, filters and column names.
def get_defects(self, stream, filters, column_names, snapshot):
"""Gets a list of defects for the given stream, filters and column names.
If no snapshot ID is given, the last snapshot is taken.
If a column name does not match the name of the `columns` property, the column can not be obtained because
it need the correct corresponding column key.
Column key `cid` is always obtained to use later in other functions.
Expand All @@ -246,6 +267,7 @@ def get_defects(self, stream, filters, column_names):
stream (str): Name of the stream to query
filters (dict): Dictionary with attribute names as keys and CSV lists of attribute values to query as values
column_names (list[str]): The column names
snapshot (str): The snapshot ID; If empty the last snapshot is taken.
Returns:
dict: The content of the request. This has a structure like:
Expand All @@ -256,7 +278,7 @@ def get_defects(self, stream, filters, column_names):
"rows": list of [list of dictionaries {"key": <key>, "value": <value>}]
}
"""
report_info(f"Querying Coverity for defects in stream [{stream}] ...",)
report_info(f"Querying Coverity for defects in stream [{stream}] ...")
query_filters = [
{
"columnKey": "streams",
Expand Down Expand Up @@ -296,18 +318,16 @@ def get_defects(self, stream, filters, column_names):
"columns": list(self.column_keys(column_names)),
"snapshotScope": {
"show": {
"scope": "last()",
"includeOutdatedSnapshots": False
},
"compareTo": {
"scope": "last()",
"scope": snapshot,
"includeOutdatedSnapshots": False
}
}
}

report_info("Running Coverity query...")
return self.retrieve_issues(data)
defects_data = self.retrieve_issues(data)
report_info("done")

return defects_data

def handle_attribute_filter(self, attribute_values, name, valid_attributes, allow_regex=False):
"""Process the given CSV list of attribute values by filtering out the invalid ones while logging an error.
Expand All @@ -322,11 +342,11 @@ def handle_attribute_filter(self, attribute_values, name, valid_attributes, allo
Returns:
set[str]: The attributes values to query with
"""
report_info(f"Using {name} filter [{attribute_values}]")
report_info(f"Using {name!r} filter [{attribute_values}]")
filter_values = set()
for field in attribute_values.split(","):
if not valid_attributes or field in valid_attributes:
report_info("Classification [{field}] is valid")
report_info(f"Classification [{field}] is valid")
filter_values.add(field)
elif allow_regex:
pattern = re.compile(field)
Expand All @@ -346,7 +366,7 @@ def handle_component_filter(self, attribute_values):
Returns:
list[str]: The list of attributes
"""
report_info(f"Using Component filter [{attribute_values}]")
report_info(f"Using 'Component' filter [{attribute_values}]")
parser = csv.reader([attribute_values])
filter_values = []
for fields in parser:
Expand Down
58 changes: 50 additions & 8 deletions tests/filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,7 @@
],
"columns": ["cid"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)
Expand Down Expand Up @@ -74,8 +73,7 @@
],
"columns": ["cid", "checker", "lastTriageComment", "classification"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)
Expand All @@ -102,8 +100,7 @@
],
"columns": ["status", "cid", "checker", "lastTriageComment"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)
Expand Down Expand Up @@ -135,8 +132,53 @@
],
"columns": ["cid", "classification", "action"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
"show": {"scope": "last()", "includeOutdatedSnapshots": False}
},
},
)

# Test fixture: expected Coverity /issues/search request body for a defect list
# filtered on checker (regex-expanded to key matchers) and classification, with the
# snapshotScope pinned to the explicit snapshot ID "123" instead of "last()".
test_snapshot = Filter(
    {
        "checker": "MISRA",
        "impact": None,
        "kind": None,
        "classification": "Intentional,Bug,Pending,Unclassified",
        "action": None,
        "component": None,
        "cwe": None,
        "cid": None,
    },
    ["CID", "Classification", "Checker", "Comment"],
    {
        "filters": [
            {
                "columnKey": "streams",
                "matchMode": "oneOrMoreMatch",
                "matchers": [{"class": "Stream", "name": "test_stream", "type": "nameMatcher"}],
            },
            {
                "columnKey": "checker",
                "matchMode": "oneOrMoreMatch",
                "matchers": [
                    {"type": "keyMatcher", "key": "MISRA 2 KEY"},
                    {"type": "keyMatcher", "key": "MISRA 1"},
                    {"type": "keyMatcher", "key": "MISRA 3"},
                ],
            },
            {
                "columnKey": "classification",
                "matchMode": "oneOrMoreMatch",
                "matchers": [
                    {"type": "keyMatcher", "key": "Bug"},
                    {"type": "keyMatcher", "key": "Pending"},
                    {"type": "keyMatcher", "key": "Unclassified"},
                    {"type": "keyMatcher", "key": "Intentional"},
                ],
            },
        ],
        "columns": ["cid", "checker", "lastTriageComment", "classification"],
        "snapshotScope": {
            "show": {"scope": "123", "includeOutdatedSnapshots": False}
        },
    },
)
Loading

0 comments on commit b36afde

Please sign in to comment.