Commit
Merge branch 'REST-API' into namespace
JokeWaumans committed Aug 22, 2024
2 parents 189c373 + 61ae104 commit 83da8ad
Showing 6 changed files with 342 additions and 159 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-package.yml
@@ -14,7 +14,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
79 changes: 30 additions & 49 deletions mlx/coverity/coverity_services.py
@@ -2,17 +2,14 @@

"""Services and other utilities for Coverity scripting"""

# General
from collections import namedtuple
import csv
import logging
import re
from collections import namedtuple
from urllib.parse import urlencode
import requests.structures
from sphinx.util.logging import getLogger

# For Coverity - REST API
import requests
from sphinx.util.logging import getLogger

# Coverity built in Impact statuses
IMPACT_LIST = ["High", "Medium", "Low"]
@@ -132,7 +129,7 @@ def retrieve_issues(self, filters):
"""Retrieve issues from the server (Coverity Connect).
Args:
filters (json): The filters as json
filters (dict): The filters for the query
Returns:
dict: The response
@@ -206,36 +203,6 @@ def _request(self, url, data=None):
self.logger.warning(err_msg)
return response.raise_for_status()

@staticmethod
def validate_filter_option(name, req_csv, valid_attributes, allow_regex=False):
"""Add filter when the attribute is valid. If `valid_attributes` is empty or falsy,
all attributes of the CSV list are valid.
The CSV list can allow regular expressions when `allow_regex` is set to True.
Args:
name (str): String representation of the attribute.
req_csv (str): A CSV list of attribute values to query.
valid_attributes (list/dict): The valid attributes.
allow_regex (bool): True to treat filter values as regular expressions, False to require exact matches
Returns:
list[str]: The list of valid attributes
"""
logging.info("Validate required %s [%s]", name, req_csv)
filter_values = []
for field in req_csv.split(","):
if not valid_attributes or field in valid_attributes:
logging.info("Classification [%s] is valid", field)
filter_values.append(field)
elif allow_regex:
pattern = re.compile(field)
for element in valid_attributes:
if pattern.search(element) and element not in filter_values:
filter_values.append(element)
else:
logging.error("Invalid %s filter: %s", name, field)
return filter_values

def assemble_query_filter(self, column_name, filter_values, matcher_type):
"""Assemble a filter for a specific column
@@ -259,6 +226,10 @@ def assemble_query_filter(self, column_name, filter_values, matcher_type):
else:
matcher["key"] = filter_
matchers.append(matcher)

if column_name not in self.columns:
self.logger.warning(f"Invalid column name {column_name!r}; Retrieve column keys first.")

return {
"columnKey": self.columns[column_name],
"matchMode": "oneOrMoreMatch",
@@ -300,7 +271,7 @@ def get_defects(self, stream, filters, column_names):
}
]

Filter = namedtuple("Filter", "name matcher_type list allow_regex", defaults=[None, False])
Filter = namedtuple("Filter", "name matcher_type values allow_regex", defaults=[[], False])
filter_options = {
"checker": Filter("Checker", "keyMatcher", self.checkers, True),
"impact": Filter("Impact", "keyMatcher", IMPACT_LIST),
@@ -312,17 +283,13 @@
}

for option, filter in filter_options.items():
if filters[option]:
filter_values = self.handle_attribute_filter(
filters[option], filter.name, filter.list, filter.allow_regex
)
if (filter_option := filters[option]) and (filter_values := self.handle_attribute_filter(
filter_option, filter.name, filter.values, filter.allow_regex)):
if filter_values:
query_filters.append(self.assemble_query_filter(filter.name, filter_values, filter.matcher_type))

if filters["component"]:
filter_values = self.handle_component_filter(filters["component"])
if filter_values:
query_filters.append(self.assemble_query_filter("Component", filter_values, "nameMatcher"))
if (filter := filters["component"]) and (filter_values := self.handle_component_filter(filter)):
query_filters.append(self.assemble_query_filter("Component", filter_values, "nameMatcher"))

data = {
"filters": query_filters,
@@ -342,18 +309,32 @@
logging.info("Running Coverity query...")
return self.retrieve_issues(data)

def handle_attribute_filter(self, attribute_values, name, *args, **kwargs):
"""Applies any filter on an attribute's values.
def handle_attribute_filter(self, attribute_values, name, valid_attributes, allow_regex=False):
"""Process the given CSV list of attribute values by filtering out the invalid ones while logging an error.
The CSV list can allow regular expressions when `allow_regex` is set to True.
Args:
attribute_values (str): A CSV list of attribute values to query.
name (str): String representation of the attribute.
valid_attributes (list/dict): All valid/possible attribute values.
allow_regex (bool): True to treat filter values as regular expressions, False to require exact matches
Returns:
list[str]: The list of valid attributes
set[str]: The attribute values to query with
"""
logging.info("Using %s filter [%s]", name, attribute_values)
filter_values = self.validate_filter_option(name, attribute_values, *args, **kwargs)
filter_values = set()
for field in attribute_values.split(","):
if not valid_attributes or field in valid_attributes:
logging.info("Classification [%s] is valid", field)
filter_values.add(field)
elif allow_regex:
pattern = re.compile(field)
for element in valid_attributes:
if pattern.search(element):
filter_values.add(element)
else:
logging.error("Invalid %s filter: %s", name, field)
return filter_values

def handle_component_filter(self, attribute_values):
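The rewritten handle_attribute_filter folds the old validate_filter_option logic into the instance method and returns a set, so duplicate regex matches are only queried once. A minimal standalone sketch of that matching behaviour, using hypothetical checker names rather than real Coverity data, might look like this:

import logging
import re


def match_attribute_values(req_csv, valid_attributes, allow_regex=False):
    """Return the attribute values selected by a CSV filter string (sketch only)."""
    filter_values = set()
    for field in req_csv.split(","):
        if not valid_attributes or field in valid_attributes:
            filter_values.add(field)  # exact match, or no restriction configured
        elif allow_regex:
            pattern = re.compile(field)
            filter_values.update(value for value in valid_attributes if pattern.search(value))
        else:
            logging.error("Invalid filter value: %s", field)
    return filter_values


# Hypothetical checker names; the regex "MISRA" expands to every matching checker.
print(match_attribute_values("MISRA", ["MISRA 1", "MISRA 2 KEY", "MISRA 3", "CERT C"], allow_regex=True))
# {'MISRA 1', 'MISRA 2 KEY', 'MISRA 3'} (set order may vary)
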
34 changes: 18 additions & 16 deletions setup.py
@@ -25,28 +25,30 @@
long_description_content_type="text/x-rst",
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Environment :: Web Environment",
"Framework :: Sphinx :: Extension",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Topic :: Documentation",
"Topic :: Documentation :: Sphinx",
"Topic :: Utilities",
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Framework :: Sphinx :: Extension',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Topic :: Documentation',
'Topic :: Documentation :: Sphinx',
'Topic :: Utilities',
],
platforms="any",
packages=find_namespace_packages(where="."),
package_dir={"": "."},
include_package_data=True,
install_requires=requires,
python_requires='>=3.8',
namespace_packages=['mlx'],
keywords=[
"coverity",
"reporting",
142 changes: 142 additions & 0 deletions tests/filters.py
@@ -0,0 +1,142 @@
# Filters, column names, and response data kept in a separate Python file

from collections import namedtuple

Filter = namedtuple("Filter", "filters column_names request_data")

# Test with no filters and no column names
test_defect_filter_0 = Filter(
{
"checker": None,
"impact": None,
"kind": None,
"classification": None,
"action": None,
"component": None,
"cwe": None,
"cid": None,
},
[],
{
"filters": [
{
"columnKey": "streams",
"matchMode": "oneOrMoreMatch",
"matchers": [{"class": "Stream", "name": "test_stream", "type": "nameMatcher"}],
}
],
"columns": ["cid"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
},
},
)

test_defect_filter_1 = Filter(
{
"checker": "MISRA",
"impact": None,
"kind": None,
"classification": "Intentional,Bug,Pending,Unclassified",
"action": None,
"component": None,
"cwe": None,
"cid": None,
},
["CID", "Classification", "Checker", "Comment"],
{
"filters": [
{
"columnKey": "streams",
"matchMode": "oneOrMoreMatch",
"matchers": [{"class": "Stream", "name": "test_stream", "type": "nameMatcher"}],
},
{
"columnKey": "checker",
"matchMode": "oneOrMoreMatch",
"matchers": [
{"type": "keyMatcher", "key": "MISRA 2 KEY"},
{"type": "keyMatcher", "key": "MISRA 1"},
{"type": "keyMatcher", "key": "MISRA 3"},
],
},
{
"columnKey": "classification",
"matchMode": "oneOrMoreMatch",
"matchers": [
{"type": "keyMatcher", "key": "Bug"},
{"type": "keyMatcher", "key": "Pending"},
{"type": "keyMatcher", "key": "Unclassified"},
{"type": "keyMatcher", "key": "Intentional"},
],
},
],
"columns": ["cid", "checker", "lastTriageComment", "classification"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
},
},
)

test_defect_filter_2 = Filter(
{
"checker": None,
"impact": None,
"kind": None,
"classification": None,
"action": None,
"component": None,
"cwe": None,
"cid": None,
},
["CID", "Checker", "Status", "Comment"],
{
"filters": [
{
"columnKey": "streams",
"matchMode": "oneOrMoreMatch",
"matchers": [{"class": "Stream", "name": "test_stream", "type": "nameMatcher"}],
}
],
"columns": ["status", "cid", "checker", "lastTriageComment"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
},
},
)

test_defect_filter_3 = Filter(
{
"checker": None,
"impact": None,
"kind": None,
"classification": "Unclassified",
"action": None,
"component": None,
"cwe": None,
"cid": None,
},
["CID", "Classification", "Action"],
{
"filters": [
{
"columnKey": "streams",
"matchMode": "oneOrMoreMatch",
"matchers": [{"class": "Stream", "name": "test_stream", "type": "nameMatcher"}],
},
{
"columnKey": "classification",
"matchMode": "oneOrMoreMatch",
"matchers": [{"type": "keyMatcher", "key": "Unclassified"}],
},
],
"columns": ["cid", "classification", "action"],
"snapshotScope": {
"show": {"scope": "last()", "includeOutdatedSnapshots": False},
"compareTo": {"scope": "last()", "includeOutdatedSnapshots": False},
},
},
)
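
Each fixture bundles the directive filters, the requested column names, and the request payload the extension is expected to send. The actual test module is not shown in this excerpt; a hedged sketch of how a parametrized test might consume these fixtures, checking only structural invariants, could look like this:

import pytest

from filters import (
    test_defect_filter_0,
    test_defect_filter_1,
    test_defect_filter_2,
    test_defect_filter_3,
)


@pytest.mark.parametrize(
    "case",
    [test_defect_filter_0, test_defect_filter_1, test_defect_filter_2, test_defect_filter_3],
)
def test_request_data_shape(case):
    # The stream matcher is always the first filter in the expected payload.
    stream_filter = case.request_data["filters"][0]
    assert stream_filter["columnKey"] == "streams"
    assert stream_filter["matchers"][0]["name"] == "test_stream"
    # "cid" is always requested, even when no column names are configured.
    assert "cid" in case.request_data["columns"]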