From 89c31061415bdb7cd635dc7395a7f8a02e6c8f1a Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 22 Dec 2020 18:07:55 +0000 Subject: [PATCH 01/12] Split HubRestApi mega-class into multiple files --- .gitignore | 1 + .vscode/settings.json | 3 + blackduck/Authentication.py | 7 + blackduck/Components.py | 72 ++ blackduck/Core.py | 183 ++++ blackduck/CustomFields.py | 112 +++ blackduck/Exceptions.py | 49 + blackduck/HubRestApi.py | 1692 ++-------------------------------- blackduck/Jobs.py | 20 + blackduck/Ldap.py | 40 + blackduck/Licences.py | 45 + blackduck/Policy.py | 45 + blackduck/Projects.py | 585 ++++++++++++ blackduck/Reporting.py | 81 ++ blackduck/Roles.py | 63 ++ blackduck/Scans.py | 105 +++ blackduck/Snippet.py | 21 + blackduck/System.py | 18 + blackduck/UserGroup.py | 68 ++ blackduck/Users.py | 66 ++ blackduck/Utils.py | 113 +++ blackduck/Versions.py | 39 + blackduck/Vulnerabilities.py | 38 + blackduck/__init__.py | 1 + 24 files changed, 1841 insertions(+), 1626 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 blackduck/Authentication.py create mode 100644 blackduck/Components.py create mode 100644 blackduck/Core.py create mode 100644 blackduck/CustomFields.py create mode 100644 blackduck/Exceptions.py create mode 100644 blackduck/Jobs.py create mode 100644 blackduck/Ldap.py create mode 100644 blackduck/Licences.py create mode 100644 blackduck/Policy.py create mode 100644 blackduck/Projects.py create mode 100644 blackduck/Reporting.py create mode 100644 blackduck/Roles.py create mode 100644 blackduck/Scans.py create mode 100644 blackduck/Snippet.py create mode 100644 blackduck/System.py create mode 100644 blackduck/UserGroup.py create mode 100644 blackduck/Users.py create mode 100644 blackduck/Utils.py create mode 100644 blackduck/Versions.py create mode 100644 blackduck/Vulnerabilities.py diff --git a/.gitignore b/.gitignore index 894a44cc..1c20e402 100644 --- a/.gitignore +++ b/.gitignore @@ -86,6 +86,7 @@ celerybeat-schedule .venv env/ venv/ +virtualenv/ ENV/ env.bak/ venv.bak/ diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..4d76a7c6 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.pythonPath": "c:\\Users\\arcalder\\Projects\\hub-rest-api-python\\virtualenv\\Scripts\\python.exe" +} \ No newline at end of file diff --git a/blackduck/Authentication.py b/blackduck/Authentication.py new file mode 100644 index 00000000..26d38097 --- /dev/null +++ b/blackduck/Authentication.py @@ -0,0 +1,7 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) \ No newline at end of file diff --git a/blackduck/Components.py b/blackduck/Components.py new file mode 100644 index 00000000..45ecb8c0 --- /dev/null +++ b/blackduck/Components.py @@ -0,0 +1,72 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def find_component_info_for_protex_component(self, protex_component_id, protex_component_release_id): + '''Will return the Hub component corresponding to the protex_component_id, and if a release (version) id + is given, the response will also include the component-version. Returns an empty list if there were + no components found. 
+ ''' + url = self.config['baseurl'] + "/api/components" + if protex_component_release_id: + query = "?q=bdsuite:{}%23{}&limit=9999".format(protex_component_id, protex_component_release_id) + else: + query = "?q=bdsuite:{}&limit=9999".format(protex_component_id) + with_query = url + query + logger.debug("Finding the Hub componet for Protex component id {}, release id {} using query/url {}".format( + protex_component_id, protex_component_release_id, with_query)) + response = self.execute_get(with_query) + logger.debug("query results in status code {}, json data: {}".format(response.status_code, response.json())) + # TODO: Error checking and retry? For now, as POC just assuming it worked + component_list_d = response.json() + return response.json() + +def _get_components_url(self): + return self.get_urlbase() + "/api/components" + +def get_components(self, limit=100, parameters={}): + if limit: + parameters.update({'limit':limit}) + # + # I was only able to GET components when using this internal media type which is how the GUI works + # July 19, 2019 Glenn Snyder + # + custom_headers = {'Accept':'application/vnd.blackducksoftware.internal-1+json'} + url = self._get_components_url() + self._get_parameter_string(parameters) + response = self.execute_get(url, custom_headers=custom_headers) + return response.json() + +def search_components(self, search_str_or_query, limit=100, parameters={}): + if limit: + parameters.update({'limit':limit}) + if search_str_or_query.startswith("q="): + # allow caller to override original behavior with their own query + query = search_str_or_query + else: + # maintain original, somewhat flawed behavior + query = "q=name:{}".format(search_str_or_query) + parm_str = self._get_parameter_string(parameters) + url = self.get_apibase() + "/search/components{}&{}".format(parm_str, query) + response = self.execute_get(url) + return response.json() + +def get_component_by_id(self, component_id): + url = self.config['baseurl'] + "/api/components/{}".format(component_id) + return self.get_component_by_url(url) + +def get_component_by_url(self, component_url): + headers = self.get_headers() + response = self.execute_get(component_url) + jsondata = response.json() + return jsondata + +def update_component_by_id(self, component_id, update_json): + url = self.config["baseurl"] + "/api/components/{}".format(component_id) + return self.update_component_by_url(url, update_json) + +def update_component_by_url(self, component_url, update_json): + return self.execute_put(component_url, update_json) diff --git a/blackduck/Core.py b/blackduck/Core.py new file mode 100644 index 00000000..3b3e4938 --- /dev/null +++ b/blackduck/Core.py @@ -0,0 +1,183 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def read_config(self): + try: + with open('.restconfig.json','r') as f: + self.config = json.load(f) + except: + logging.error(f"Unable to load configuration from '.restconfig.json'. 
Make sure you create one with proper connection and authentication values for your Black Duck server") + raise + +def write_config(self): + with open(self.configfile,'w') as f: + json.dump(self.config, f, indent=3) + +def get_auth_token(self): + api_token = self.config.get('api_token', False) + if api_token: + authendpoint = "/api/tokens/authenticate" + url = self.config['baseurl'] + authendpoint + session = requests.session() + response = session.post( + url, + data={}, + headers={'Authorization': 'token {}'.format(api_token)}, + verify=not self.config['insecure'] + ) + csrf_token = response.headers['X-CSRF-TOKEN'] + try: + bearer_token = json.loads(response.content.decode('utf-8'))['bearerToken'] + except json.decoder.JSONDecodeError as e: + logger.exception("Authentication failure, could not obtain bearer token") + raise Exception("Failed to obtain bearer token, check for valid authentication token") + return (bearer_token, csrf_token, None) + else: + authendpoint="/j_spring_security_check" + url = self.config['baseurl'] + authendpoint + session=requests.session() + credentials = dict() + credentials['j_username'] = self.config['username'] + credentials['j_password'] = self.config['password'] + response = session.post(url, credentials, verify= not self.config['insecure']) + cookie = response.headers['Set-Cookie'] + token = cookie[cookie.index('=')+1:cookie.index(';')] + return (token, None, cookie) + +def _get_hub_rest_api_version_info(self): + '''Get the version info from the server, if available + ''' + session = requests.session() + url = self.config['baseurl'] + "/api/current-version" + response = session.get(url, verify = not self.config['insecure']) + + if response.status_code == 200: + version_info = response.json() + if 'version' in version_info: + return version_info + else: + raise UnknownVersion("Did not find the 'version' key in the response to a successful GET on /api/current-version") + else: + raise UnknownVersion("Failed to retrieve the version info from {}, status code {}".format(url, response.status_code)) + +def _get_major_version(self): + return self.version_info['version'].split(".")[0] + +def get_urlbase(self): + return self.config['baseurl'] + +def get_headers(self): + if self.config.get('api_token', False): + return { + 'X-CSRF-TOKEN': self.csrf_token, + 'Authorization': 'Bearer {}'.format(self.token), + 'Accept': 'application/json', + 'Content-Type': 'application/json'} + else: + if self.bd_major_version == "3": + return {"Cookie": self.cookie} + else: + return {"Authorization":"Bearer " + self.token} + +def get_api_version(self): + url = self.get_urlbase() + '/api/current-version' + response = self.execute_get(url) + version = response.json().get('version', 'unknown') + return version + +def _get_parameter_string(self, parameters={}): + parameter_string = "&".join(["{}={}".format(k,urllib.parse.quote(str(v))) for k,v in sorted(parameters.items(), key=itemgetter(0))]) + return "?" 
+ parameter_string + +def get_tags_url(self, component_or_project): + # Utility method to return the tags URL from either a component or project object + url = None + for link_d in component_or_project['_meta']['links']: + if link_d['rel'] == 'tags': + return link_d['href'] + return url + +def get_link(self, bd_rest_obj, link_name): + # returns the URL for the link_name OR None + if bd_rest_obj and '_meta' in bd_rest_obj and 'links' in bd_rest_obj['_meta']: + for link_obj in bd_rest_obj['_meta']['links']: + if 'rel' in link_obj and link_obj['rel'] == link_name: + return link_obj.get('href', None) + else: + logger.warning("This does not appear to be a BD REST object. It should have ['_meta']['links']") + +def get_limit_paramstring(self, limit): + return "?limit={}".format(limit) + +def get_apibase(self): + return self.config['baseurl'] + "/api" + +def execute_delete(self, url): + headers = self.get_headers() + response = requests.delete(url, headers=headers, verify = not self.config['insecure']) + return response + +def _validated_json_data(self, data_to_validate): + if isinstance(data_to_validate, dict) or isinstance(data_to_validate, list): + json_data = json.dumps(data_to_validate) + else: + json_data = data_to_validate + json.loads(json_data) # will fail with JSONDecodeError if invalid + return json_data + +def execute_get(self, url, custom_headers={}): + headers = self.get_headers() + headers.update(custom_headers) + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + return response + +def execute_put(self, url, data, custom_headers={}): + json_data = self._validated_json_data(data) + headers = self.get_headers() + headers["Content-Type"] = "application/json" + headers.update(custom_headers) + response = requests.put(url, headers=headers, data=json_data, verify = not self.config['insecure']) + return response + +def _create(self, url, json_body): + response = self.execute_post(url, json_body) + # v4+ returns the newly created location in the response headers + # and there is nothing in the response json + # whereas v3 returns the newly created object in the response json + if response.status_code == 201: + if "location" in response.headers: + return response.headers["location"] + else: + try: + response_json = response.json() + except json.decoder.JSONDecodeError: + logger.warning('did not receive any json data back') + else: + if '_meta' in response_json and 'href' in response_json['_meta']: + return response_json['_meta']['href'] + else: + return response_json + elif response.status_code == 412: + raise CreateFailedAlreadyExists("Failed to create the object because it already exists - url {}, body {}, response {}".format(url, json_body, response)) + else: + raise CreateFailedUnknown("Failed to create the object for an unknown reason - url {}, body {}, response {}".format(url, json_body, response)) + +def execute_post(self, url, data, custom_headers={}): + json_data = self._validated_json_data(data) + headers = self.get_headers() + headers["Content-Type"] = "application/json" + headers.update(custom_headers) + response = requests.post(url, headers=headers, data=json_data, verify = not self.config['insecure']) + return response + +def get_matched_components(self, version_obj, limit=9999): + url = "{}/matched-files".format(version_obj['_meta']['href']) + param_string = self._get_parameter_string({'limit': limit}) + url = "{}{}".format(url, param_string) + response = self.execute_get(url) + return response.json() diff --git a/blackduck/CustomFields.py 
b/blackduck/CustomFields.py new file mode 100644 index 00000000..8f1e243c --- /dev/null +++ b/blackduck/CustomFields.py @@ -0,0 +1,112 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_cf_url(self): + return self.get_apibase() + "/custom-fields/objects" + +def supported_cf_object_types(self): + '''Get the types and cache them since they are static (on a per-release basis)''' + if not hasattr(self, "_cf_object_types"): + logger.debug("retrieving object types") + self._cf_object_types = [cfo['name'] for cfo in self.get_cf_objects().get('items', [])] + return self._cf_object_types + +def get_cf_objects(self): + '''Get CF objects and cache them since these are static (on a per-release basis)''' + url = self._get_cf_url() + if not hasattr(self, "_cf_objects"): + logger.debug("retrieving objects") + response = self.execute_get(url) + self._cf_objects = response.json() + return self._cf_objects + +def _get_cf_object_url(self, object_name): + for cf_object in self.get_cf_objects().get('items', []): + if cf_object['name'].lower() == object_name.lower(): + return cf_object['_meta']['href'] + +def get_cf_object(self, object_name): + assert object_name in self.supported_cf_object_types(), "Object name {} not one of the supported types ({})".format(object_name, self.supported_cf_object_types()) + + object_url = self._get_cf_object_url(object_name) + response = self.execute_get(object_url) + return response.json() + +def _get_cf_obj_rel_path(self, object_name): + return object_name.lower().replace(" ", "-") + +def create_cf(self, object_name, field_type, description, label, position, active=True, initial_options=[]): + ''' + Create a custom field for the given object type (e.g. "Project", "Project Version") using the field_type and other parameters. + + Initial options are needed for field types like multi-select where the multiple values to choose from must also be provided. + + initial_options = [{"label":"val1", "position":0}, {"label":"val2", "position":1}] + ''' + assert isinstance(position, int) and position >= 0, "position must be an integer that is greater than or equal to 0" + assert field_type in ["BOOLEAN", "DATE", "DROPDOWN", "MULTISELECT", "RADIO", "TEXT", "TEXTAREA"] + + types_using_initial_options = ["DROPDOWN", "MULTISELECT", "RADIO"] + + post_url = self._get_cf_object_url(object_name) + "/fields" + cf_object = self._get_cf_obj_rel_path(object_name) + cf_request = { + "active": active, + "description": description, + "label": label, + "position": position, + "type": field_type, + } + if field_type in types_using_initial_options and initial_options: + cf_request.update({"initialOptions": initial_options}) + response = self.execute_post(post_url, data=cf_request) + return response + +def delete_cf(self, object_name, field_id): + '''Delete a custom field from a given object type, e.g. Project, Project Version, Component, etc + + WARNING: Deleting a custom field is irreversiable. Any data in the custom fields could be lost so use with caution. + ''' + assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) + + delete_url = self._get_cf_object_url(object_name) + "/fields/{}".format(field_id) + return self.execute_delete(delete_url) + +def get_custom_fields(self, object_name): + '''Get the custom field (definition) for a given object type, e.g. 
Project, Project Version, Component, etc + ''' + assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) + + url = self._get_cf_object_url(object_name) + "/fields" + + response = self.execute_get(url) + return response.json() + +def get_cf_values(self, obj): + '''Get all of the custom fields from an object such as a Project, Project Version, Component, etc + + The obj is expected to be the JSON document for a project, project-version, component, etc + ''' + url = self.get_link(obj, "custom-fields") + response = self.execute_get(url) + return response.json() + +def get_cf_value(self, obj, field_id): + '''Get a custom field value from an object such as a Project, Project Version, Component, etc + + The obj is expected to be the JSON document for a project, project-version, component, etc + ''' + url = self.get_link(obj, "custom-fields") + "/{}".format(field_id) + response = self.execute_get(url) + return response.json() + +def put_cf_value(self, cf_url, new_cf_obj): + '''new_cf_obj is expected to be a modified custom field value object with the values updated accordingly, e.g. + call get_cf_value, modify the object, and then call put_cf_value + ''' + return self.execute_put(cf_url, new_cf_obj) diff --git a/blackduck/Exceptions.py b/blackduck/Exceptions.py new file mode 100644 index 00000000..61f71fcd --- /dev/null +++ b/blackduck/Exceptions.py @@ -0,0 +1,49 @@ +''' + +Created on Dec 22, 2020 +@author: ar-calder + +''' +import logging +from json import JSONDecodeError +from .Utils import pfmt + +logger = logging.getLogger(__name__) + +class CreateFailedAlreadyExists(Exception): + pass + +class CreateFailedUnknown(Exception): + pass + +class InvalidVersionPhase(Exception): + pass + +class UnknownVersion(Exception): + pass + +class UnsupportedBDVersion(Exception): + # Some operations require specific versions of BD + pass + +class EndpointNotFound(Exception): + pass + +class UnacceptableContentType(Exception): + pass + +def exception_handler(self, response, name): + error_codes = { + 404 : EndpointNotFound, + 406 : UnacceptableContentType + } + + try: + content = pfmt(response.json()) + except JSONDecodeError: + content = response.text + + error = error_codes.get(response.status_code) + if error: + raise error(f"{name}: {content}") + raise NotImplementedError(f"No handler for status code: {response.status_code}") \ No newline at end of file diff --git a/blackduck/HubRestApi.py b/blackduck/HubRestApi.py index d319e4be..6264cea5 100755 --- a/blackduck/HubRestApi.py +++ b/blackduck/HubRestApi.py @@ -47,31 +47,9 @@ ''' import logging -import requests -import json -from operator import itemgetter -import urllib.parse logger = logging.getLogger(__name__) -# TODO: Create some kind of Black Duck exception grouping/hierarchy? 
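The exception classes deleted from HubRestApi.py below now live in blackduck/Exceptions.py (shown above), together with an exception_handler helper that maps selected HTTP status codes onto them. A minimal sketch of that mapping, assuming the patched blackduck package is importable; FakeResponse and the endpoint name are illustrative only, no live server is involved:

from json import JSONDecodeError
from blackduck.Exceptions import EndpointNotFound, exception_handler

class FakeResponse:
    """Illustrative stand-in for requests.Response."""
    status_code = 404
    text = "Not Found"
    def json(self):
        raise JSONDecodeError("no JSON body", "", 0)   # forces the response.text fallback

try:
    exception_handler(None, FakeResponse(), "get_projects")   # self is unused by the helper
except EndpointNotFound as err:
    print(err)   # -> get_projects: Not Found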
- -class CreateFailedAlreadyExists(Exception): - pass - -class CreateFailedUnknown(Exception): - pass - -class InvalidVersionPhase(Exception): - pass - -class UnknownVersion(Exception): - pass - -class UnsupportedBDVersion(Exception): - # Some operations require specific versions of BD - pass - def object_id(object): assert '_meta' in object, "REST API object must have _meta key" assert 'href' in object['_meta'], "REST API object must have href key in it's _meta" @@ -87,6 +65,68 @@ class HubInstance(object): # TODO: What to do about the config file for thread-safety, concurrency configfile = ".restconfig.json" + + from .Core import ( + _create,_get_hub_rest_api_version_info,_get_major_version,_get_parameter_string,_validated_json_data, + execute_delete,execute_get,execute_post,execute_put,get_api_version,get_apibase,get_auth_token,get_headers, + get_limit_paramstring,get_link,get_matched_components,get_tags_url,get_urlbase,read_config,write_config + ) + from .Roles import ( + _get_role_url, assign_role_given_role_url, assign_role_to_user_or_group, + delete_role_from_user_or_group, get_role_url_by_name, get_roles, get_roles_for_user_or_group, + get_roles_url_from_user_or_group, user_has_role + ) + from .Users import ( + _get_user_url, create_user, delete_user_by_id, delete_user_by_url, get_current_user, + get_last_login, get_user_by_id, get_user_by_url, get_users, reset_user_password, update_user_by_id, + update_user_by_url + ) + from .UserGroup import ( + _get_user_group_url, create_user_group, create_user_group_by_name, + delete_user_group_by_id, delete_user_group_by_url, get_user_group_by_name, get_user_groups, + update_user_group_by_id, update_user_group_by_url + ) + from .Policy import ( + _get_policy_url, create_policy, delete_policy_by_id, delete_policy_by_url, + get_policies, get_policy_by_id, get_policy_by_url, update_policy_by_id, update_policy_by_url + ) + from .Vulnerabilities import ( + _get_vulnerabilities_url, get_component_remediation, get_vulnerabilities, + get_vulnerability_affected_projects, get_vulnerable_bom_components + ) + from .Reporting import ( + create_version_notices_report, create_version_reports, create_vuln_status_report, + download_notification_report, download_report + ) + from .Projects import ( + _find_user_group_url, _find_user_url, _get_projects_url, _project_role_urls, + assign_project_application_id, assign_user_group_to_project, assign_user_to_project, + compare_project_versions, create_project, create_project_version, delete_all_empty_versions, + delete_application_id, delete_empty_projects, delete_empty_versions, delete_project_by_name, + delete_project_version_by_name, delete_project_version_codelocations, delete_user_group_from_project, + get_or_create_project_version, get_project_application_id, get_project_by_id, get_project_by_name, + get_project_info, get_project_roles, get_project_version_by_name, get_project_versions, get_projects, + get_projects_by_version_name, get_version_by_id, get_version_by_name, get_version_codelocations, + get_version_components, get_version_scan_info, update_project_application_id, update_project_settings, + update_project_version_settings + ) # TODO Transfer relevant versions related functions to .Versions + from .Versions import ( add_version_as_component, remove_version_as_component ) + from .Scans import ( + delete_codelocation, delete_unmapped_codelocations, download_project_scans, + get_codelocation_scan_summaries, get_codelocations, get_scan_locations, upload_scan + ) + from .Components import ( + 
_get_components_url, find_component_info_for_protex_component, get_component_by_id, + get_component_by_url, get_components, search_components, update_component_by_id, update_component_by_url + ) + from .CustomFields import ( + _get_cf_obj_rel_path, _get_cf_object_url, _get_cf_url, create_cf, delete_cf, + get_cf_object, get_cf_objects, get_cf_value, get_cf_values, get_custom_fields, put_cf_value, + supported_cf_object_types + ) + from .Licences import ( _get_license_info, get_license_info_for_bom_component, get_licenses ) + from .System import ( get_health_checks, get_notifications ) + from .Ldap import ( disable_ldap, enable_ldap, get_ldap_configs, get_ldap_state ) def __init__(self, *args, **kwargs): # Config needs to be an instance variable for thread-safety, concurrent use of HubInstance() @@ -121,1608 +161,8 @@ def __init__(self, *args, **kwargs): self.version_info = {'version': '3'} # assume it's v3 since all versions after 3 supported version info self.bd_major_version = self._get_major_version() - - def read_config(self): - try: - with open('.restconfig.json','r') as f: - self.config = json.load(f) - except: - logging.error(f"Unable to load configuration from '.restconfig.json'. Make sure you create one with proper connection and authentication values for your Black Duck server") - raise - - def write_config(self): - with open(self.configfile,'w') as f: - json.dump(self.config, f, indent=3) - - def get_auth_token(self): - api_token = self.config.get('api_token', False) - if api_token: - authendpoint = "/api/tokens/authenticate" - url = self.config['baseurl'] + authendpoint - session = requests.session() - response = session.post( - url, - data={}, - headers={'Authorization': 'token {}'.format(api_token)}, - verify=not self.config['insecure'] - ) - csrf_token = response.headers['X-CSRF-TOKEN'] - try: - bearer_token = json.loads(response.content.decode('utf-8'))['bearerToken'] - except json.decoder.JSONDecodeError as e: - logger.exception("Authentication failure, could not obtain bearer token") - raise Exception("Failed to obtain bearer token, check for valid authentication token") - return (bearer_token, csrf_token, None) - else: - authendpoint="/j_spring_security_check" - url = self.config['baseurl'] + authendpoint - session=requests.session() - credentials = dict() - credentials['j_username'] = self.config['username'] - credentials['j_password'] = self.config['password'] - response = session.post(url, credentials, verify= not self.config['insecure']) - cookie = response.headers['Set-Cookie'] - token = cookie[cookie.index('=')+1:cookie.index(';')] - return (token, None, cookie) - - def _get_hub_rest_api_version_info(self): - '''Get the version info from the server, if available - ''' - session = requests.session() - url = self.config['baseurl'] + "/api/current-version" - response = session.get(url, verify = not self.config['insecure']) - - if response.status_code == 200: - version_info = response.json() - if 'version' in version_info: - return version_info - else: - raise UnknownVersion("Did not find the 'version' key in the response to a successful GET on /api/current-version") - else: - raise UnknownVersion("Failed to retrieve the version info from {}, status code {}".format(url, response.status_code)) - - def _get_major_version(self): - return self.version_info['version'].split(".")[0] - - def get_urlbase(self): - return self.config['baseurl'] - - def get_headers(self): - if self.config.get('api_token', False): - return { - 'X-CSRF-TOKEN': self.csrf_token, - 'Authorization': 
'Bearer {}'.format(self.token), - 'Accept': 'application/json', - 'Content-Type': 'application/json'} - else: - if self.bd_major_version == "3": - return {"Cookie": self.cookie} - else: - return {"Authorization":"Bearer " + self.token} - - def get_api_version(self): - url = self.get_urlbase() + '/api/current-version' - response = self.execute_get(url) - version = response.json().get('version', 'unknown') - return version - - def _get_parameter_string(self, parameters={}): - parameter_string = "&".join(["{}={}".format(k,urllib.parse.quote(str(v))) for k,v in sorted(parameters.items(), key=itemgetter(0))]) - return "?" + parameter_string - - def get_tags_url(self, component_or_project): - # Utility method to return the tags URL from either a component or project object - url = None - for link_d in component_or_project['_meta']['links']: - if link_d['rel'] == 'tags': - return link_d['href'] - return url - - def get_link(self, bd_rest_obj, link_name): - # returns the URL for the link_name OR None - if bd_rest_obj and '_meta' in bd_rest_obj and 'links' in bd_rest_obj['_meta']: - for link_obj in bd_rest_obj['_meta']['links']: - if 'rel' in link_obj and link_obj['rel'] == link_name: - return link_obj.get('href', None) - else: - logger.warning("This does not appear to be a BD REST object. It should have ['_meta']['links']") - - def get_limit_paramstring(self, limit): - return "?limit={}".format(limit) - - def get_apibase(self): - return self.config['baseurl'] + "/api" - - ### - # - # Role stuff - # - ### - def _get_role_url(self): - return self.config['baseurl'] + "/api/roles" - - def get_roles(self, parameters={}): - url = self._get_role_url() + self._get_parameter_string(parameters) - response = self.execute_get(url) - return response.json() - - def get_roles_url_from_user_or_group(self, user_or_group): - # Given a user or user group object, return the 'roles' url - roles_url = None - for endpoint in user_or_group['_meta']['links']: - if endpoint['rel'] == "roles": - roles_url = endpoint['href'] - return roles_url - - def get_roles_for_user_or_group(self, user_or_group): - roles_url = self.get_roles_url_from_user_or_group(user_or_group) - if roles_url: - response = self.execute_get(roles_url) - return response.json() - else: - return [] - - def get_role_url_by_name(self, role_name): - # Return the global (as opposed to project-specific) role URL for this server corresponding to the role name - all_roles = self.get_roles() - for role in all_roles['items']: - if role['name'] == role_name: - return role['_meta']['href'] - - def assign_role_to_user_or_group(self, role_name, user_or_group): - user_or_group_roles_url = self.get_roles_url_from_user_or_group(user_or_group) - return self.assign_role_given_role_url(role_name, user_or_group_roles_url) - - def assign_role_given_role_url(self, role_name, user_or_group_role_assignment_url): - role_url = self.get_role_url_by_name(role_name) - if self.bd_major_version == "3": - # A hack to get the assignment to work on v3 - role_url = role_url.replace("api", "api/internal") - data = {"name": role_name, "role": role_url} - logger.debug("executing POST to {} with {}".format( - user_or_group_role_assignment_url, data)) - return self.execute_post(user_or_group_role_assignment_url, data = data) - - def delete_role_from_user_or_group(self, role_name, user_or_group): - roles = self.get_roles_for_user_or_group(user_or_group) - for role in roles['items']: - if role['name'] == role_name: - self.execute_delete(role['_meta']['href']) - - # def get_current_user_roles(self): 
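The role helpers removed in this hunk are re-imported into HubInstance from blackduck/Roles.py (see the `from .Roles import (...)` block earlier in the class), so existing call sites keep working. A minimal sketch, assuming a server configured via .restconfig.json; the user query and role name are illustrative:

from blackduck.HubRestApi import HubInstance

hub = HubInstance()   # with no arguments, falls back to .restconfig.json
user = hub.get_users(parameters={"q": "userName:jsmith"})["items"][0]

if not hub.user_has_role(user, "Global Code Scanner"):
    # assign_role_to_user_or_group looks up the role URL by name and POSTs the assignment
    hub.assign_role_to_user_or_group("Global Code Scanner", user)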
- # url = self.config['baseurl'] + "/api/current-user" - # response = self.execute_get(url) - # response = self.get_roles_for_user_or_group(response.json()) - # roles_json = response.json() - # return roles_json - - # def current_user_has_role(self, role_name): - # user_roles_obj = self.get_current_user_roles() - # return role_name in [r['name'] for r in user_roles_obj['items']] - - def user_has_role(self, user_or_group, role_name): - user_roles_obj = self.get_roles_for_user_or_group(user_or_group) - return role_name in [r['name'] for r in user_roles_obj['items']] - - ### - # - # User stuff - # - ### - def _get_user_url(self): - return self.config['baseurl'] + "/api/users" - - def get_users(self, parameters={}): - url = self._get_user_url() + self._get_parameter_string(parameters) - headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} - response = self.execute_get(url, custom_headers=headers) - return response.json() - - def get_current_user(self): - url = self.config['baseurl'] + "/api/current-user" - headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} - response = self.execute_get(url, custom_headers=headers) - return response.json() - - def create_user(self, user_json): - url = self._get_user_url() - location = self._create(url, user_json) - return location - - def get_user_by_id(self, user_id): - url = self._get_user_url() + "/{}".format(user_id) - headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} - return self.get_user_by_url(url, custom_headers=headers) - - def get_user_by_url(self, user_url): - headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} - response = self.execute_get(user_url, custom_headers=headers) - jsondata = response.json() - return jsondata - - def update_user_by_id(self, user_id, update_json): - url = self._get_user_url() + "/{}".format(user_id) - return self.update_user_by_url(url, update_json) - - def update_user_by_url(self, user_url, update_json): - return self.execute_put(user_url, update_json) - - def delete_user_by_id(self, user_id): - url = self._get_user_url() + "/{}".format(user_id) - return self.delete_user_by_url(url) - - def delete_user_by_url(self, user_url): - return self.execute_delete(user_url) - - def reset_user_password(self, user_id, new_password): - url = self.config['baseurl'] + "/api/users/" + user_id + "/resetpassword" - headers = {'Content-Type':'application/vnd.blackducksoftware.user-1+json', 'Accept': 'application/json'} - data = {'password': new_password} - return self.execute_put(url, data, headers) - - def get_last_login(self,sinceDays=60): - url = self.config['baseurl'] + "/api/dormant-users" - param_string = self._get_parameter_string({'sinceDays': sinceDays}) - url = "{}{}".format(url, param_string) - headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} - response = self.execute_get(url, custom_headers=headers) - return response.json() - - ### - # - # User group stuff - # - ### - def _get_user_group_url(self): - return self.config['baseurl'] + "/api/usergroups" - - def get_user_groups(self, parameters={}): - url = self._get_user_group_url() + self._get_parameter_string(parameters) - headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} - response = self.execute_get(url, custom_headers=headers) - return response.json() - - def get_user_group_by_name(self, group_name): - group_list = self.get_user_groups({"q": f"name:{group_name}"}) - for group in group_list['items']: - if group['name'] == group_name: - return group - - def 
create_user_group(self, user_group_json): - if self.bd_major_version == "3": - url = self.config['baseurl'] + '/api/v1/usergroups' - else: - url = self._get_user_group_url() - location = self._create(url, user_group_json) - return location - - def create_user_group_by_name(self, group_name, active=True): - user_group_info = { - 'name': group_name, - 'createdFrom': 'INTERNAL', - 'active': active - } - return self.create_user_group(user_group_info) - - # def get_user_group_by_id(self, user_group_id): - # url = self._get_user_group_url() + "/{}".format(user_group_id) - # return self.get_user_group_by_url(url) - - # def get_user_group_by_url(self, user_group_url): - # response = self.execute_get(user_group_url) - # jsondata = response.json() - # return jsondata - - # def get_user_group_by_name(self, user_group_name): - # url = self._get_user_group_url() + "?q={}".format(user_group_name) - # response = self.execute_get(url) - # user_group_obj = response.json() - # if user_group_obj['totalCount'] > 0: - # return user_group_obj['items'][0] - - def update_user_group_by_id(self, user_group_id, update_json): - url = self._get_user_group_url() + "/{}".format(user_group_id) - return self.update_user_group_by_url(url, update_json) - - def update_user_group_by_url(self, user_group_url, update_json): - return self.execute_put(user_group_url, update_json) - - def delete_user_group_by_id(self, user_group_id): - url = self._get_user_group_url() + "/{}".format(user_group_id) - return self.delete_user_group_by_url(url) - - def delete_user_group_by_url(self, user_group_url): - return self.execute_delete(user_group_url) - - ### - # - # Policy stuff - # - ### - def _get_policy_url(self): - return self.config['baseurl'] + "/api/policy-rules" - - def get_policies(self, parameters={}): - url = self._get_policy_url() + self._get_parameter_string(parameters) - headers = {'Accept': 'application/json'} - response = self.execute_get(url, custom_headers=headers) - return response.json() - - def create_policy(self, policy_json): - url = self._get_policy_url() - location = self._create(url, policy_json) - return location - - def get_policy_by_id(self, policy_id): - url = self._get_policy_url() + "/{}".format(policy_id) - return self.get_policy_by_url(url) - - def get_policy_by_url(self, policy_url): - headers = {'Accept': 'application/vnd.blackducksoftware.policy-4+json'} - response = self.execute_get(policy_url, custom_headers=headers) - jsondata = response.json() - return jsondata - - def update_policy_by_id(self, policy_id, update_json): - url = self._get_policy_url() + "/{}".format(policy_id) - return self.update_policy_by_url(url, update_json) - - def update_policy_by_url(self, policy_url, update_json): - return self.execute_put(policy_url, update_json) - - def delete_policy_by_id(self, policy_id): - url = self._get_policy_url() + "/{}".format(policy_id) - return self.delete_policy_by_url(url) - - def delete_policy_by_url(self, policy_url): - return self.execute_delete(policy_url) - - ## - # - # Vulnerabilities - # - ## - def _get_vulnerabilities_url(self): - return self.config['baseurl'] + '/api/vulnerabilities' - - def get_vulnerabilities(self, vulnerability, parameters={}): - url = self._get_vulnerabilities_url() + "/{}".format(vulnerability) + self._get_parameter_string(parameters) - headers = {'Accept': 'application/vnd.blackducksoftware.vulnerability-4+json'} - response = self.execute_get(url, custom_headers=headers) - return response.json() - - def get_vulnerability_affected_projects(self, vulnerability): - url 
= self._get_vulnerabilities_url() + "/{}/affected-projects".format(vulnerability) - custom_headers = {'Accept': 'application/vnd.blackducksoftware.vulnerability-4+json'} - response = self.execute_get(url, custom_headers=custom_headers) - return response.json() - - # TODO: Refactor this, i.e. use get_link method? - def get_vulnerable_bom_components(self, version_obj, limit=9999): - url = "{}/vulnerable-bom-components".format(version_obj['_meta']['href']) - custom_headers = {'Accept': 'application/vnd.blackducksoftware.bill-of-materials-6+json'} - param_string = self._get_parameter_string({'limit': limit}) - url = "{}{}".format(url, param_string) - response = self.execute_get(url, custom_headers=custom_headers) - return response.json() - - # TODO: Remove or refactor this - def get_component_remediation(self, bom_component): - url = "{}/remediating".format(bom_component['componentVersion']) - logger.debug("Url for getting remediation info is : {}".format(url)) - response = self.execute_get(url) - return response.json() - - ## - # - # Lookup Black Duck (Hub) KB info given Protex KB info - # - ## - def find_component_info_for_protex_component(self, protex_component_id, protex_component_release_id): - '''Will return the Hub component corresponding to the protex_component_id, and if a release (version) id - is given, the response will also include the component-version. Returns an empty list if there were - no components found. - ''' - url = self.config['baseurl'] + "/api/components" - if protex_component_release_id: - query = "?q=bdsuite:{}%23{}&limit=9999".format(protex_component_id, protex_component_release_id) - else: - query = "?q=bdsuite:{}&limit=9999".format(protex_component_id) - with_query = url + query - logger.debug("Finding the Hub componet for Protex component id {}, release id {} using query/url {}".format( - protex_component_id, protex_component_release_id, with_query)) - response = self.execute_get(with_query) - logger.debug("query results in status code {}, json data: {}".format(response.status_code, response.json())) - # TODO: Error checking and retry? 
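get_vulnerable_bom_components, now sourced from blackduck/Vulnerabilities.py, still takes a project-version object and returns the parsed JSON. A short sketch, assuming the project and version exist on the server and that each item carries a vulnerabilityWithRemediation block, as the vulnerable-bom-components payload normally does (names are illustrative):

from blackduck.HubRestApi import HubInstance

hub = HubInstance()
version = hub.get_project_version_by_name("my-project", "1.0")
vulns = hub.get_vulnerable_bom_components(version)
for item in vulns.get("items", []):
    vuln = item.get("vulnerabilityWithRemediation", {})
    print(item.get("componentName"), vuln.get("vulnerabilityName"), vuln.get("severity"))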
For now, as POC just assuming it worked - component_list_d = response.json() - return response.json() - - - ## - # - # CSV and Notices reporting - # - ## - - valid_categories = ['VERSION','CODE_LOCATIONS','COMPONENTS','SECURITY','FILES', 'ATTACHMENTS', 'CRYPTO_ALGORITHMS', 'PROJECT_VERSION_CUSTOM_FIELDS', 'BOM_COMPONENT_CUSTOM_FIELDS', 'LICENSE_TERM_FULFILLMENT'] - valid_report_formats = ["CSV", "JSON"] - def create_version_reports(self, version, report_list, format="CSV"): - assert all(list(map(lambda k: k in HubInstance.valid_categories, report_list))), "One or more selected report categories in {} are not valid ({})".format( - report_list, HubInstance.valid_categories) - assert format in HubInstance.valid_report_formats, "Format must be one of {}".format(HubInstance.valid_report_formats) - - post_data = { - 'categories': report_list, - 'versionId': version['_meta']['href'].split("/")[-1], - 'reportType': 'VERSION', - 'reportFormat': format - } - version_reports_url = self.get_link(version, 'versionReport') - return self.execute_post(version_reports_url, post_data) - - valid_notices_formats = ["TEXT", "JSON"] - def create_version_notices_report(self, version, format="TEXT", include_copyright_info=True): - assert format in HubInstance.valid_notices_formats, "Format must be one of {}".format(HubInstance.valid_notices_formats) - - post_data = { - 'versionId': object_id(version), - 'reportType': 'VERSION_LICENSE', - 'reportFormat': format - } - if include_copyright_info: - post_data.update({'categories': ["COPYRIGHT_TEXT"] }) - - notices_report_url = self.get_link(version, 'licenseReports') - return self.execute_post(notices_report_url, post_data) - - def download_report(self, report_id): - # TODO: Fix me, looks like the reports should be downloaded from different paths than the one here, and depending on the type and format desired the path can change - url = self.get_urlbase() + "/api/reports/{}".format(report_id) - return self.execute_get(url, {'Content-Type': 'application/zip', 'Accept':'application/zip'}) - - def download_notification_report(self, report_location_url): - '''Download the notices report using the report URL. 
Inspect the report object to determine - the format and use the appropriate media header''' - custom_headers = {'Accept': 'application/vnd.blackducksoftware.report-4+json'} - response = self.execute_get(report_location_url, custom_headers=custom_headers) - report_obj = response.json() - - if report_obj['reportFormat'] == 'TEXT': - download_url = self.get_link(report_obj, "download") + ".json" - logger.debug("downloading report from {}".format(download_url)) - response = self.execute_get(download_url, {'Accept': 'application/zip'}) - else: - # JSON - contents_url = self.get_link(report_obj, "content") - logger.debug("retrieving report contents from {}".format(contents_url)) - response = self.execute_get(contents_url, {'Accept': 'application/json'}) - return response, report_obj['reportFormat'] - - ## - # - # (Global) Vulnerability reports - # - ## - valid_vuln_status_report_formats = ["CSV", "JSON"] - def create_vuln_status_report(self, format="CSV"): - assert format in HubInstance.valid_vuln_status_report_formats, "Format must be one of {}".format(HubInstance.valid_vuln_status_report_formats) - - post_data = { - "reportFormat": format, - "locale": "en_US" - } - url = self.get_apibase() + "/vulnerability-status-reports" - custom_headers = { - 'Content-Type': 'application/vnd.blackducksoftware.report-4+json', - 'Accept': 'application/vnd.blackducksoftware.report-4+json' - } - return self.execute_post(url, custom_headers=custom_headers, data=post_data) - - ## - # - # License stuff - # - ## - def _get_license_info(self, license_obj): - if 'license' in license_obj: - license_info = {} - text_json = {} - logger.debug("license: {}".format(license_obj)) - response = self.execute_get(license_obj['license']) - if response.status_code == 200: - license_info = response.json() - text_url = self.get_link(license_info, 'text') - response = self.execute_get(text_url) - if response.status_code == 200: - text_json = response.text - yield {"license_info": license_info, - "license_text_info": text_json} - elif 'licenses' in license_obj and isinstance(license_obj['licenses'], list): - for license in license_obj['licenses']: - self._get_license_info(license) - - def get_license_info_for_bom_component(self, bom_component, limit=1000): - self._check_version_compatibility() - all_licenses = {} - logger.debug("gathering license info for bom component {}, version {}".format( - bom_component['componentName'], bom_component['componentVersionName'])) - for license in bom_component.get('licenses', []): - for license_info_obj in self._get_license_info(license): - all_licenses.update({ - license['licenseDisplay']: license_info_obj - }) - return all_licenses - - ## - # - # Files and Snippet matching - # - ## - def _check_version_compatibility(self): - if int(self.bd_major_version) < 2018: - raise UnsupportedBDVersion("The BD major version {} is less than the minimum required major version {}".format(self.bd_major_version, 2018)) - - def get_file_matches_for_bom_component(self, bom_component, limit=1000): - self._check_version_compatibility() - url = self.get_link(bom_component, "matched-files") - paramstring = self.get_limit_paramstring(limit) - logger.debug("GET {}".format(url)) - response = self.execute_get(url) - jsondata = response.json() - return jsondata - - - ## - # - # Projects and versions Stuff - # - ## - - def _get_projects_url(self): - return self.get_urlbase() + "/api/projects" - - def get_projects(self, limit=100, parameters={}): - headers = self.get_headers() - if limit: - parameters.update({'limit': limit}) 
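The notices-report flow is unchanged by the split: create_version_notices_report POSTs the request and download_notification_report later fetches the result from the new report's URL. A hedged sketch, assuming the creation response carries a Location header (as the report endpoints typically do) and that a real script would poll for report completion instead of sleeping; project and version names are illustrative:

import time
from blackduck.HubRestApi import HubInstance

hub = HubInstance()
version = hub.get_project_version_by_name("my-project", "1.0")

create_response = hub.create_version_notices_report(version, format="TEXT")
if create_response.status_code == 201:
    report_url = create_response.headers["Location"]
    time.sleep(30)   # crude wait for report generation
    report_response, report_format = hub.download_notification_report(report_url)
    print(report_format, report_response.status_code)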
- url = self._get_projects_url() + self._get_parameter_string(parameters) - headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' - logger.debug(f"Retrieving projects using url {url}") - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def create_project(self, project_name, version_name="Default Version", parameters={}): - url = self._get_projects_url() - - post_data = { - "name": project_name, - "description": parameters.get("description", ""), - "projectTier": parameters.get("project_tier", ""), - "projectOwner": parameters.get("project_owner", ""), - "projectLevelAdjustments": parameters.get("project_level_adjustments", True), - "cloneCategories": [ - "COMPONENT_DATA", - "VULN_DATA" - ], - "versionRequest": { - "phase": parameters.get("version_phase", "PLANNING"), - "distribution": parameters.get("version_distribution", "EXTERNAL"), - "projectLevelAdjustments": parameters.get("project_level_adjustments", True), - "versionName": version_name - } - } - response = self.execute_post(url, data=post_data) - return response - - def create_project_version(self, project_obj, new_version_name, clone_version=None, parameters={}): - url = self.get_link(project_obj, "versions") - - version_phase = parameters.get("phase", "PLANNING") - if version_phase not in HubInstance.VERSION_PHASES: - raise InvalidVersionPhase("The phase given {} is not in the list of valid phases ({})".format( - version_phase, HubInstance.VERSION_PHASES)) - - post_data = { - "versionUrl": url, - "cloneCategories": [ - "VULN_DATA", - "COMPONENT_DATA" - ], - "versionName": new_version_name, - "phase": version_phase, - "distribution": parameters.get("distribution", "EXTERNAL") - } - if clone_version: - post_data["cloneFromReleaseUrl"] = clone_version['_meta']['href'] - response = self.execute_post(url, data=post_data) - return response - - def get_project_by_name(self, project_name): - project_list = self.get_projects(parameters={"q":"name:{}".format(project_name)}) - for project in project_list['items']: - if project['name'] == project_name: - return project - - def get_projects_by_version_name(self, version_name, exclude_projects=None): - """Returns all project dicts which have given version_name, including the version object under 'version' key - - Arguments: - version_name {str} -- version name to be searched - exclude_projects {list} -- list of project names to be excluded from scanning for given version name - """ - headers = self.get_headers() - projects = self.get_projects(limit=9999).get('items',[]) - if len(projects) == 0: - logger.error('No projects found') - else: - jsondata = {'items':[]} - for project in projects: - if project['name'] not in exclude_projects: - version = self.get_version_by_name(project, version_name) - if version: - project['version'] = version - jsondata['items'].append(project) - jsondata['totalCount'] = len(jsondata['items']) - return jsondata - - def get_version_by_name(self, project, version_name): - version_list = self.get_project_versions(project, parameters={'q':"versionName:{}".format(version_name)}) - # A query by name can return more than one version if other versions - # have names that include the search term as part of their name - for version in version_list['items']: - if version['versionName'] == version_name: - return version - - def get_project_version_by_name(self, project_name, version_name): - project = self.get_project_by_name(project_name) - if project: - version = 
self.get_version_by_name(project, version_name) - if version == None: - logger.debug("Did not find any project version matching {}".format(version_name)) - else: - return version - else: - logger.debug("Did not find a project with name {}".format(project_name)) - - def get_or_create_project_version(self, project_name, version_name, parameters = {}): - project = self.get_project_by_name(project_name) - if project: - version = self.get_version_by_name(project, version_name) - if not version: - self.create_project_version(project, version_name, parameters=parameters) - version = self.get_version_by_name(project, version_name) - else: - self.create_project(project_name, version_name, parameters=parameters) - project = self.get_project_by_name(project_name) - version = self.get_version_by_name(project, version_name) - return version - - def get_project_by_id(self, project_id, limit=100): - headers = self.get_headers() - paramstring = self.get_limit_paramstring(limit) - url = self._get_projects_url() + "/" + project_id + paramstring - headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def get_project_versions(self, project, limit=100, parameters={}): - # paramstring = self.get_limit_paramstring(limit) - parameters.update({'limit': limit}) - url = project['_meta']['href'] + "/versions" + self._get_parameter_string(parameters) - headers = self.get_headers() - headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def get_version_components(self, projectversion, limit=1000): - paramstring = self.get_limit_paramstring(limit) - url = projectversion['_meta']['href'] + "/components" + paramstring - headers = self.get_headers() - headers['Accept'] = 'application/vnd.blackducksoftware.bill-of-materials-6+json' - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def update_project_settings(self, project, new_settings={}): - url = project['_meta']['href'] - headers = self.get_headers() - headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' - headers['Content-Type'] = 'application/vnd.blackducksoftware.project-detail-4+json' - response = self.execute_put(url, new_settings, headers) - return response - - def update_project_version_settings(self, project_name, version_name, new_settings={}): - # Apply any new settings to the given project version - version = self.get_project_version_by_name(project_name, version_name) - - if version: - for k,v in new_settings.items(): - if k in HubInstance.PROJECT_VERSION_SETTINGS: - logger.debug("updating setting {} in version {} with value {}".format( - k, version['versionName'], v)) - version[k] = v - else: - logger.warn("Setting {} is not in the list of project version settings ({})".format( - k, HubInstance.PROJECT_VERSION_SETTINGS)) - - url = version['_meta']['href'] - - response = self.execute_put(url, version) - - if response.status_code == 200: - logger.info("Successfully updated version {} with new settings {}".format( - version['versionName'], new_settings)) - else: - logger.error("Failed to update version {} with new settings {}; status code: {}".format( - version['versionName'], new_settings, response.status_code)) - else: - 
logger.debug("Did not find a matching project-version in project {}, version name {}".format( - project_name, version_name)) - - def get_version_by_id(self, project_id, version_id, limit=100): - headers = self.get_headers() - paramstring = self.get_limit_paramstring(limit) - url = self._get_projects_url() + "/" + project_id + "/versions/" + version_id - headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def compare_project_versions(self, version, compareTo): - apibase = self.config['baseurl'] + "/api" - paramstring = "?limit=1000&sortField=component.securityRiskProfile&ascending=false&offset=0" - cwhat = version['_meta']['href'].replace(apibase, '') - cto = compareTo['_meta']['href'].replace(apibase, '') - url = apibase + cwhat + "/compare" + cto + "/components" + paramstring - headers = self.get_headers() - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def get_version_codelocations(self, version, limit=100, offset=0): - url = self.get_link(version, "codelocations") + self._get_parameter_string({ - 'limit': limit, - 'offset': offset}) - custom_headers = {'Content-Type': 'application/vnd.blackducksoftware.scan-4+json'} - response = self.execute_get(url, custom_headers=custom_headers) - jsondata = response.json() - return jsondata - - def delete_project_version_by_name(self, project_name, version_name, save_scans=False): - project = self.get_project_by_name(project_name) - if project: - logger.debug("found project {}".format(project)) - project_versions = self.get_project_versions( - project, - parameters={'q':"versionName:{}".format(version_name)} - ) - - project_version_codelocations = None - if 'totalCount' in project_versions and project_versions['totalCount'] == 1: - project_version = project_versions['items'][0] - logger.debug("found the project version: {}".format(project_version)) - - delete_scans = not save_scans - logger.debug("delete_scans was {}".format(delete_scans)) - - if delete_scans: - self.delete_project_version_codelocations(project_version) - else: - logger.debug("Delete scans was false, or we did not find any codelocations (scans) in version {} of project {}".format(version_name, project_name)) - # TODO: Check if the project will be "empty" once we delete this version and - # delete the project accordingly? 
- logger.info("Deleting project-version at: {}".format(project_version['_meta']['href'])) - self.execute_delete(project_version['_meta']['href']) - else: - logger.debug("Did not find version with name {} in project {}".format(version_name, project_name)) - else: - logger.debug("Did not find project with name {}".format(project_name)) - - def delete_project_by_name(self, project_name, save_scans=False, backup_scans=False): - project = self.get_project_by_name(project_name) - if project: - # get project versions - project_versions = self.get_project_versions(project) - versions = project_versions.get('items', []) - logger.debug("Retrieved {} versions for project {}".format(len(versions), project_name)) - - delete_scans = not save_scans - logger.debug("delete_scans was {}".format(delete_scans)) - - if delete_scans: - # delete all code locations associated with each version - for version in versions: - if backup_scans: - logger.debug("Backup code locations (aka scans) for version {}".format(version['versionName'])) - self.download_project_scans(project_name, version['versionName']) - logger.debug("Deleting code locations (aka scans) for version {}".format(version['versionName'])) - self.delete_project_version_codelocations(version) - - # delete the project itself - project_url = project['_meta']['href'] - logger.info("Deleting project {}".format(project_name)) - self.execute_delete(project_url) - else: - logger.debug("Did not find project with name {}".format(project_name)) - - def delete_project_version_codelocations(self, version): - version_name = version['versionName'] - try: - logger.debug("Retrieving code locations (aka scans) for version {}".format(version_name)) - version_code_locations = self.get_version_codelocations(version) - except: - logger.error("Failed to get codelocations (aka scans) for version {}".format(version_name), exc_info=True) - version_code_locations = [] - else: - version_code_locations = version_code_locations.get('items', []) if version_code_locations else [] - logger.debug("Found {} code locations (aka scans) for version {}".format(len(version_code_locations), version_name)) - code_location_urls = [c['_meta']['href'] for c in version_code_locations] - for code_location_url in code_location_urls: - logger.info("Deleting code location at: {}".format(code_location_url)) - self.execute_delete(code_location_url) - - def delete_empty_projects(self): - #get all projects with no mapped code locations and delete them all - projects = self.get_projects().get('items',[]) - deleted_projects = list() - for p in projects: - p_empty = True - versions = self.get_project_versions(p).get('items', []) - for v in versions: - codelocations = self.get_version_codelocations(v) - if codelocations['totalCount'] != 0: - p_empty = False - logger.debug("Found a non-empty version in project {}, skipping...".format( - p['name'])) - break - if p_empty: - logger.info("Project {} is empty, deleting".format(p['name'])) - self.execute_delete(p['_meta']['href']) - deleted_projects.append(p['name']) - return deleted_projects - - def delete_empty_versions(self, project): - # delete versions within a given project if there are no mapped code locations (scans) - versions = self.get_project_versions(project).get('items', []) - logger.debug("Deleting empty versions for project {}".format(project['name'])) - deleted_versions = list() - for v in versions: - codelocations = self.get_version_codelocations(v).get('items', []) - if not codelocations: - logger.info("Deleting empty version {} from project 
{}".format( - v['versionName'], project['name'])) - self.execute_delete(v['_meta']['href']) - deleted_versions.append((project['name'], v['versionName'])) - else: - logger.debug("Version {} within project {} has scans (i.e. not empty), skipping".format( - v['versionName'], project['name'])) - return deleted_versions - - def delete_all_empty_versions(self): - # delete versions if there are no mapped code locations (scans) across all projects - projects = self.get_projects().get('items', []) - deleted_versions = list() - logger.info("Deleting empty versions for all {} projects on this server".format( - len(projects))) - for p in projects: - deleted_versions.extend(self.delete_empty_versions(p)) - return deleted_versions - - def _find_user_group_url(self, assignable_user_groups, user_group_name): - for user_group in assignable_user_groups['items']: - if user_group['name'] == user_group_name: - return user_group['usergroup'] - - def _find_user_url(self, assignable_user, user_name): - for user in assignable_user['items']: - if user['name'] == user_name: - return user['user'] - - def _project_role_urls(self, project_role_names): - all_project_roles = self.get_project_roles() - project_role_urls = list() - for project_role_name in project_role_names: - for project_role in all_project_roles: - if project_role_name == project_role['name']: - project_role_urls.append(project_role['_meta']['href']) - return project_role_urls - - def assign_user_group_to_project(self, project_name, user_group_name, project_roles): - # Assign the user group to the project using the list of project-role names - project = self.get_project_by_name(project_name) - # user_group = self.get_user_group_by_name(user_group_name) - - if project: - project_url = project['_meta']['href'] - assignable_user_groups_link = self.get_link(project, 'assignable-usergroups') - if assignable_user_groups_link: - assignable_user_groups_response = self.execute_get(f"{assignable_user_groups_link}?q=name:{user_group_name}") - assignable_user_groups = assignable_user_groups_response.json() - - # TODO: What to do if the user group is already assigned to the project, and therefore - # does not appear in the list of 'assignable' user groups? Should we search the (assigned) user - # groups and re-apply the project-roles to the assignment? 
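A possible resolution of the TODO above, using the same /usergroups endpoint that delete_user_group_from_project relies on further down; the helper name and the assumption that the listing carries each group's name are illustrative, not part of this change:

def _find_assigned_user_group_url(self, project, user_group_name):
    # Sketch: if the group is not 'assignable' because it is already assigned,
    # look it up among the project's existing group assignments so the
    # project-roles can be re-applied to that assignment.
    assigned_groups_url = project['_meta']['href'] + "/usergroups"
    response = self.execute_get(assigned_groups_url)
    for group in response.json().get('items', []):
        if group.get('name') == user_group_name:
            return group['_meta']['href']
    return None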
- - user_group_url = self._find_user_group_url(assignable_user_groups, user_group_name) - if user_group_url: - headers = self.get_headers() - - # need project role urls to build the POST payload - project_roles_urls = self._project_role_urls(project_roles) - - # The POST endpoint changes based on whether we found any project-roles to assign - # Also, due to what appears to be a defect, the Content-Type changes - if project_roles_urls: - url = user_group_url + "/roles" - # one dict per project role assignment - post_data = [{'role': r, 'scope': project_url} for r in project_roles_urls] - # I found I had to use this Content-Type (application/json resulted in 412) - # ref: https://jira.dc1.lan/browse/HUB-18417 - headers['Content-Type'] = 'application/vnd.blackducksoftware.internal-1+json' - else: - url = project_url + "/usergroups" - # Assigning a group with no project-roles - post_data = {"group": user_group_url} - headers['Content-Type'] = 'application/json' - - response = requests.post( - url, - headers=headers, - data=json.dumps(post_data), - verify = not self.config['insecure']) - return response - else: - assignable_groups = [u['name'] for u in assignable_user_groups['items']] - logger.warning("The user group {} was not found in the assignable user groups ({}) for this project {}. Is the group already assigned to this project?".format( - user_group_name, assignable_groups, project_name)) - else: - logger.warning("This project {} has no assignable user groups".format(project_name)) - else: - logger.warning("Did not find a project by the name {}".format(project_name)) - - def delete_user_group_from_project(self, project_name, user_group_name): - project = self.get_project_by_name(project_name) - - if project: - project_url = project['_meta']['href'] - - user_group = self.get_user_group_by_name(user_group_name) - if user_group: - user_group_url = user_group['_meta']['href'] - user_group_id = user_group_url.rsplit('/', 1)[-1] - - project_user_group_url = f"{project_url}/usergroups/{user_group_id}" - self.execute_delete(project_user_group_url) - - def assign_user_to_project(self, user_name, project_name, project_roles, limit=1000): - # Assign users to projects - project = self.get_project_by_name(project_name) - - if project: - project_url = project['_meta']['href'] - assignable_users_link = self.get_link(project, 'assignable-users') - paramstring = self.get_limit_paramstring(limit) - url = assignable_users_link + paramstring - logger.debug("GET {}".format(url)) - if assignable_users_link: - assignable_users_response = self.execute_get(url) - assignable_users = assignable_users_response.json() - - # TODO: What to do if the user is already assigned to the project, and therefore - # does not appear in the list of 'assignable' user? Should we search the (assigned) user - # and re-apply the project-roles to the assignment? 
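For context, a caller-side sketch of how the assignment methods in this hunk are typically driven from a script; the user, project, and role names are placeholders and must match what exists on the server:

from blackduck.HubRestApi import HubInstance

hub = HubInstance()  # reads connection details from .restconfig.json in the working directory

# Placeholder names throughout; the roles must be project-roles defined on the server.
response = hub.assign_user_to_project(
    "a_user", "a_project", ["BOM Manager", "Project Code Scanner"])
if response is not None and response.status_code in (200, 201):
    print("user assigned")
else:
    print("assignment skipped or failed")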
- - user_url = self._find_user_url(assignable_users, user_name) - if user_url: - headers = self.get_headers() - - # need project role urls to build the POST payload - project_roles_urls = self._project_role_urls(project_roles) - - # The POST endpoint changes based on whether we found any project-roles to assign - # Also, due to what appears to be a defect, the Content-Type changes - if project_roles_urls: - url = user_url + "/roles" - # one dict per project role assignment - post_data = [{'role': r, 'scope': project_url} for r in project_roles_urls] - # I found I had to use this Content-Type (application/json resulted in 412) - # ref: https://jira.dc1.lan/browse/HUB-18417 - headers['Content-Type'] = 'application/vnd.blackducksoftware.internal-1+json' - else: - url = project_url + "/users" - # Assigning a user with no project-roles - post_data = {"user": user_url} - headers['Content-Type'] = 'application/json' - - response = requests.post( - url, - headers=headers, - data=json.dumps(post_data), - verify=not self.config['insecure']) - return response - else: - assignable_username = [u['name'] for u in assignable_users['items']] - logger.warning( - "The user {} was not found in the assignable user ({}) for this project {}. Is the user already assigned to this project?".format( - user_name, assignable_username, project_name)) - else: - logger.warning("This project {} has no assignable users".format(project_name)) - else: - logger.warning("Did not find a project by the name {}".format(project_name)) - - def assign_project_application_id(self, project_name, application_id, overwrite=False): - logger.debug("Assigning application_id {} to project_name {}, overwrite={}".format( - application_id, project_name, overwrite)) - - existing_application_id, application_id_url = self.get_project_application_id(project_name) - - if existing_application_id: - if overwrite: - logger.debug("Found an existing application id {} for project {} and overwrite was True. 
Updating it to {}".format( - existing_application_id, project_name, application_id)) - return self.update_project_application_id(project_name, application_id) - else: - logger.debug("Found an existing application id {} for project {} and overwrite was False so not updating it".format( - existing_application_id, project_name)) - else: - logger.debug("No application id exists for project {}, assigning {} to it".format( - project_name, application_id)) - project = self.get_project_by_name(project_name) - if project: - project_mappings_url = self.get_link(project, "project-mappings") - if project_mappings_url: - post_data = {"applicationId": application_id} - response = self.execute_post(project_mappings_url, data=post_data) - return response - else: - logger.warning("Did not find project-mappings URL for project {}".format(project)) - else: - logger.warning("Did not find project by name {}".format(project_name)) - - def update_project_application_id(self, project_name, new_application_id): - application_id, application_id_url = self.get_project_application_id(project_name) - - if application_id and application_id_url: - put_data = { - "applicationId": new_application_id, - "_meta": { - "allow": [ - "DELETE", - "GET", - "PUT" - ], - "href": application_id_url, - "links": [] - } - } - response = self.execute_put(application_id_url, data=put_data) - return response - else: - logger.debug("Did not find application id for project name {}".format(project_name)) - - def delete_application_id(self, project_name): - application_id, application_id_url = self.get_project_application_id(project_name) - - if application_id_url: - self.execute_delete(application_id_url) - - def get_project_application_id(self, project_name): - project_mapping_info = self.get_project_info(project_name, 'project-mappings') - if project_mapping_info and 'items' in project_mapping_info: - for project_mapping in project_mapping_info['items']: - if 'applicationId' in project_mapping: - application_id = project_mapping['applicationId'] - application_id_url = project_mapping['_meta']['href'] - - return (application_id, application_id_url) - logger.debug("Did not find any project-mappings with 'applicationId' in them") - return (None, None) - else: - logger.debug("did not find any project-mappings for project {}".format(project_name)) - return (None, None) - - def get_project_info(self, project_name, link_name): - project = self.get_project_by_name(project_name) - link = self.get_link(project, link_name) - if link: - response = self.execute_get(link) - return response.json() - else: - return {} # nada - - def get_project_roles(self): - all_project_roles = self.get_roles(parameters={"filter":"scope:project"}) - return all_project_roles['items'] - - def get_version_scan_info(self, version_obj): - url = self.get_link(version_obj, "codelocations") - custom_headers = {'Accept': 'application/vnd.blackducksoftware.project-detail-5+json'} - response = self.execute_get(url, custom_headers=custom_headers) - code_locations = response.json().get('items', []) - if code_locations: - scan_info = { - 'most_recent_scan': max([cl['updatedAt'] for cl in code_locations]), - 'oldest_scan': min([cl['createdAt'] for cl in code_locations]), - 'number_scans': len(code_locations) - } - else: - scan_info = { - 'most_recent_scan': None, - 'oldest_scan': None, - 'number_scans': None - } - return scan_info - - ### - # - # Add project version as a component to another project - # - # - - def add_version_as_component(self, main_project_release, 
sub_project_release): - headers = self.get_headers() - main_data = main_project_release['_meta']['href'].split('/') - sub_data = sub_project_release['_meta']['href'].split('/') - main_project_release_links = main_project_release['_meta']['links'] - main_project_release_component_links = [x for x in main_project_release_links if x['rel'] == 'components'] - main_project_release_component_link = main_project_release_component_links[0]['href'] - logger.debug(main_project_release_component_link) - sub_project_release_as_custom_component_url = self.get_apibase() + "/components/" + sub_data[5] + "/versions/" + sub_data[7] - logger.debug(sub_project_release_as_custom_component_url) - payload = {} - payload['component'] = sub_project_release_as_custom_component_url - logger.debug(json.dumps(payload)) - response = requests.post(main_project_release_component_link, headers=headers, verify = not self.config['insecure'], json=payload) - logger.debug(response) - return response - - ### - # - # Remove a project version as a component from another project - # - # - - def remove_version_as_component(self, main_project_release, sub_project_release): - headers = self.get_headers() - main_data = main_project_release['_meta']['href'].split('/') - sub_data = sub_project_release['_meta']['href'].split('/') - main_project_release_links = main_project_release['_meta']['links'] - main_project_release_component_links = [x for x in main_project_release_links if x['rel'] == 'components'] - main_project_release_component_link = main_project_release_component_links[0]['href'] - logger.debug(main_project_release_component_link) - subcomponent_url = main_project_release_component_link + "/" + sub_data[5] + "/versions/" + sub_data[7] - logger.debug(subcomponent_url) - response = requests.delete(subcomponent_url, headers=headers, verify = not self.config['insecure']) - return response - - ### - # - # Code locations or Scans Stuff - # - ### - - def upload_scan(self, filename): - url = self.get_apibase() + "/scan/data/?mode=replace" - headers = self.get_headers() - if filename.endswith('.json') or filename.endswith('.jsonld'): - headers['Content-Type'] = 'application/ld+json' - with open(filename,"r") as f: - response = requests.post(url, headers=headers, data=f, verify=not self.config['insecure']) - elif filename.endswith('.bdio'): - headers['Content-Type'] = 'application/vnd.blackducksoftware.bdio+zip' - with open(filename,"rb") as f: - response = requests.post(url, headers=headers, data=f, verify=not self.config['insecure']) - else: - raise Exception("Unkown file type") - return response - - def download_project_scans(self, project_name,version_name, output_folder=None): - version = self.get_project_version_by_name(project_name,version_name) - codelocations = self.get_version_codelocations(version) - import os - if output_folder: - if not os.path.exists(output_folder): - os.makedirs(output_folder, 0o755, True) - - result = [] - - for item in codelocations['items']: - links = item['_meta']['links'] - matches = [x for x in links if x['rel'] == 'enclosure' or x['rel'] == 'scan-data'] - for m in matches: - url = m['href'] - filename = url.split('/')[6] - if output_folder: - pathname = os.path.join(output_folder, filename) - else: - if not os.path.exists(project_name): - os.mkdir(project_name) - pathname = os.path.join(project_name, filename) - responce = requests.get(url, headers=self.get_headers(), stream=True, verify=not self.config['insecure']) - with open(pathname, "wb") as f: - for data in responce.iter_content(): - 
f.write(data) - result.append({filename, pathname}) - return result - - def get_codelocations(self, limit=100, unmapped=False, parameters={}): - parameters['limit'] = limit - paramstring = self._get_parameter_string(parameters) - headers = self.get_headers() - url = self.get_apibase() + "/codelocations" + paramstring - headers['Accept'] = 'application/vnd.blackducksoftware.scan-4+json' - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - if unmapped: - jsondata['items'] = [s for s in jsondata['items'] if 'mappedProjectVersion' not in s] - jsondata['totalCount'] = len(jsondata['items']) - return jsondata - - def get_codelocation_scan_summaries(self, code_location_id = None, code_location_obj = None, limit=100): - '''Retrieve the scans (aka scan summaries) for the given location. You can give either - code_location_id or code_location_obj. If both are supplied, precedence is to use code_location_obj - ''' - assert code_location_id or code_location_obj, "You must supply at least one - code_location_id or code_location_obj" - - paramstring = "?limit={}&offset=0".format(limit) - headers = self.get_headers() - headers['Accept'] = 'application/vnd.blackducksoftware.scan-4+json' - if code_location_obj: - url = self.get_link(code_location_obj, "scans") - else: - url = self.get_apibase() + \ - "/codelocations/{}/scan-summaries".format(code_location_id) - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def delete_unmapped_codelocations(self, limit=1000): - code_locations = self.get_codelocations(limit=limit, unmapped=True).get('items', []) - - for c in code_locations: - scan_summaries = self.get_codelocation_scan_summaries(code_location_obj = c).get('items', []) - - if scan_summaries[0]['status'] == 'COMPLETE': - response = self.execute_delete(c['_meta']['href']) - - def delete_codelocation(self, locationid): - url = self.config['baseurl'] + "/api/codelocations/" + locationid - headers = self.get_headers() - response = requests.delete(url, headers=headers, verify = not self.config['insecure']) - return response - - def get_scan_locations(self, code_location_id): - headers = self.get_headers() - headers['Accept'] = 'application/vnd.blackducksoftware.scan-4+json' - url = self.get_apibase() + "/codelocations/{}".format(code_location_id) - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - ## - # - # Component stuff - # - ## - def _get_components_url(self): - return self.get_urlbase() + "/api/components" - - def get_components(self, limit=100, parameters={}): - if limit: - parameters.update({'limit':limit}) - # - # I was only able to GET components when using this internal media type which is how the GUI works - # July 19, 2019 Glenn Snyder - # - custom_headers = {'Accept':'application/vnd.blackducksoftware.internal-1+json'} - url = self._get_components_url() + self._get_parameter_string(parameters) - response = self.execute_get(url, custom_headers=custom_headers) - return response.json() - - def search_components(self, search_str_or_query, limit=100, parameters={}): - if limit: - parameters.update({'limit':limit}) - if search_str_or_query.startswith("q="): - # allow caller to override original behavior with their own query - query = search_str_or_query - else: - # maintain original, somewhat flawed behavior - query = "q=name:{}".format(search_str_or_query) - parm_str = 
self._get_parameter_string(parameters) - url = self.get_apibase() + "/search/components{}&{}".format(parm_str, query) - response = self.execute_get(url) - return response.json() - - def get_component_by_id(self, component_id): - url = self.config['baseurl'] + "/api/components/{}".format(component_id) - return self.get_component_by_url(url) - - def get_component_by_url(self, component_url): - headers = self.get_headers() - response = self.execute_get(component_url) - jsondata = response.json() - return jsondata - - def update_component_by_id(self, component_id, update_json): - url = self.config["baseurl"] + "/api/components/{}".format(component_id) - return self.update_component_by_url(url, update_json) - - def update_component_by_url(self, component_url, update_json): - return self.execute_put(component_url, update_json) - - - ## - # - # Custom fields - # - ## - def _get_cf_url(self): - return self.get_apibase() + "/custom-fields/objects" - - def supported_cf_object_types(self): - '''Get the types and cache them since they are static (on a per-release basis)''' - if not hasattr(self, "_cf_object_types"): - logger.debug("retrieving object types") - self._cf_object_types = [cfo['name'] for cfo in self.get_cf_objects().get('items', [])] - return self._cf_object_types - - def get_cf_objects(self): - '''Get CF objects and cache them since these are static (on a per-release basis)''' - url = self._get_cf_url() - if not hasattr(self, "_cf_objects"): - logger.debug("retrieving objects") - response = self.execute_get(url) - self._cf_objects = response.json() - return self._cf_objects - - def _get_cf_object_url(self, object_name): - for cf_object in self.get_cf_objects().get('items', []): - if cf_object['name'].lower() == object_name.lower(): - return cf_object['_meta']['href'] - - def get_cf_object(self, object_name): - assert object_name in self.supported_cf_object_types(), "Object name {} not one of the supported types ({})".format(object_name, self.supported_cf_object_types()) - - object_url = self._get_cf_object_url(object_name) - response = self.execute_get(object_url) - return response.json() - - def _get_cf_obj_rel_path(self, object_name): - return object_name.lower().replace(" ", "-") - - def create_cf(self, object_name, field_type, description, label, position, active=True, initial_options=[]): - ''' - Create a custom field for the given object type (e.g. "Project", "Project Version") using the field_type and other parameters. - - Initial options are needed for field types like multi-select where the multiple values to choose from must also be provided. - - initial_options = [{"label":"val1", "position":0}, {"label":"val2", "position":1}] - ''' - assert isinstance(position, int) and position >= 0, "position must be an integer that is greater than or equal to 0" - assert field_type in ["BOOLEAN", "DATE", "DROPDOWN", "MULTISELECT", "RADIO", "TEXT", "TEXTAREA"] - - types_using_initial_options = ["DROPDOWN", "MULTISELECT", "RADIO"] - - post_url = self._get_cf_object_url(object_name) + "/fields" - cf_object = self._get_cf_obj_rel_path(object_name) - cf_request = { - "active": active, - "description": description, - "label": label, - "position": position, - "type": field_type, - } - if field_type in types_using_initial_options and initial_options: - cf_request.update({"initialOptions": initial_options}) - response = self.execute_post(post_url, data=cf_request) - return response - - def delete_cf(self, object_name, field_id): - '''Delete a custom field from a given object type, e.g. 
Project, Project Version, Component, etc - - WARNING: Deleting a custom field is irreversiable. Any data in the custom fields could be lost so use with caution. - ''' - assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) - - delete_url = self._get_cf_object_url(object_name) + "/fields/{}".format(field_id) - return self.execute_delete(delete_url) - - def get_custom_fields(self, object_name): - '''Get the custom field (definition) for a given object type, e.g. Project, Project Version, Component, etc - ''' - assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) - - url = self._get_cf_object_url(object_name) + "/fields" - - response = self.execute_get(url) - return response.json() - - def get_cf_values(self, obj): - '''Get all of the custom fields from an object such as a Project, Project Version, Component, etc - - The obj is expected to be the JSON document for a project, project-version, component, etc - ''' - url = self.get_link(obj, "custom-fields") - response = self.execute_get(url) - return response.json() - - def get_cf_value(self, obj, field_id): - '''Get a custom field value from an object such as a Project, Project Version, Component, etc - - The obj is expected to be the JSON document for a project, project-version, component, etc - ''' - url = self.get_link(obj, "custom-fields") + "/{}".format(field_id) - response = self.execute_get(url) - return response.json() - - def put_cf_value(self, cf_url, new_cf_obj): - '''new_cf_obj is expected to be a modified custom field value object with the values updated accordingly, e.g. - call get_cf_value, modify the object, and then call put_cf_value - ''' - return self.execute_put(cf_url, new_cf_obj) - - ## - # - # General stuff - # - ## - - def execute_delete(self, url): - headers = self.get_headers() - response = requests.delete(url, headers=headers, verify = not self.config['insecure']) - return response - - def get_ldap_state(self): - url = self.config['baseurl'] + "/api/v1/ldap/state" - headers = self.get_headers() - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - def enable_ldap(self): - url = self.config['baseurl'] + "/api/v1/ldap/state" - headers = self.get_headers() - payload = {} - payload['ldapEnabled'] = True - response = requests.post(url, headers=headers, verify = not self.config['insecure'], json=payload) - jsondata = response.json() - return jsondata - - def disable_ldap(self): - url = self.config['baseurl'] + "/api/v1/ldap/state" - headers = self.get_headers() - payload = {} - payload['ldapEnabled'] = False - response = requests.post(url, headers=headers, verify = not self.config['insecure'], json=payload) - jsondata = response.json() - return jsondata - - def get_ldap_configs(self): - url = self.config['baseurl'] + "/api/v1/ldap/configs" - headers = self.get_headers() - headers['Content-Type'] = "application/json" - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - jsondata = response.json() - return jsondata - - ## - # - # Health Stuff - # - ## - def get_health_checks(self): - url = self.get_urlbase() + "/api/health-checks/liveness" - return self.execute_get(url) - - ## - # - # Jobs - # - ## - def get_jobs(self, parameters={}): - url = self.get_apibase() + "/jobs" - url = url + 
self._get_parameter_string(parameters) - custom_headers = {'Accept': 'application/vnd.blackducksoftware.status-4+json'} - response = self.execute_get(url, custom_headers=custom_headers) - return response.json() - - ## - # - # Job Statistics - # - ## - def get_job_statistics(self): - url = self.get_urlbase() + "/api/job-statistics" - response = self.execute_get(url) - return response.json() - - ## - # - # Notifications - # - ## - def get_notifications(self, parameters={}): - url = self.get_urlbase() + "/api/notifications" + self._get_parameter_string(parameters) - custom_headers = {'Accept': 'application/vnd.blackducksoftware.notification-4+json'} - response = self.execute_get(url, custom_headers=custom_headers) - json_data = response.json() - return json_data - - ## - # - # Licenses - # - ## - def get_licenses(self, parameters={}): - url = self.get_urlbase() + "/api/licenses" + self._get_parameter_string(parameters) - response = self.execute_get(url, custom_headers={'Accept':'application/json'}) - json_data = response.json() - return json_data - - ## - # - # General methods including get, put, post, etc - # - ## - def _validated_json_data(self, data_to_validate): - if isinstance(data_to_validate, dict) or isinstance(data_to_validate, list): - json_data = json.dumps(data_to_validate) - else: - json_data = data_to_validate - json.loads(json_data) # will fail with JSONDecodeError if invalid - return json_data - - def execute_get(self, url, custom_headers={}): - headers = self.get_headers() - headers.update(custom_headers) - response = requests.get(url, headers=headers, verify = not self.config['insecure']) - return response - - def execute_put(self, url, data, custom_headers={}): - json_data = self._validated_json_data(data) - headers = self.get_headers() - headers["Content-Type"] = "application/json" - headers.update(custom_headers) - response = requests.put(url, headers=headers, data=json_data, verify = not self.config['insecure']) - return response - - def _create(self, url, json_body): - response = self.execute_post(url, json_body) - # v4+ returns the newly created location in the response headers - # and there is nothing in the response json - # whereas v3 returns the newly created object in the response json - if response.status_code == 201: - if "location" in response.headers: - return response.headers["location"] - else: - try: - response_json = response.json() - except json.decoder.JSONDecodeError: - logger.warning('did not receive any json data back') - else: - if '_meta' in response_json and 'href' in response_json['_meta']: - return response_json['_meta']['href'] - else: - return response_json - elif response.status_code == 412: - raise CreateFailedAlreadyExists("Failed to create the object because it already exists - url {}, body {}, response {}".format(url, json_body, response)) - else: - raise CreateFailedUnknown("Failed to create the object for an unknown reason - url {}, body {}, response {}".format(url, json_body, response)) - - def execute_post(self, url, data, custom_headers={}): - json_data = self._validated_json_data(data) - headers = self.get_headers() - headers["Content-Type"] = "application/json" - headers.update(custom_headers) - response = requests.post(url, headers=headers, data=json_data, verify = not self.config['insecure']) - return response - def get_matched_components(self, version_obj, limit=9999): - url = "{}/matched-files".format(version_obj['_meta']['href']) - param_string = self._get_parameter_string({'limit': limit}) - url = "{}{}".format(url, 
param_string) - response = self.execute_get(url) - return response.json() + def print_methods(self): + import inspect + for fn in inspect.getmembers(self, predicate=inspect.isfunction): + print(fn[0]) diff --git a/blackduck/Jobs.py b/blackduck/Jobs.py new file mode 100644 index 00000000..f9a58636 --- /dev/null +++ b/blackduck/Jobs.py @@ -0,0 +1,20 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def get_jobs(self, parameters={}): + url = self.get_apibase() + "/jobs" + url = url + self._get_parameter_string(parameters) + custom_headers = {'Accept': 'application/vnd.blackducksoftware.status-4+json'} + response = self.execute_get(url, custom_headers=custom_headers) + return response.json() + +def get_job_statistics(self): + url = self.get_urlbase() + "/api/job-statistics" + response = self.execute_get(url) + return response.json() + \ No newline at end of file diff --git a/blackduck/Ldap.py b/blackduck/Ldap.py new file mode 100644 index 00000000..b201981c --- /dev/null +++ b/blackduck/Ldap.py @@ -0,0 +1,40 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def get_ldap_state(self): + url = self.config['baseurl'] + "/api/v1/ldap/state" + headers = self.get_headers() + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def enable_ldap(self): + url = self.config['baseurl'] + "/api/v1/ldap/state" + headers = self.get_headers() + payload = {} + payload['ldapEnabled'] = True + response = requests.post(url, headers=headers, verify = not self.config['insecure'], json=payload) + jsondata = response.json() + return jsondata + +def disable_ldap(self): + url = self.config['baseurl'] + "/api/v1/ldap/state" + headers = self.get_headers() + payload = {} + payload['ldapEnabled'] = False + response = requests.post(url, headers=headers, verify = not self.config['insecure'], json=payload) + jsondata = response.json() + return jsondata + +def get_ldap_configs(self): + url = self.config['baseurl'] + "/api/v1/ldap/configs" + headers = self.get_headers() + headers['Content-Type'] = "application/json" + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata \ No newline at end of file diff --git a/blackduck/Licences.py b/blackduck/Licences.py new file mode 100644 index 00000000..4a2f9c19 --- /dev/null +++ b/blackduck/Licences.py @@ -0,0 +1,45 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def get_licenses(self, parameters={}): + url = self.get_urlbase() + "/api/licenses" + self._get_parameter_string(parameters) + response = self.execute_get(url, custom_headers={'Accept':'application/json'}) + json_data = response.json() + return json_data + +def _get_license_info(self, license_obj): + if 'license' in license_obj: + license_info = {} + text_json = {} + logger.debug("license: {}".format(license_obj)) + response = self.execute_get(license_obj['license']) + if response.status_code == 200: + license_info = response.json() + text_url = self.get_link(license_info, 'text') + response = self.execute_get(text_url) + if response.status_code == 200: + text_json = response.text + yield {"license_info": license_info, + "license_text_info": text_json} + elif 'licenses' in license_obj and 
isinstance(license_obj['licenses'], list): + for license in license_obj['licenses']: + self._get_license_info(license) + +def get_license_info_for_bom_component(self, bom_component, limit=1000): + self._check_version_compatibility() + all_licenses = {} + logger.debug("gathering license info for bom component {}, version {}".format( + bom_component['componentName'], bom_component['componentVersionName'])) + for license in bom_component.get('licenses', []): + for license_info_obj in self._get_license_info(license): + all_licenses.update({ + license['licenseDisplay']: license_info_obj + }) + return all_licenses + + diff --git a/blackduck/Policy.py b/blackduck/Policy.py new file mode 100644 index 00000000..626f9f66 --- /dev/null +++ b/blackduck/Policy.py @@ -0,0 +1,45 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_policy_url(self): + return self.config['baseurl'] + "/api/policy-rules" + +def get_policies(self, parameters={}): + url = self._get_policy_url() + self._get_parameter_string(parameters) + headers = {'Accept': 'application/json'} + response = self.execute_get(url, custom_headers=headers) + return response.json() + +def create_policy(self, policy_json): + url = self._get_policy_url() + location = self._create(url, policy_json) + return location + +def get_policy_by_id(self, policy_id): + url = self._get_policy_url() + "/{}".format(policy_id) + return self.get_policy_by_url(url) + +def get_policy_by_url(self, policy_url): + headers = {'Accept': 'application/vnd.blackducksoftware.policy-4+json'} + response = self.execute_get(policy_url, custom_headers=headers) + jsondata = response.json() + return jsondata + +def update_policy_by_id(self, policy_id, update_json): + url = self._get_policy_url() + "/{}".format(policy_id) + return self.update_policy_by_url(url, update_json) + +def update_policy_by_url(self, policy_url, update_json): + return self.execute_put(policy_url, update_json) + +def delete_policy_by_id(self, policy_id): + url = self._get_policy_url() + "/{}".format(policy_id) + return self.delete_policy_by_url(url) + +def delete_policy_by_url(self, policy_url): + return self.execute_delete(policy_url) diff --git a/blackduck/Projects.py b/blackduck/Projects.py new file mode 100644 index 00000000..8b4c6e0c --- /dev/null +++ b/blackduck/Projects.py @@ -0,0 +1,585 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_projects_url(self): + return self.get_urlbase() + "/api/projects" + +def get_projects(self, limit=100, parameters={}): + headers = self.get_headers() + if limit: + parameters.update({'limit': limit}) + url = self._get_projects_url() + self._get_parameter_string(parameters) + headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' + logger.debug(f"Retrieving projects using url {url}") + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def create_project(self, project_name, version_name="Default Version", parameters={}): + url = self._get_projects_url() + + post_data = { + "name": project_name, + "description": parameters.get("description", ""), + "projectTier": parameters.get("project_tier", ""), + "projectOwner": parameters.get("project_owner", ""), + "projectLevelAdjustments": parameters.get("project_level_adjustments", True), + "cloneCategories": [ + 
"COMPONENT_DATA", + "VULN_DATA" + ], + "versionRequest": { + "phase": parameters.get("version_phase", "PLANNING"), + "distribution": parameters.get("version_distribution", "EXTERNAL"), + "projectLevelAdjustments": parameters.get("project_level_adjustments", True), + "versionName": version_name + } + } + response = self.execute_post(url, data=post_data) + return response + +def create_project_version(self, project_obj, new_version_name, clone_version=None, parameters={}): + url = self.get_link(project_obj, "versions") + + version_phase = parameters.get("phase", "PLANNING") + if version_phase not in HubInstance.VERSION_PHASES: + raise InvalidVersionPhase("The phase given {} is not in the list of valid phases ({})".format( + version_phase, HubInstance.VERSION_PHASES)) + + post_data = { + "versionUrl": url, + "cloneCategories": [ + "VULN_DATA", + "COMPONENT_DATA" + ], + "versionName": new_version_name, + "phase": version_phase, + "distribution": parameters.get("distribution", "EXTERNAL") + } + if clone_version: + post_data["cloneFromReleaseUrl"] = clone_version['_meta']['href'] + response = self.execute_post(url, data=post_data) + return response + +def get_project_by_name(self, project_name): + project_list = self.get_projects(parameters={"q":"name:{}".format(project_name)}) + for project in project_list['items']: + if project['name'] == project_name: + return project + +def get_projects_by_version_name(self, version_name, exclude_projects=None): + """Returns all project dicts which have given version_name, including the version object under 'version' key + + Arguments: + version_name {str} -- version name to be searched + exclude_projects {list} -- list of project names to be excluded from scanning for given version name + """ + headers = self.get_headers() + projects = self.get_projects(limit=9999).get('items',[]) + if len(projects) == 0: + logger.error('No projects found') + else: + jsondata = {'items':[]} + for project in projects: + if project['name'] not in exclude_projects: + version = self.get_version_by_name(project, version_name) + if version: + project['version'] = version + jsondata['items'].append(project) + jsondata['totalCount'] = len(jsondata['items']) + return jsondata + +def get_version_by_name(self, project, version_name): + version_list = self.get_project_versions(project, parameters={'q':"versionName:{}".format(version_name)}) + # A query by name can return more than one version if other versions + # have names that include the search term as part of their name + for version in version_list['items']: + if version['versionName'] == version_name: + return version + +def get_project_version_by_name(self, project_name, version_name): + project = self.get_project_by_name(project_name) + if project: + version = self.get_version_by_name(project, version_name) + if version == None: + logger.debug("Did not find any project version matching {}".format(version_name)) + else: + return version + else: + logger.debug("Did not find a project with name {}".format(project_name)) + +def get_or_create_project_version(self, project_name, version_name, parameters = {}): + project = self.get_project_by_name(project_name) + if project: + version = self.get_version_by_name(project, version_name) + if not version: + self.create_project_version(project, version_name, parameters=parameters) + version = self.get_version_by_name(project, version_name) + else: + self.create_project(project_name, version_name, parameters=parameters) + project = self.get_project_by_name(project_name) + version = 
self.get_version_by_name(project, version_name) + return version + +def get_project_by_id(self, project_id, limit=100): + headers = self.get_headers() + paramstring = self.get_limit_paramstring(limit) + url = self._get_projects_url() + "/" + project_id + paramstring + headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def get_project_versions(self, project, limit=100, parameters={}): + # paramstring = self.get_limit_paramstring(limit) + parameters.update({'limit': limit}) + url = project['_meta']['href'] + "/versions" + self._get_parameter_string(parameters) + headers = self.get_headers() + headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def get_version_components(self, projectversion, limit=1000): + paramstring = self.get_limit_paramstring(limit) + url = projectversion['_meta']['href'] + "/components" + paramstring + headers = self.get_headers() + headers['Accept'] = 'application/vnd.blackducksoftware.bill-of-materials-6+json' + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def update_project_settings(self, project, new_settings={}): + url = project['_meta']['href'] + headers = self.get_headers() + headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' + headers['Content-Type'] = 'application/vnd.blackducksoftware.project-detail-4+json' + response = self.execute_put(url, new_settings, headers) + return response + +def update_project_version_settings(self, project_name, version_name, new_settings={}): + # Apply any new settings to the given project version + version = self.get_project_version_by_name(project_name, version_name) + + if version: + for k,v in new_settings.items(): + if k in HubInstance.PROJECT_VERSION_SETTINGS: + logger.debug("updating setting {} in version {} with value {}".format( + k, version['versionName'], v)) + version[k] = v + else: + logger.warn("Setting {} is not in the list of project version settings ({})".format( + k, HubInstance.PROJECT_VERSION_SETTINGS)) + + url = version['_meta']['href'] + + response = self.execute_put(url, version) + + if response.status_code == 200: + logger.info("Successfully updated version {} with new settings {}".format( + version['versionName'], new_settings)) + else: + logger.error("Failed to update version {} with new settings {}; status code: {}".format( + version['versionName'], new_settings, response.status_code)) + else: + logger.debug("Did not find a matching project-version in project {}, version name {}".format( + project_name, version_name)) + +def get_version_by_id(self, project_id, version_id, limit=100): + headers = self.get_headers() + paramstring = self.get_limit_paramstring(limit) + url = self._get_projects_url() + "/" + project_id + "/versions/" + version_id + headers['Accept'] = 'application/vnd.blackducksoftware.project-detail-4+json' + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def compare_project_versions(self, version, compareTo): + apibase = self.config['baseurl'] + "/api" + paramstring = "?limit=1000&sortField=component.securityRiskProfile&ascending=false&offset=0" + cwhat = 
version['_meta']['href'].replace(apibase, '') + cto = compareTo['_meta']['href'].replace(apibase, '') + url = apibase + cwhat + "/compare" + cto + "/components" + paramstring + headers = self.get_headers() + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def get_version_codelocations(self, version, limit=100, offset=0): + url = self.get_link(version, "codelocations") + self._get_parameter_string({ + 'limit': limit, + 'offset': offset}) + custom_headers = {'Content-Type': 'application/vnd.blackducksoftware.scan-4+json'} + response = self.execute_get(url, custom_headers=custom_headers) + jsondata = response.json() + return jsondata + +def delete_project_version_by_name(self, project_name, version_name, save_scans=False): + project = self.get_project_by_name(project_name) + if project: + logger.debug("found project {}".format(project)) + project_versions = self.get_project_versions( + project, + parameters={'q':"versionName:{}".format(version_name)} + ) + + project_version_codelocations = None + if 'totalCount' in project_versions and project_versions['totalCount'] == 1: + project_version = project_versions['items'][0] + logger.debug("found the project version: {}".format(project_version)) + + delete_scans = not save_scans + logger.debug("delete_scans was {}".format(delete_scans)) + + if delete_scans: + self.delete_project_version_codelocations(project_version) + else: + logger.debug("Delete scans was false, or we did not find any codelocations (scans) in version {} of project {}".format(version_name, project_name)) + # TODO: Check if the project will be "empty" once we delete this version and + # delete the project accordingly? + logger.info("Deleting project-version at: {}".format(project_version['_meta']['href'])) + self.execute_delete(project_version['_meta']['href']) + else: + logger.debug("Did not find version with name {} in project {}".format(version_name, project_name)) + else: + logger.debug("Did not find project with name {}".format(project_name)) + +def delete_project_by_name(self, project_name, save_scans=False, backup_scans=False): + project = self.get_project_by_name(project_name) + if project: + # get project versions + project_versions = self.get_project_versions(project) + versions = project_versions.get('items', []) + logger.debug("Retrieved {} versions for project {}".format(len(versions), project_name)) + + delete_scans = not save_scans + logger.debug("delete_scans was {}".format(delete_scans)) + + if delete_scans: + # delete all code locations associated with each version + for version in versions: + if backup_scans: + logger.debug("Backup code locations (aka scans) for version {}".format(version['versionName'])) + self.download_project_scans(project_name, version['versionName']) + logger.debug("Deleting code locations (aka scans) for version {}".format(version['versionName'])) + self.delete_project_version_codelocations(version) + + # delete the project itself + project_url = project['_meta']['href'] + logger.info("Deleting project {}".format(project_name)) + self.execute_delete(project_url) + else: + logger.debug("Did not find project with name {}".format(project_name)) + +def delete_project_version_codelocations(self, version): + version_name = version['versionName'] + try: + logger.debug("Retrieving code locations (aka scans) for version {}".format(version_name)) + version_code_locations = self.get_version_codelocations(version) + except: + logger.error("Failed to get codelocations 
(aka scans) for version {}".format(version_name), exc_info=True) + version_code_locations = [] + else: + version_code_locations = version_code_locations.get('items', []) if version_code_locations else [] + logger.debug("Found {} code locations (aka scans) for version {}".format(len(version_code_locations), version_name)) + code_location_urls = [c['_meta']['href'] for c in version_code_locations] + for code_location_url in code_location_urls: + logger.info("Deleting code location at: {}".format(code_location_url)) + self.execute_delete(code_location_url) + +def delete_empty_projects(self): + #get all projects with no mapped code locations and delete them all + projects = self.get_projects().get('items',[]) + deleted_projects = list() + for p in projects: + p_empty = True + versions = self.get_project_versions(p).get('items', []) + for v in versions: + codelocations = self.get_version_codelocations(v) + if codelocations['totalCount'] != 0: + p_empty = False + logger.debug("Found a non-empty version in project {}, skipping...".format( + p['name'])) + break + if p_empty: + logger.info("Project {} is empty, deleting".format(p['name'])) + self.execute_delete(p['_meta']['href']) + deleted_projects.append(p['name']) + return deleted_projects + +def delete_empty_versions(self, project): + # delete versions within a given project if there are no mapped code locations (scans) + versions = self.get_project_versions(project).get('items', []) + logger.debug("Deleting empty versions for project {}".format(project['name'])) + deleted_versions = list() + for v in versions: + codelocations = self.get_version_codelocations(v).get('items', []) + if not codelocations: + logger.info("Deleting empty version {} from project {}".format( + v['versionName'], project['name'])) + self.execute_delete(v['_meta']['href']) + deleted_versions.append((project['name'], v['versionName'])) + else: + logger.debug("Version {} within project {} has scans (i.e. 
not empty), skipping".format( + v['versionName'], project['name'])) + return deleted_versions + +def delete_all_empty_versions(self): + # delete versions if there are no mapped code locations (scans) across all projects + projects = self.get_projects().get('items', []) + deleted_versions = list() + logger.info("Deleting empty versions for all {} projects on this server".format( + len(projects))) + for p in projects: + deleted_versions.extend(self.delete_empty_versions(p)) + return deleted_versions + +def _find_user_group_url(self, assignable_user_groups, user_group_name): + for user_group in assignable_user_groups['items']: + if user_group['name'] == user_group_name: + return user_group['usergroup'] + +def _find_user_url(self, assignable_user, user_name): + for user in assignable_user['items']: + if user['name'] == user_name: + return user['user'] + +def _project_role_urls(self, project_role_names): + all_project_roles = self.get_project_roles() + project_role_urls = list() + for project_role_name in project_role_names: + for project_role in all_project_roles: + if project_role_name == project_role['name']: + project_role_urls.append(project_role['_meta']['href']) + return project_role_urls + +def assign_user_group_to_project(self, project_name, user_group_name, project_roles): + # Assign the user group to the project using the list of project-role names + project = self.get_project_by_name(project_name) + # user_group = self.get_user_group_by_name(user_group_name) + + if project: + project_url = project['_meta']['href'] + assignable_user_groups_link = self.get_link(project, 'assignable-usergroups') + if assignable_user_groups_link: + assignable_user_groups_response = self.execute_get(f"{assignable_user_groups_link}?q=name:{user_group_name}") + assignable_user_groups = assignable_user_groups_response.json() + + # TODO: What to do if the user group is already assigned to the project, and therefore + # does not appear in the list of 'assignable' user groups? Should we search the (assigned) user + # groups and re-apply the project-roles to the assignment? + + user_group_url = self._find_user_group_url(assignable_user_groups, user_group_name) + if user_group_url: + headers = self.get_headers() + + # need project role urls to build the POST payload + project_roles_urls = self._project_role_urls(project_roles) + + # The POST endpoint changes based on whether we found any project-roles to assign + # Also, due to what appears to be a defect, the Content-Type changes + if project_roles_urls: + url = user_group_url + "/roles" + # one dict per project role assignment + post_data = [{'role': r, 'scope': project_url} for r in project_roles_urls] + # I found I had to use this Content-Type (application/json resulted in 412) + # ref: https://jira.dc1.lan/browse/HUB-18417 + headers['Content-Type'] = 'application/vnd.blackducksoftware.internal-1+json' + else: + url = project_url + "/usergroups" + # Assigning a group with no project-roles + post_data = {"group": user_group_url} + headers['Content-Type'] = 'application/json' + + response = requests.post( + url, + headers=headers, + data=json.dumps(post_data), + verify = not self.config['insecure']) + return response + else: + assignable_groups = [u['name'] for u in assignable_user_groups['items']] + logger.warning("The user group {} was not found in the assignable user groups ({}) for this project {}. 
Is the group already assigned to this project?".format( + user_group_name, assignable_groups, project_name)) + else: + logger.warning("This project {} has no assignable user groups".format(project_name)) + else: + logger.warning("Did not find a project by the name {}".format(project_name)) + +def delete_user_group_from_project(self, project_name, user_group_name): + project = self.get_project_by_name(project_name) + + if project: + project_url = project['_meta']['href'] + + user_group = self.get_user_group_by_name(user_group_name) + if user_group: + user_group_url = user_group['_meta']['href'] + user_group_id = user_group_url.rsplit('/', 1)[-1] + + project_user_group_url = f"{project_url}/usergroups/{user_group_id}" + self.execute_delete(project_user_group_url) + +def assign_user_to_project(self, user_name, project_name, project_roles, limit=1000): + # Assign users to projects + project = self.get_project_by_name(project_name) + + if project: + project_url = project['_meta']['href'] + assignable_users_link = self.get_link(project, 'assignable-users') + paramstring = self.get_limit_paramstring(limit) + url = assignable_users_link + paramstring + logger.debug("GET {}".format(url)) + if assignable_users_link: + assignable_users_response = self.execute_get(url) + assignable_users = assignable_users_response.json() + + # TODO: What to do if the user is already assigned to the project, and therefore + # does not appear in the list of 'assignable' user? Should we search the (assigned) user + # and re-apply the project-roles to the assignment? + + user_url = self._find_user_url(assignable_users, user_name) + if user_url: + headers = self.get_headers() + + # need project role urls to build the POST payload + project_roles_urls = self._project_role_urls(project_roles) + + # The POST endpoint changes based on whether we found any project-roles to assign + # Also, due to what appears to be a defect, the Content-Type changes + if project_roles_urls: + url = user_url + "/roles" + # one dict per project role assignment + post_data = [{'role': r, 'scope': project_url} for r in project_roles_urls] + # I found I had to use this Content-Type (application/json resulted in 412) + # ref: https://jira.dc1.lan/browse/HUB-18417 + headers['Content-Type'] = 'application/vnd.blackducksoftware.internal-1+json' + else: + url = project_url + "/users" + # Assigning a user with no project-roles + post_data = {"user": user_url} + headers['Content-Type'] = 'application/json' + + response = requests.post( + url, + headers=headers, + data=json.dumps(post_data), + verify=not self.config['insecure']) + return response + else: + assignable_username = [u['name'] for u in assignable_users['items']] + logger.warning( + "The user {} was not found in the assignable user ({}) for this project {}. Is the user already assigned to this project?".format( + user_name, assignable_username, project_name)) + else: + logger.warning("This project {} has no assignable users".format(project_name)) + else: + logger.warning("Did not find a project by the name {}".format(project_name)) + +def assign_project_application_id(self, project_name, application_id, overwrite=False): + logger.debug("Assigning application_id {} to project_name {}, overwrite={}".format( + application_id, project_name, overwrite)) + + existing_application_id, application_id_url = self.get_project_application_id(project_name) + + if existing_application_id: + if overwrite: + logger.debug("Found an existing application id {} for project {} and overwrite was True. 
Updating it to {}".format( + existing_application_id, project_name, application_id)) + return self.update_project_application_id(project_name, application_id) + else: + logger.debug("Found an existing application id {} for project {} and overwrite was False so not updating it".format( + existing_application_id, project_name)) + else: + logger.debug("No application id exists for project {}, assigning {} to it".format( + project_name, application_id)) + project = self.get_project_by_name(project_name) + if project: + project_mappings_url = self.get_link(project, "project-mappings") + if project_mappings_url: + post_data = {"applicationId": application_id} + response = self.execute_post(project_mappings_url, data=post_data) + return response + else: + logger.warning("Did not find project-mappings URL for project {}".format(project)) + else: + logger.warning("Did not find project by name {}".format(project_name)) + +def update_project_application_id(self, project_name, new_application_id): + application_id, application_id_url = self.get_project_application_id(project_name) + + if application_id and application_id_url: + put_data = { + "applicationId": new_application_id, + "_meta": { + "allow": [ + "DELETE", + "GET", + "PUT" + ], + "href": application_id_url, + "links": [] + } + } + response = self.execute_put(application_id_url, data=put_data) + return response + else: + logger.debug("Did not find application id for project name {}".format(project_name)) + +def delete_application_id(self, project_name): + application_id, application_id_url = self.get_project_application_id(project_name) + + if application_id_url: + self.execute_delete(application_id_url) + +def get_project_application_id(self, project_name): + project_mapping_info = self.get_project_info(project_name, 'project-mappings') + if project_mapping_info and 'items' in project_mapping_info: + for project_mapping in project_mapping_info['items']: + if 'applicationId' in project_mapping: + application_id = project_mapping['applicationId'] + application_id_url = project_mapping['_meta']['href'] + + return (application_id, application_id_url) + logger.debug("Did not find any project-mappings with 'applicationId' in them") + return (None, None) + else: + logger.debug("did not find any project-mappings for project {}".format(project_name)) + return (None, None) + +def get_project_info(self, project_name, link_name): + project = self.get_project_by_name(project_name) + link = self.get_link(project, link_name) + if link: + response = self.execute_get(link) + return response.json() + else: + return {} # nada + +def get_project_roles(self): + all_project_roles = self.get_roles(parameters={"filter":"scope:project"}) + return all_project_roles['items'] + +def get_version_scan_info(self, version_obj): + url = self.get_link(version_obj, "codelocations") + custom_headers = {'Accept': 'application/vnd.blackducksoftware.project-detail-5+json'} + response = self.execute_get(url, custom_headers=custom_headers) + code_locations = response.json().get('items', []) + if code_locations: + scan_info = { + 'most_recent_scan': max([cl['updatedAt'] for cl in code_locations]), + 'oldest_scan': min([cl['createdAt'] for cl in code_locations]), + 'number_scans': len(code_locations) + } + else: + scan_info = { + 'most_recent_scan': None, + 'oldest_scan': None, + 'number_scans': None + } + return scan_info diff --git a/blackduck/Reporting.py b/blackduck/Reporting.py new file mode 100644 index 00000000..62257198 --- /dev/null +++ b/blackduck/Reporting.py @@ -0,0 +1,81 
@@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +valid_categories = ['VERSION','CODE_LOCATIONS','COMPONENTS','SECURITY','FILES', 'ATTACHMENTS', 'CRYPTO_ALGORITHMS', 'PROJECT_VERSION_CUSTOM_FIELDS', 'BOM_COMPONENT_CUSTOM_FIELDS', 'LICENSE_TERM_FULFILLMENT'] +valid_report_formats = ["CSV", "JSON"] +def create_version_reports(self, version, report_list, format="CSV"): + assert all(list(map(lambda k: k in HubInstance.valid_categories, report_list))), "One or more selected report categories in {} are not valid ({})".format( + report_list, HubInstance.valid_categories) + assert format in HubInstance.valid_report_formats, "Format must be one of {}".format(HubInstance.valid_report_formats) + + post_data = { + 'categories': report_list, + 'versionId': version['_meta']['href'].split("/")[-1], + 'reportType': 'VERSION', + 'reportFormat': format + } + version_reports_url = self.get_link(version, 'versionReport') + return self.execute_post(version_reports_url, post_data) + +valid_notices_formats = ["TEXT", "JSON"] +def create_version_notices_report(self, version, format="TEXT", include_copyright_info=True): + assert format in HubInstance.valid_notices_formats, "Format must be one of {}".format(HubInstance.valid_notices_formats) + + post_data = { + 'versionId': object_id(version), + 'reportType': 'VERSION_LICENSE', + 'reportFormat': format + } + if include_copyright_info: + post_data.update({'categories': ["COPYRIGHT_TEXT"] }) + + notices_report_url = self.get_link(version, 'licenseReports') + return self.execute_post(notices_report_url, post_data) + +def download_report(self, report_id): + # TODO: Fix me, looks like the reports should be downloaded from different paths than the one here, and depending on the type and format desired the path can change + url = self.get_urlbase() + "/api/reports/{}".format(report_id) + return self.execute_get(url, {'Content-Type': 'application/zip', 'Accept':'application/zip'}) + +def download_notification_report(self, report_location_url): + '''Download the notices report using the report URL. 
Inspect the report object to determine + the format and use the appropriate media header''' + custom_headers = {'Accept': 'application/vnd.blackducksoftware.report-4+json'} + response = self.execute_get(report_location_url, custom_headers=custom_headers) + report_obj = response.json() + + if report_obj['reportFormat'] == 'TEXT': + download_url = self.get_link(report_obj, "download") + ".json" + logger.debug("downloading report from {}".format(download_url)) + response = self.execute_get(download_url, {'Accept': 'application/zip'}) + else: + # JSON + contents_url = self.get_link(report_obj, "content") + logger.debug("retrieving report contents from {}".format(contents_url)) + response = self.execute_get(contents_url, {'Accept': 'application/json'}) + return response, report_obj['reportFormat'] + +## +# +# (Global) Vulnerability reports +# +## +valid_vuln_status_report_formats = ["CSV", "JSON"] +def create_vuln_status_report(self, format="CSV"): + assert format in HubInstance.valid_vuln_status_report_formats, "Format must be one of {}".format(HubInstance.valid_vuln_status_report_formats) + + post_data = { + "reportFormat": format, + "locale": "en_US" + } + url = self.get_apibase() + "/vulnerability-status-reports" + custom_headers = { + 'Content-Type': 'application/vnd.blackducksoftware.report-4+json', + 'Accept': 'application/vnd.blackducksoftware.report-4+json' + } + return self.execute_post(url, custom_headers=custom_headers, data=post_data) diff --git a/blackduck/Roles.py b/blackduck/Roles.py new file mode 100644 index 00000000..f46b81fb --- /dev/null +++ b/blackduck/Roles.py @@ -0,0 +1,63 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_role_url(self): + return self.config['baseurl'] + "/api/roles" + +def get_roles(self, parameters={}): + url = self._get_role_url() + self._get_parameter_string(parameters) + response = self.execute_get(url) + return response.json() + +def get_roles_url_from_user_or_group(self, user_or_group): + # Given a user or user group object, return the 'roles' url + roles_url = None + for endpoint in user_or_group['_meta']['links']: + if endpoint['rel'] == "roles": + roles_url = endpoint['href'] + return roles_url + +def get_roles_for_user_or_group(self, user_or_group): + roles_url = self.get_roles_url_from_user_or_group(user_or_group) + if roles_url: + response = self.execute_get(roles_url) + return response.json() + else: + return [] + +def get_role_url_by_name(self, role_name): + # Return the global (as opposed to project-specific) role URL for this server corresponding to the role name + all_roles = self.get_roles() + for role in all_roles['items']: + if role['name'] == role_name: + return role['_meta']['href'] + +def assign_role_to_user_or_group(self, role_name, user_or_group): + user_or_group_roles_url = self.get_roles_url_from_user_or_group(user_or_group) + return self.assign_role_given_role_url(role_name, user_or_group_roles_url) + +def assign_role_given_role_url(self, role_name, user_or_group_role_assignment_url): + role_url = self.get_role_url_by_name(role_name) + if self.bd_major_version == "3": + # A hack to get the assignment to work on v3 + role_url = role_url.replace("api", "api/internal") + data = {"name": role_name, "role": role_url} + logger.debug("executing POST to {} with {}".format( + user_or_group_role_assignment_url, data)) + return self.execute_post(user_or_group_role_assignment_url, data = data) + +def 
delete_role_from_user_or_group(self, role_name, user_or_group): + roles = self.get_roles_for_user_or_group(user_or_group) + for role in roles['items']: + if role['name'] == role_name: + self.execute_delete(role['_meta']['href']) + + +def user_has_role(self, user_or_group, role_name): + user_roles_obj = self.get_roles_for_user_or_group(user_or_group) + return role_name in [r['name'] for r in user_roles_obj['items']] diff --git a/blackduck/Scans.py b/blackduck/Scans.py new file mode 100644 index 00000000..855507cb --- /dev/null +++ b/blackduck/Scans.py @@ -0,0 +1,105 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def upload_scan(self, filename): + url = self.get_apibase() + "/scan/data/?mode=replace" + headers = self.get_headers() + if filename.endswith('.json') or filename.endswith('.jsonld'): + headers['Content-Type'] = 'application/ld+json' + with open(filename,"r") as f: + response = requests.post(url, headers=headers, data=f, verify=not self.config['insecure']) + elif filename.endswith('.bdio'): + headers['Content-Type'] = 'application/vnd.blackducksoftware.bdio+zip' + with open(filename,"rb") as f: + response = requests.post(url, headers=headers, data=f, verify=not self.config['insecure']) + else: + raise Exception("Unkown file type") + return response + +def download_project_scans(self, project_name,version_name, output_folder=None): + version = self.get_project_version_by_name(project_name,version_name) + codelocations = self.get_version_codelocations(version) + import os + if output_folder: + if not os.path.exists(output_folder): + os.makedirs(output_folder, 0o755, True) + + result = [] + + for item in codelocations['items']: + links = item['_meta']['links'] + matches = [x for x in links if x['rel'] == 'enclosure' or x['rel'] == 'scan-data'] + for m in matches: + url = m['href'] + filename = url.split('/')[6] + if output_folder: + pathname = os.path.join(output_folder, filename) + else: + if not os.path.exists(project_name): + os.mkdir(project_name) + pathname = os.path.join(project_name, filename) + responce = requests.get(url, headers=self.get_headers(), stream=True, verify=not self.config['insecure']) + with open(pathname, "wb") as f: + for data in responce.iter_content(): + f.write(data) + result.append({filename, pathname}) + return result + +def get_codelocations(self, limit=100, unmapped=False, parameters={}): + parameters['limit'] = limit + paramstring = self._get_parameter_string(parameters) + headers = self.get_headers() + url = self.get_apibase() + "/codelocations" + paramstring + headers['Accept'] = 'application/vnd.blackducksoftware.scan-4+json' + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + if unmapped: + jsondata['items'] = [s for s in jsondata['items'] if 'mappedProjectVersion' not in s] + jsondata['totalCount'] = len(jsondata['items']) + return jsondata + +def get_codelocation_scan_summaries(self, code_location_id = None, code_location_obj = None, limit=100): + '''Retrieve the scans (aka scan summaries) for the given location. You can give either + code_location_id or code_location_obj. 
If both are supplied, precedence is to use code_location_obj + ''' + assert code_location_id or code_location_obj, "You must supply at least one - code_location_id or code_location_obj" + + paramstring = "?limit={}&offset=0".format(limit) + headers = self.get_headers() + headers['Accept'] = 'application/vnd.blackducksoftware.scan-4+json' + if code_location_obj: + url = self.get_link(code_location_obj, "scans") + else: + url = self.get_apibase() + \ + "/codelocations/{}/scan-summaries".format(code_location_id) + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata + +def delete_unmapped_codelocations(self, limit=1000): + code_locations = self.get_codelocations(limit=limit, unmapped=True).get('items', []) + + for c in code_locations: + scan_summaries = self.get_codelocation_scan_summaries(code_location_obj = c).get('items', []) + + if scan_summaries[0]['status'] == 'COMPLETE': + response = self.execute_delete(c['_meta']['href']) + +def delete_codelocation(self, locationid): + url = self.config['baseurl'] + "/api/codelocations/" + locationid + headers = self.get_headers() + response = requests.delete(url, headers=headers, verify = not self.config['insecure']) + return response + +def get_scan_locations(self, code_location_id): + headers = self.get_headers() + headers['Accept'] = 'application/vnd.blackducksoftware.scan-4+json' + url = self.get_apibase() + "/codelocations/{}".format(code_location_id) + response = requests.get(url, headers=headers, verify = not self.config['insecure']) + jsondata = response.json() + return jsondata diff --git a/blackduck/Snippet.py b/blackduck/Snippet.py new file mode 100644 index 00000000..cff23d8f --- /dev/null +++ b/blackduck/Snippet.py @@ -0,0 +1,21 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _check_version_compatibility(self): + if int(self.bd_major_version) < 2018: + raise UnsupportedBDVersion("The BD major version {} is less than the minimum required major version {}".format(self.bd_major_version, 2018)) + +def get_file_matches_for_bom_component(self, bom_component, limit=1000): + self._check_version_compatibility() + url = self.get_link(bom_component, "matched-files") + paramstring = self.get_limit_paramstring(limit) + logger.debug("GET {}".format(url)) + response = self.execute_get(url) + jsondata = response.json() + return jsondata + diff --git a/blackduck/System.py b/blackduck/System.py new file mode 100644 index 00000000..d91c8980 --- /dev/null +++ b/blackduck/System.py @@ -0,0 +1,18 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def get_health_checks(self): + url = self.get_urlbase() + "/api/health-checks/liveness" + return self.execute_get(url) + +def get_notifications(self, parameters={}): + url = self.get_urlbase() + "/api/notifications" + self._get_parameter_string(parameters) + custom_headers = {'Accept': 'application/vnd.blackducksoftware.notification-4+json'} + response = self.execute_get(url, custom_headers=custom_headers) + json_data = response.json() + return json_data diff --git a/blackduck/UserGroup.py b/blackduck/UserGroup.py new file mode 100644 index 00000000..06de3c96 --- /dev/null +++ b/blackduck/UserGroup.py @@ -0,0 +1,68 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = 
logging.getLogger(__name__) + +def _get_user_group_url(self): + return self.config['baseurl'] + "/api/usergroups" + +def get_user_groups(self, parameters={}): + url = self._get_user_group_url() + self._get_parameter_string(parameters) + headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} + response = self.execute_get(url, custom_headers=headers) + return response.json() + +def get_user_group_by_name(self, group_name): + group_list = self.get_user_groups({"q": f"name:{group_name}"}) + for group in group_list['items']: + if group['name'] == group_name: + return group + +def create_user_group(self, user_group_json): + if self.bd_major_version == "3": + url = self.config['baseurl'] + '/api/v1/usergroups' + else: + url = self._get_user_group_url() + location = self._create(url, user_group_json) + return location + +def create_user_group_by_name(self, group_name, active=True): + user_group_info = { + 'name': group_name, + 'createdFrom': 'INTERNAL', + 'active': active + } + return self.create_user_group(user_group_info) + +# def get_user_group_by_id(self, user_group_id): +# url = self._get_user_group_url() + "/{}".format(user_group_id) +# return self.get_user_group_by_url(url) + +# def get_user_group_by_url(self, user_group_url): +# response = self.execute_get(user_group_url) +# jsondata = response.json() +# return jsondata + +# def get_user_group_by_name(self, user_group_name): +# url = self._get_user_group_url() + "?q={}".format(user_group_name) +# response = self.execute_get(url) +# user_group_obj = response.json() +# if user_group_obj['totalCount'] > 0: +# return user_group_obj['items'][0] + +def update_user_group_by_id(self, user_group_id, update_json): + url = self._get_user_group_url() + "/{}".format(user_group_id) + return self.update_user_group_by_url(url, update_json) + +def update_user_group_by_url(self, user_group_url, update_json): + return self.execute_put(user_group_url, update_json) + +def delete_user_group_by_id(self, user_group_id): + url = self._get_user_group_url() + "/{}".format(user_group_id) + return self.delete_user_group_by_url(url) + +def delete_user_group_by_url(self, user_group_url): + return self.execute_delete(user_group_url) diff --git a/blackduck/Users.py b/blackduck/Users.py new file mode 100644 index 00000000..09556fe2 --- /dev/null +++ b/blackduck/Users.py @@ -0,0 +1,66 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_user_url(self): + return self.config['baseurl'] + "/api/users" + +def get_users(self, parameters={}): + url = self._get_user_url() + self._get_parameter_string(parameters) + headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} + response = self.execute_get(url, custom_headers=headers) + return response.json() + +def get_current_user(self): + url = self.config['baseurl'] + "/api/current-user" + headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} + response = self.execute_get(url, custom_headers=headers) + return response.json() + +def create_user(self, user_json): + url = self._get_user_url() + location = self._create(url, user_json) + return location + +def get_user_by_id(self, user_id): + url = self._get_user_url() + "/{}".format(user_id) + headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} + return self.get_user_by_url(url, custom_headers=headers) + +def get_user_by_url(self, user_url): + headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} + response = 
self.execute_get(user_url, custom_headers=headers) + jsondata = response.json() + return jsondata + +def update_user_by_id(self, user_id, update_json): + url = self._get_user_url() + "/{}".format(user_id) + return self.update_user_by_url(url, update_json) + +def update_user_by_url(self, user_url, update_json): + return self.execute_put(user_url, update_json) + +def delete_user_by_id(self, user_id): + url = self._get_user_url() + "/{}".format(user_id) + return self.delete_user_by_url(url) + +def delete_user_by_url(self, user_url): + return self.execute_delete(user_url) + +def reset_user_password(self, user_id, new_password): + url = self.config['baseurl'] + "/api/users/" + user_id + "/resetpassword" + headers = {'Content-Type':'application/vnd.blackducksoftware.user-1+json', 'Accept': 'application/json'} + data = {'password': new_password} + return self.execute_put(url, data, headers) + +def get_last_login(self,sinceDays=60): + url = self.config['baseurl'] + "/api/dormant-users" + param_string = self._get_parameter_string({'sinceDays': sinceDays}) + url = "{}{}".format(url, param_string) + headers = {'Accept': 'application/vnd.blackducksoftware.user-4+json'} + response = self.execute_get(url, custom_headers=headers) + return response.json() diff --git a/blackduck/Utils.py b/blackduck/Utils.py new file mode 100644 index 00000000..f4e010b2 --- /dev/null +++ b/blackduck/Utils.py @@ -0,0 +1,113 @@ +''' + +Created on Dec 22, 2020 +@author: ar-calder + +''' + +from datetime import datetime, timedelta +import dateutil.parser +import json +import logging + +logger = logging.getLogger(__name__) + + +def iso8061_to_date(iso_string, with_zone=False): + """Utility function to convert iso_8061 formatted string to datetime object, optionally accounting for timezone + + Args: + iso_string (string): the iso_8061 string to convert to datetime object + with_zone (bool, optional): whether to account for timezone offset. Defaults to False. + + Returns: + datetime.datetime: equivalent time, with or without timezone offsets + """ + date_timezone = iso_string.split('Z') + date = dateutil.parser.parse(date_timezone[0]) + if with_zone and len(date_timezone > 1): + hours_minutes = date_timezone[1].split(':') + minutes = (60*int(hours_minutes[0]) + int(hours_minutes[1] if len(hours_minutes) > 1 else 0)) + date = date + datetime.timedelta(minutes=minutes) + return date + +def min_iso8061(): + """Utility wrapper for iso8061_to_date which provides minimum date (for comparison purposes). + + Returns: + datetime.datetime: 0 / 1970-01-01T00:00:00.000 + """ + return iso8061_to_date("1970-01-01T00:00:00.000") + +def find_field(data_to_filter, field_name, field_value): + """Utility function to filter blackduck objects for specific fields + + Args: + data_to_filter (dict): typically the blackduck object or subselection of this + field_name (string): name of field to use in comparisons + field_value (string): value of field we seek + + Returns: + object: object if found or None. + """ + return next(filter(lambda d: d.get(field) == field_value, data_to_filter), None) + +def safe_get(obj, *keys): + """Utility function to safely perform multiple get's on a dict. + Particularly useful on complex/deep objects. + + Args: + obj (dict): object to perform get on. + *keys (string): consecutive keys as args. + + Returns: + object: object if found or None. 
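+
+    A short illustrative example of the chained lookup (the dict below is
+    made up, not a real Hub response):
+
+        project = {'name': 'demo', '_meta': {'href': 'https://hub/api/projects/1'}}
+        safe_get(project, '_meta', 'href')     # -> 'https://hub/api/projects/1'
+        safe_get(project, '_meta', 'missing')  # -> None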
+ """ + for key in keys: + try: + obj = obj[key] + except KeyError: + return None + return obj + +def get_url(obj): + """Utility wrapper for safe_get providing URL lookup for a given object + + Args: + obj (dict): object to perform URL lookup on. + + Returns: + string: url if found or None. + """ + return safe_get(obj, '_meta', 'href') + +def get_resource_name(obj): + parts = get_url(obj).split('/') + + for part in parts: + pass + + # regex for id 8-4-4-12 + + +def pfmt(value): + """Utility function to 'pretty format' a dict or json + + Args: + value (json/dict): the json object or dict to pretty format + + Returns: + string: json formatted string representing passed object + """ + return json.dumps(value, indent=4) + +def pprint(value): + """Utility wrapper for pfmt that prints 'pretty formatted' json data. + + Args: + value (json/dict): the json object or dict to pretty print + + Returns: + None + """ + print(pfmt(value)) diff --git a/blackduck/Versions.py b/blackduck/Versions.py new file mode 100644 index 00000000..8d817b04 --- /dev/null +++ b/blackduck/Versions.py @@ -0,0 +1,39 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def add_version_as_component(self, main_project_release, sub_project_release): + headers = self.get_headers() + main_data = main_project_release['_meta']['href'].split('/') + sub_data = sub_project_release['_meta']['href'].split('/') + main_project_release_links = main_project_release['_meta']['links'] + main_project_release_component_links = [x for x in main_project_release_links if x['rel'] == 'components'] + main_project_release_component_link = main_project_release_component_links[0]['href'] + logger.debug(main_project_release_component_link) + sub_project_release_as_custom_component_url = self.get_apibase() + "/components/" + sub_data[5] + "/versions/" + sub_data[7] + logger.debug(sub_project_release_as_custom_component_url) + payload = {} + payload['component'] = sub_project_release_as_custom_component_url + logger.debug(json.dumps(payload)) + response = requests.post(main_project_release_component_link, headers=headers, verify = not self.config['insecure'], json=payload) + logger.debug(response) + return response + + + +def remove_version_as_component(self, main_project_release, sub_project_release): + headers = self.get_headers() + main_data = main_project_release['_meta']['href'].split('/') + sub_data = sub_project_release['_meta']['href'].split('/') + main_project_release_links = main_project_release['_meta']['links'] + main_project_release_component_links = [x for x in main_project_release_links if x['rel'] == 'components'] + main_project_release_component_link = main_project_release_component_links[0]['href'] + logger.debug(main_project_release_component_link) + subcomponent_url = main_project_release_component_link + "/" + sub_data[5] + "/versions/" + sub_data[7] + logger.debug(subcomponent_url) + response = requests.delete(subcomponent_url, headers=headers, verify = not self.config['insecure']) + return response diff --git a/blackduck/Vulnerabilities.py b/blackduck/Vulnerabilities.py new file mode 100644 index 00000000..fe3d665c --- /dev/null +++ b/blackduck/Vulnerabilities.py @@ -0,0 +1,38 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_vulnerabilities_url(self): + return self.config['baseurl'] + '/api/vulnerabilities' + +def 
get_vulnerabilities(self, vulnerability, parameters={}): + url = self._get_vulnerabilities_url() + "/{}".format(vulnerability) + self._get_parameter_string(parameters) + headers = {'Accept': 'application/vnd.blackducksoftware.vulnerability-4+json'} + response = self.execute_get(url, custom_headers=headers) + return response.json() + +def get_vulnerability_affected_projects(self, vulnerability): + url = self._get_vulnerabilities_url() + "/{}/affected-projects".format(vulnerability) + custom_headers = {'Accept': 'application/vnd.blackducksoftware.vulnerability-4+json'} + response = self.execute_get(url, custom_headers=custom_headers) + return response.json() + +# TODO: Refactor this, i.e. use get_link method? +def get_vulnerable_bom_components(self, version_obj, limit=9999): + url = "{}/vulnerable-bom-components".format(version_obj['_meta']['href']) + custom_headers = {'Accept': 'application/vnd.blackducksoftware.bill-of-materials-6+json'} + param_string = self._get_parameter_string({'limit': limit}) + url = "{}{}".format(url, param_string) + response = self.execute_get(url, custom_headers=custom_headers) + return response.json() + +# TODO: Remove or refactor this +def get_component_remediation(self, bom_component): + url = "{}/remediating".format(bom_component['componentVersion']) + logger.debug("Url for getting remediation info is : {}".format(url)) + response = self.execute_get(url) + return response.json() diff --git a/blackduck/__init__.py b/blackduck/__init__.py index 8b137891..5fbbeeb7 100644 --- a/blackduck/__init__.py +++ b/blackduck/__init__.py @@ -1 +1,2 @@ +from .HubRestApi import HubInstance From 91f01ae1e4c8cbda062fe990420beb24119f2673 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 13:44:22 +0000 Subject: [PATCH 02/12] Added BearerAuth class to leverage requests.auth --- blackduck/Authentication.py | 96 +++++++++++++++++++++++++++++++++++-- 1 file changed, 92 insertions(+), 4 deletions(-) diff --git a/blackduck/Authentication.py b/blackduck/Authentication.py index 26d38097..f6c5628f 100644 --- a/blackduck/Authentication.py +++ b/blackduck/Authentication.py @@ -1,7 +1,95 @@ -import logging +''' + +Created on Dec 23, 2020 +@author: ar-calder + +''' + import requests +import logging import json -from operator import itemgetter -import urllib.parse +from datetime import datetime, timedelta + +logger = logging.getLogger(__name__) + +class BearerAuth(requests.auth.AuthBase): + + from .Exceptions import http_exception_handler + + def __init__( + self, + session=None, + token=None, + base_url=None, + verify=True, + timeout=15, + ): + + if any(arg == False for arg in (token, base_url)): + raise ValueError( + 'token & base_url are required' + ) + + self.verify=verify + self.client_token = token + self.auth_token = None + self.csrf_token = None + self.valid_until = datetime.utcnow() + + self.auth_url = requests.compat.urljoin(base_url, '/api/tokens/authenticate') + self.session = session or requests.session() + self.timeout = timeout + + + def __call__(self, request): + if not self.auth_token or self.valid_until < datetime.utcnow(): + # If authentication token not set or no longer valid + self.authenticate() + + request.headers.update({ + "authorization" : f"bearer {self.auth_token}", + "X-CSRF-TOKEN" : self.csrf_token + }) + + return request + + + def authenticate(self): + if not self.verify: + requests.packages.urllib3.disable_warnings() + # Announce this on every auth attempt, as a little incentive to properly configure certs + logger.warn("ssl verification 
disabled, connection insecure. do NOT use verify=False in production!") + + try: + response = self.session.request( + method='POST', + url=self.auth_url, + headers = { + "Authorization" : f"token {self.client_token}" + }, + verify=self.verify, + timeout=self.timeout + ) + + if response.status_code / 100 != 2: + self.http_exception_handler( + response=response, + name="authenticate" + ) + + content = response.json() + self.csrf_token = response.headers.get('X-CSRF-TOKEN') + self.auth_token = content.get('bearerToken') + self.valid_until = datetime.utcnow() + timedelta(milliseconds=int(content.get('expiresInMilliseconds', 0))) -logger = logging.getLogger(__name__) \ No newline at end of file + # Do not handle exceptions - just just more details as to possible causes + # Thus we do not catch a JsonDecodeError here even though it may occur + # - no futher details to give. + except requests.exceptions.ConnectTimeout as connect_timeout: + logger.critical(f"could not establish a connection within {self.timeout}s, this may be indicative of proxy misconfiguration") + raise connect_timeout + except requests.exceptions.ReadTimeout as read_timeout: + logger.critical(f"slow or unstable connection, consider increasing timeout (currently set to {self.timeout}s)") + raise read_timeout + else: + logger.info(f"success: auth granted until {self.valid_until} UTC") \ No newline at end of file From e205b47a4054f8f2506c6ec0ba588c00ef26ba16 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 13:45:19 +0000 Subject: [PATCH 03/12] corrected exception handler naming to indicate http-only --- blackduck/Exceptions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/blackduck/Exceptions.py b/blackduck/Exceptions.py index 61f71fcd..fa8c61cd 100644 --- a/blackduck/Exceptions.py +++ b/blackduck/Exceptions.py @@ -32,7 +32,7 @@ class EndpointNotFound(Exception): class UnacceptableContentType(Exception): pass -def exception_handler(self, response, name): +def http_exception_handler(self, response, name): error_codes = { 404 : EndpointNotFound, 406 : UnacceptableContentType From 4881877f171ffc35293133b065ff5ad38a450486 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 13:46:33 +0000 Subject: [PATCH 04/12] added alternative blackduck client and alternative core methods based on new BearerAuth class --- blackduck/Client.py | 68 ++++++++++++++++ blackduck/ClientCore.py | 176 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 244 insertions(+) create mode 100644 blackduck/Client.py create mode 100644 blackduck/ClientCore.py diff --git a/blackduck/Client.py b/blackduck/Client.py new file mode 100644 index 00000000..1ab20e02 --- /dev/null +++ b/blackduck/Client.py @@ -0,0 +1,68 @@ +''' +Created on Dec 23, 2020 +@author: ar-calder + +Wrapper for common HUB API queries. +Upon initialization Bearer token is obtained and used for all subsequent calls. +Token will auto-renew on timeout. 
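+
+A minimal usage sketch (the token and URL below are placeholders):
+
+    from blackduck.Client import Client
+
+    bd = Client(token='YOUR TOKEN HERE', base_url='https://your.blackduck.url')
+    for project in bd.get_projects():
+        for version in bd.get_resource(project, 'versions'):
+            print(project.get('name'), version.get('versionName'))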
+''' + +from .Utils import find_field, safe_get +from .Authentication import BearerAuth +import logging +import requests +logger = logging.getLogger(__name__) + +class Client: + VERSION_DISTRIBUTION=["EXTERNAL", "SAAS", "INTERNAL", "OPENSOURCE"] + VERSION_PHASES = ["PLANNING", "DEVELOPMENT", "PRERELEASE", "RELEASED", "DEPRECATED", "ARCHIVED"] + PROJECT_VERSION_SETTINGS = ['nickname', 'releaseComments', 'versionName', 'phase', 'distribution', 'releasedOn'] + + from .Exceptions import( + http_exception_handler + ) + + from .ClientCore import ( + _request, _get_items, _get_resource_href, get_resource, list_resources, _get_base_resource_url, get_base_resource, _get_parameter_string + ) + + def __init__( + self, + *args, + token=None, + base_url=None, + session=None, + auth=None, + verify=True, + timeout=15, + **kwargs): + + self.verify=verify + self.timeout=int(timeout) + self.base_url=base_url + self.session = session or requests.session() + self.auth = auth or BearerAuth( + session = self.session, + token=token, + base_url=base_url, + verify=self.verify + ) + + def print_methods(self): + import inspect + for fn in inspect.getmembers(self, predicate=inspect.ismember): + print(fn[0]) + + # Example for projects + def get_projects(self, parameters=[], **kwargs): + return self._get_items( + method='GET', + # url unlikely to change hence is_public=false (faster). + url= self._get_base_resource_url('projects', is_public=False), + name="project", + **kwargs + ) + + def get_project_by_name(self, project_name, **kwargs): + projects = self.get_projects(**kwargs) + return find_field(projects, 'name', project_name) diff --git a/blackduck/ClientCore.py b/blackduck/ClientCore.py new file mode 100644 index 00000000..b3e2f942 --- /dev/null +++ b/blackduck/ClientCore.py @@ -0,0 +1,176 @@ +''' +Created on Dec 23, 2020 +@author: ar-calder + +''' + +import logging +import requests +import json + +from .Utils import find_field, safe_get +logger = logging.getLogger(__name__) + +def _request( + self, + method, + url, + name='', + parameters=[], + **kwargs + ): + """[summary] + + Args: + method ([type]): [description] + url ([type]): [description] + name (str, optional): name of the reqested resource. Defaults to ''. + + Raises: + connect_timeout: often indicative of proxy misconfig + read_timeout: often indicative of slow connection + + Returns: + json/dict/list: requested object, json decoded. 
+ """ + + headers = { + 'accept' : 'application/json' + } + headers.update(kwargs.pop('headers', dict())) + + if parameters: + url += self._get_parameter_string(parameters) + + try: + response = self.session.request( + method=method, + url=url, + headers=headers, + verify=self.verify, + auth=self.auth, + **kwargs + ) + + if response.status_code / 100 != 2: + self.http_exception_handler( + response=response, + name=name + ) + + response_json = response.json() + + # Do not handle exceptions - just just more details as to possible causes + # Thus we do not catch a JsonDecodeError here even though it may occur + except requests.exceptions.ConnectTimeout as connect_timeout: + logger.critical(f"could not establish a connection within {self.timeout}s, this may be indicative of proxy misconfiguration") + raise connect_timeout + except requests.exceptions.ReadTimeout as read_timeout: + logger.critical(f"slow or unstable connection, consider increasing timeout (currently set to {self.timeout}s)") + raise read_timeout + else: + return response_json + +def _get_items(self, url, method='GET', page_size=100, name='', **kwargs): + """Utility method to get 'pages' of items + + Args: + url (str): [description] + method (str, optional): [description]. Defaults to 'GET'. + page_size (int, optional): [description]. Defaults to 100. + name (str, optional): [description]. Defaults to ''. + + Yields: + [type]: [description] + """ + offset = 0 + params = kwargs.pop('params', dict()) + while True: + params.update({'offset':f"{offset}", 'limit':f"{page_size}"}) + items = self._request( + method=method, + url=url, + params=params, + name=name, + **kwargs + ).get('items', list()) + + for item in items: + yield item + + if len(items) < page_size: + # This will be true if there are no more 'pages' to view + break + + offset += page_size + + +def _get_resource_href(self, resources, resource_name): + """Utility function to get url for a given resource_name + + Args: + resources (dict/json): [description] + resource_name (str): [description] + + Raises: + KeyError: on key not found + + Returns: + str: url to named resource + """ + res = find_field( + data_to_filter=safe_get(resources, '_meta', 'links'), + field_name='rel', + field_value=resource_name + ) + + if None == res: + raise KeyError(f"'{self.get_resource_name(resources)}' object has no such key '{resource_name}'") + return safe_get(res, 'href') + +def get_resource(self, bd_object, resource_name, iterable=True, is_public=True, **kwargs): + """Generic function to facilitate subresource fetching + + Args: + bd_object (dict/json): [description] + resource_name (str): [description] + iterable (bool, optional): [description]. Defaults to True. + is_public (bool, optional): [description]. Defaults to True. 
+ + Returns: + dict/json: named resource object + """ + url = self._get_resource_href(resources=bd_object, resource_name=resource_name) if is_public else self.get_url(bd_object) + f"/{resource_name}" + fn = self._get_items if iterable else self._request + return fn( + method='GET', + url=url, + name=resource_name, + **kwargs + ) + +def list_resources(self, bd_object): + return [res.get('rel') for res in safe_get(bd_object, '_meta', 'links')] + +def _get_base_resource_url(self, resource_name, is_public=True, **kwargs): + if is_public: + resources = self._request( + method="GET", + url=self.base_url + f"/api/", + name='_get_base_resource_url', + **kwargs + ) + return resources.get(resource_name, "") + else: + return self.base_url + f"/api/{resource_name}" + +def get_base_resource(self, resource_name, is_public=True, **kwargs): + return self._request( + method='GET', + url=self._get_base_resource_url(resource_name, is_public=is_public, **kwargs), + name='get_base_resource', + **kwargs + ) + +def _get_parameter_string(self, parameters=list()): + return '?' + '&'.join(parameters) if parameters else '' From 7e4209a49cd7f9049f0a2de19a45bcce31ef055e Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 13:46:53 +0000 Subject: [PATCH 05/12] added alternative blackduck client and alternative core methods based on new BearerAuth class --- blackduck/Utils.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/blackduck/Utils.py b/blackduck/Utils.py index f4e010b2..1253d158 100644 --- a/blackduck/Utils.py +++ b/blackduck/Utils.py @@ -13,7 +13,7 @@ logger = logging.getLogger(__name__) -def iso8061_to_date(iso_string, with_zone=False): +def iso8601_to_date(iso_string, with_zone=False): """Utility function to convert iso_8061 formatted string to datetime object, optionally accounting for timezone Args: @@ -31,6 +31,12 @@ def iso8061_to_date(iso_string, with_zone=False): date = date + datetime.timedelta(minutes=minutes) return date +def iso8601_timespan(days_ago, from_date=datetime.utcnow(), delta=timedelta(weeks=1)): + curr_date = from_date - timedelta(days=days_ago) + while curr_date < from_date: + yield curr_date.isoformat('T', 'seconds') + curr_date += delta + def min_iso8061(): """Utility wrapper for iso8061_to_date which provides minimum date (for comparison purposes). @@ -50,7 +56,7 @@ def find_field(data_to_filter, field_name, field_value): Returns: object: object if found or None. """ - return next(filter(lambda d: d.get(field) == field_value, data_to_filter), None) + return next(filter(lambda d: d.get(field_name) == field_value, data_to_filter), None) def safe_get(obj, *keys): """Utility function to safely perform multiple get's on a dict. 
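The Utils changes above (the `field_name` fix in `find_field` and the new `iso8601_timespan` generator) can be exercised in isolation; a minimal sketch, assuming the patched `blackduck.Utils` module is importable, with made-up sample data:

```python
from datetime import timedelta
from blackduck.Utils import find_field, iso8601_timespan

# made-up stand-ins for Hub project objects
projects = [
    {'name': 'demo',  '_meta': {'href': 'https://hub/api/projects/1'}},
    {'name': 'other', '_meta': {'href': 'https://hub/api/projects/2'}},
]

# find_field now filters on field_name (the lambda previously referenced an undefined name)
print(find_field(projects, 'name', 'demo'))

# iso8601_timespan yields ISO-8601 timestamps stepping from days_ago up to the current time
for timestamp in iso8601_timespan(days_ago=28, delta=timedelta(weeks=1)):
    print(timestamp)
```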
From d5b7a103d9c4583b544e453d25ae64e81bd747a9 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 13:47:58 +0000 Subject: [PATCH 06/12] added examples/demos for alternative blackduck client --- test/demo_client.py | 74 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 test/demo_client.py diff --git a/test/demo_client.py b/test/demo_client.py new file mode 100644 index 00000000..3f9f42a2 --- /dev/null +++ b/test/demo_client.py @@ -0,0 +1,74 @@ +import os +import requests +from requests.adapters import HTTPAdapter +import logging + +logging.basicConfig( + level=logging.INFO, + format='[%(asctime)s] {%(module)s:%(lineno)d} %(levelname)s - %(message)s' +) + +# create http adapter with exponential backoff (for unstable and/or slow connections) +http_adapter = HTTPAdapter( + max_retries=requests.packages.urllib3.util.retry.Retry( + total=5, + backoff_factor=10, + status_forcelist=[429,500,502,503,504] + ) +) +custom_session = requests.session() +custom_session.mount('http://', http_adapter) +custom_session.mount('https://', http_adapter) + +# use os env proxy settings, if any +custom_session.proxies.update({ + 'http' : os.environ.get('http_proxy',''), + 'https' : os.environ.get('http_proxy', '') +}) + + +# Brief demo +from datetime import datetime, timedelta +import blackduck + +def vulns_in_all_project_versions_components(bd): + for project in bd.get_projects(): + for version in bd.get_resource(project, 'versions'): + for component in bd.get_resource(version, 'components'): + for vulnerability in bd.get_resource(component, 'vulnerabilities'): + print(f"{project.get('name')}-{version.get('versionName')} [{component.get('componentName')}] has {vulnerability.get('severity')} severity vulnerability '{vulnerability.get('name')}'") + +def list_project_subresources(bd): + for project in bd.get_projects(): + subresources = bd.list_resources(project) + print(f"projects has the following subresources: {', '.join(subresources)}") + return + + +def projects_added_at_4_week_intervals(bd): + last_count = 0 + count = 0 + print("Projects added, in 4 week intervals:") + for timestamp in blackduck.Utils.iso8601_timespan(days_ago=365, delta=timedelta(weeks=4)): + last_count=count + count=0 + for project in bd.get_projects(): + created_at = blackduck.Utils.iso8601_to_date(project.get('createdAt')) + count += (created_at <= blackduck.Utils.iso8601_to_date(timestamp)) + + print(f"{count-last_count} projects as of {timestamp}") + +bd = blackduck.Client( + token=os.environ.get('blackduck_token', 'YOUR TOKEN HERE'), + base_url='https://your.blackduck.url', #!important! 
no trailing slash + session=custom_session + # verify=False # if required +) + +# If disabling warnings, don't do so at the library level: +requests.packages.urllib3.disable_warnings() + +# Various examples: +# vulns_in_all_project_versions_components(bd) +projects_added_at_4_week_intervals(bd) +# list_project_subresources(bd) \ No newline at end of file From 04faee1f8f95b1dead15a95d0bebfe4580e17ec2 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 14:16:09 +0000 Subject: [PATCH 07/12] Delete settings.json --- .vscode/settings.json | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 4d76a7c6..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "python.pythonPath": "c:\\Users\\arcalder\\Projects\\hub-rest-api-python\\virtualenv\\Scripts\\python.exe" -} \ No newline at end of file From 31338fb84505118920e49e3aa5c992580867aaf7 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 14:16:46 +0000 Subject: [PATCH 08/12] Delete CustomFields.py --- blackduck/CustomFields.py | 112 -------------------------------------- 1 file changed, 112 deletions(-) delete mode 100644 blackduck/CustomFields.py diff --git a/blackduck/CustomFields.py b/blackduck/CustomFields.py deleted file mode 100644 index 8f1e243c..00000000 --- a/blackduck/CustomFields.py +++ /dev/null @@ -1,112 +0,0 @@ -import logging -import requests -import json -from operator import itemgetter -import urllib.parse - -logger = logging.getLogger(__name__) - -def _get_cf_url(self): - return self.get_apibase() + "/custom-fields/objects" - -def supported_cf_object_types(self): - '''Get the types and cache them since they are static (on a per-release basis)''' - if not hasattr(self, "_cf_object_types"): - logger.debug("retrieving object types") - self._cf_object_types = [cfo['name'] for cfo in self.get_cf_objects().get('items', [])] - return self._cf_object_types - -def get_cf_objects(self): - '''Get CF objects and cache them since these are static (on a per-release basis)''' - url = self._get_cf_url() - if not hasattr(self, "_cf_objects"): - logger.debug("retrieving objects") - response = self.execute_get(url) - self._cf_objects = response.json() - return self._cf_objects - -def _get_cf_object_url(self, object_name): - for cf_object in self.get_cf_objects().get('items', []): - if cf_object['name'].lower() == object_name.lower(): - return cf_object['_meta']['href'] - -def get_cf_object(self, object_name): - assert object_name in self.supported_cf_object_types(), "Object name {} not one of the supported types ({})".format(object_name, self.supported_cf_object_types()) - - object_url = self._get_cf_object_url(object_name) - response = self.execute_get(object_url) - return response.json() - -def _get_cf_obj_rel_path(self, object_name): - return object_name.lower().replace(" ", "-") - -def create_cf(self, object_name, field_type, description, label, position, active=True, initial_options=[]): - ''' - Create a custom field for the given object type (e.g. "Project", "Project Version") using the field_type and other parameters. - - Initial options are needed for field types like multi-select where the multiple values to choose from must also be provided. 
- - initial_options = [{"label":"val1", "position":0}, {"label":"val2", "position":1}] - ''' - assert isinstance(position, int) and position >= 0, "position must be an integer that is greater than or equal to 0" - assert field_type in ["BOOLEAN", "DATE", "DROPDOWN", "MULTISELECT", "RADIO", "TEXT", "TEXTAREA"] - - types_using_initial_options = ["DROPDOWN", "MULTISELECT", "RADIO"] - - post_url = self._get_cf_object_url(object_name) + "/fields" - cf_object = self._get_cf_obj_rel_path(object_name) - cf_request = { - "active": active, - "description": description, - "label": label, - "position": position, - "type": field_type, - } - if field_type in types_using_initial_options and initial_options: - cf_request.update({"initialOptions": initial_options}) - response = self.execute_post(post_url, data=cf_request) - return response - -def delete_cf(self, object_name, field_id): - '''Delete a custom field from a given object type, e.g. Project, Project Version, Component, etc - - WARNING: Deleting a custom field is irreversiable. Any data in the custom fields could be lost so use with caution. - ''' - assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) - - delete_url = self._get_cf_object_url(object_name) + "/fields/{}".format(field_id) - return self.execute_delete(delete_url) - -def get_custom_fields(self, object_name): - '''Get the custom field (definition) for a given object type, e.g. Project, Project Version, Component, etc - ''' - assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) - - url = self._get_cf_object_url(object_name) + "/fields" - - response = self.execute_get(url) - return response.json() - -def get_cf_values(self, obj): - '''Get all of the custom fields from an object such as a Project, Project Version, Component, etc - - The obj is expected to be the JSON document for a project, project-version, component, etc - ''' - url = self.get_link(obj, "custom-fields") - response = self.execute_get(url) - return response.json() - -def get_cf_value(self, obj, field_id): - '''Get a custom field value from an object such as a Project, Project Version, Component, etc - - The obj is expected to be the JSON document for a project, project-version, component, etc - ''' - url = self.get_link(obj, "custom-fields") + "/{}".format(field_id) - response = self.execute_get(url) - return response.json() - -def put_cf_value(self, cf_url, new_cf_obj): - '''new_cf_obj is expected to be a modified custom field value object with the values updated accordingly, e.g. - call get_cf_value, modify the object, and then call put_cf_value - ''' - return self.execute_put(cf_url, new_cf_obj) From 01db04c8aa011448a2f8b0053aafd873e1b655d6 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 14:21:27 +0000 Subject: [PATCH 09/12] Revert "Delete CustomFields.py" This reverts commit 31338fb84505118920e49e3aa5c992580867aaf7. 
--- blackduck/CustomFields.py | 112 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 blackduck/CustomFields.py diff --git a/blackduck/CustomFields.py b/blackduck/CustomFields.py new file mode 100644 index 00000000..8f1e243c --- /dev/null +++ b/blackduck/CustomFields.py @@ -0,0 +1,112 @@ +import logging +import requests +import json +from operator import itemgetter +import urllib.parse + +logger = logging.getLogger(__name__) + +def _get_cf_url(self): + return self.get_apibase() + "/custom-fields/objects" + +def supported_cf_object_types(self): + '''Get the types and cache them since they are static (on a per-release basis)''' + if not hasattr(self, "_cf_object_types"): + logger.debug("retrieving object types") + self._cf_object_types = [cfo['name'] for cfo in self.get_cf_objects().get('items', [])] + return self._cf_object_types + +def get_cf_objects(self): + '''Get CF objects and cache them since these are static (on a per-release basis)''' + url = self._get_cf_url() + if not hasattr(self, "_cf_objects"): + logger.debug("retrieving objects") + response = self.execute_get(url) + self._cf_objects = response.json() + return self._cf_objects + +def _get_cf_object_url(self, object_name): + for cf_object in self.get_cf_objects().get('items', []): + if cf_object['name'].lower() == object_name.lower(): + return cf_object['_meta']['href'] + +def get_cf_object(self, object_name): + assert object_name in self.supported_cf_object_types(), "Object name {} not one of the supported types ({})".format(object_name, self.supported_cf_object_types()) + + object_url = self._get_cf_object_url(object_name) + response = self.execute_get(object_url) + return response.json() + +def _get_cf_obj_rel_path(self, object_name): + return object_name.lower().replace(" ", "-") + +def create_cf(self, object_name, field_type, description, label, position, active=True, initial_options=[]): + ''' + Create a custom field for the given object type (e.g. "Project", "Project Version") using the field_type and other parameters. + + Initial options are needed for field types like multi-select where the multiple values to choose from must also be provided. + + initial_options = [{"label":"val1", "position":0}, {"label":"val2", "position":1}] + ''' + assert isinstance(position, int) and position >= 0, "position must be an integer that is greater than or equal to 0" + assert field_type in ["BOOLEAN", "DATE", "DROPDOWN", "MULTISELECT", "RADIO", "TEXT", "TEXTAREA"] + + types_using_initial_options = ["DROPDOWN", "MULTISELECT", "RADIO"] + + post_url = self._get_cf_object_url(object_name) + "/fields" + cf_object = self._get_cf_obj_rel_path(object_name) + cf_request = { + "active": active, + "description": description, + "label": label, + "position": position, + "type": field_type, + } + if field_type in types_using_initial_options and initial_options: + cf_request.update({"initialOptions": initial_options}) + response = self.execute_post(post_url, data=cf_request) + return response + +def delete_cf(self, object_name, field_id): + '''Delete a custom field from a given object type, e.g. Project, Project Version, Component, etc + + WARNING: Deleting a custom field is irreversiable. Any data in the custom fields could be lost so use with caution. 
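+
+    An illustrative call, where field_id is assumed to come from a prior
+    get_custom_fields() lookup:
+
+        hub.delete_cf("Project", field_id)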
+ ''' + assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) + + delete_url = self._get_cf_object_url(object_name) + "/fields/{}".format(field_id) + return self.execute_delete(delete_url) + +def get_custom_fields(self, object_name): + '''Get the custom field (definition) for a given object type, e.g. Project, Project Version, Component, etc + ''' + assert object_name in self.supported_cf_object_types(), "You must supply a supported object name that is in {}".format(self.supported_cf_object_types()) + + url = self._get_cf_object_url(object_name) + "/fields" + + response = self.execute_get(url) + return response.json() + +def get_cf_values(self, obj): + '''Get all of the custom fields from an object such as a Project, Project Version, Component, etc + + The obj is expected to be the JSON document for a project, project-version, component, etc + ''' + url = self.get_link(obj, "custom-fields") + response = self.execute_get(url) + return response.json() + +def get_cf_value(self, obj, field_id): + '''Get a custom field value from an object such as a Project, Project Version, Component, etc + + The obj is expected to be the JSON document for a project, project-version, component, etc + ''' + url = self.get_link(obj, "custom-fields") + "/{}".format(field_id) + response = self.execute_get(url) + return response.json() + +def put_cf_value(self, cf_url, new_cf_obj): + '''new_cf_obj is expected to be a modified custom field value object with the values updated accordingly, e.g. + call get_cf_value, modify the object, and then call put_cf_value + ''' + return self.execute_put(cf_url, new_cf_obj) From 5ce4d294f1d9c9b39fd2e4b19cdbd6464edafdfb Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Tue, 12 Jan 2021 14:22:48 +0000 Subject: [PATCH 10/12] provide 'from blackduck import Client' --- blackduck/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/blackduck/__init__.py b/blackduck/__init__.py index 5fbbeeb7..134c1ac2 100644 --- a/blackduck/__init__.py +++ b/blackduck/__init__.py @@ -1,2 +1,4 @@ from .HubRestApi import HubInstance + +from .Client import Client From f0577516e037bd5e61b6985dcbd53693ae6678c1 Mon Sep 17 00:00:00 2001 From: Andrew Calder Date: Wed, 20 Jan 2021 12:24:58 +0000 Subject: [PATCH 11/12] Update README.md --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index abcb1d05..f8b8a476 100644 --- a/README.md +++ b/README.md @@ -13,18 +13,18 @@ pip install blackduck ``` ```python -from blackduck.HubRestApi import HubInstance +from blackduck import Client import json -username = "sysadmin" -password = "your-password" -urlbase = "https://ec2-34-201-23-208.compute-1.amazonaws.com" +bd = blackduck.Client( + token=os.environ.get('blackduck_token', 'YOUR TOKEN HERE'), + base_url='https://your.blackduck.url' #!important! 
no trailing slash
+    #, verify=False # if required
+)
+
+for project in bd.get_projects():
+    print(project.get('name'))
+
+```

### Examples

From 8813b97bd2337760e6266c555eb39bf801ae81a2 Mon Sep 17 00:00:00 2001
From: Andrew Calder
Date: Mon, 15 Feb 2021 11:50:02 +0000
Subject: [PATCH 12/12] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index f8b8a476..5f62fb1f 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ pip install blackduck
 from blackduck import Client
 import json
 
-bd = blackduck.Client(
+bd = Client(
     token=os.environ.get('blackduck_token', 'YOUR TOKEN HERE'),
     base_url='https://your.blackduck.url' #!important! no trailing slash