From e3061441e1492f44386d10b1c6426162e6d0c81c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A1rio=20Foganholi=20Fernandes?= Date: Wed, 20 Sep 2023 15:10:37 -0300 Subject: [PATCH 1/2] (chore) Large reformatting with black --- conf/config.py | 101 +- contrib/generate_report.py | 13 +- contrib/get_freshmaker_stats.py | 124 +- docs/conf.py | 41 +- fedmsg.d/freshmaker-logging.py | 16 +- fedmsg.d/freshmaker-scheduler.py | 4 +- fedmsg.d/freshmaker.py | 21 +- freshmaker/__init__.py | 6 +- freshmaker/api_utils.py | 60 +- freshmaker/auth.py | 115 +- freshmaker/config.py | 679 +++--- freshmaker/consumer.py | 55 +- freshmaker/container.py | 8 +- freshmaker/errata.py | 129 +- freshmaker/errors.py | 19 +- freshmaker/events.py | 106 +- freshmaker/handlers/__init__.py | 220 +- freshmaker/handlers/botas/__init__.py | 2 +- .../handlers/botas/botas_shipped_advisory.py | 11 +- ...ncel_event_on_freshmaker_manage_request.py | 30 +- .../update_db_on_odcs_compose_fail.py | 14 +- freshmaker/handlers/koji/__init__.py | 4 +- ...ild_flatpak_application_on_module_ready.py | 32 +- .../rebuild_images_on_async_manual_build.py | 87 +- .../rebuild_images_on_odcs_compose_done.py | 36 +- .../rebuild_images_on_parent_image_build.py | 107 +- .../rebuild_images_on_rpm_advisory_change.py | 157 +- freshmaker/image.py | 358 +-- freshmaker/image_verifier.py | 26 +- freshmaker/kojiservice.py | 150 +- freshmaker/manage.py | 76 +- freshmaker/messaging.py | 44 +- freshmaker/models.py | 380 +-- freshmaker/monitor.py | 137 +- freshmaker/odcsclient.py | 172 +- freshmaker/parsers/__init__.py | 1 + freshmaker/parsers/brew/task_state_change.py | 30 +- .../internal/freshmaker_manage_request.py | 22 +- freshmaker/parsers/internal/manual_rebuild.py | 16 +- freshmaker/parsers/koji/async_manual_build.py | 23 +- freshmaker/parsers/koji/task_state_change.py | 14 +- freshmaker/parsers/odcs/state_change.py | 6 +- freshmaker/producer.py | 25 +- freshmaker/proxy.py | 21 +- freshmaker/pyxis.py | 109 +- freshmaker/types.py | 13 +- freshmaker/utils.py | 74 +- freshmaker/views.py | 457 ++-- setup.py | 86 +- tests/__init__.py | 4 +- tests/conftest.py | 5 +- .../botas/test_botas_shipped_advisory.py | 729 +++--- .../test_freshmaker_manage_request.py | 96 +- .../test_update_db_on_odcs_compose_fail.py | 47 +- ...ild_flatpak_application_on_module_ready.py | 31 +- ...st_rebuild_images_on_async_manual_build.py | 495 ++-- ...est_rebuild_images_on_odcs_compose_done.py | 117 +- ...st_rebuild_images_on_parent_image_build.py | 332 ++- ...t_rebuild_images_on_rpm_advisory_change.py | 2037 +++++++++-------- tests/helpers.py | 236 +- tests/test_auth.py | 195 +- tests/test_config.py | 30 +- tests/test_consumer.py | 21 +- tests/test_container.py | 8 +- tests/test_errata.py | 223 +- tests/test_handler.py | 527 +++-- tests/test_image.py | 13 +- tests/test_image_verifier.py | 2 +- tests/test_kojiservice.py | 4 +- tests/test_messaging.py | 127 +- tests/test_models.py | 274 ++- tests/test_monitor.py | 66 +- tests/test_odcsclient.py | 349 +-- tests/test_producer.py | 41 +- tests/test_pulp.py | 157 +- tests/test_pyxis.py | 473 ++-- tests/test_utils.py | 1 - tests/test_views.py | 1507 ++++++------ 78 files changed, 6808 insertions(+), 5776 deletions(-) diff --git a/conf/config.py b/conf/config.py index 1c504e0d..2f253063 100644 --- a/conf/config.py +++ b/conf/config.py @@ -10,33 +10,31 @@ # declared properly somewhere/somehow confdir = os.path.abspath(os.path.dirname(__file__)) # use parent dir as dbdir else fallback to current dir -dbdir = os.path.abspath(os.path.join(confdir, 
'..')) if confdir.endswith('conf') \ - else confdir +dbdir = os.path.abspath(os.path.join(confdir, "..")) if confdir.endswith("conf") else confdir class BaseConfiguration(object): # Make this random (used to generate session keys) - SECRET_KEY = '74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0' - SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(os.path.join( - dbdir, 'freshmaker.db')) + SECRET_KEY = "74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0" + SQLALCHEMY_DATABASE_URI = "sqlite:///{0}".format(os.path.join(dbdir, "freshmaker.db")) SQLALCHEMY_TRACK_MODIFICATIONS = False - HOST = '0.0.0.0' + HOST = "0.0.0.0" PORT = 5001 - SERVER_NAME = 'localhost:5001' + SERVER_NAME = "localhost:5001" DEBUG = False # Global network-related values, in seconds NET_TIMEOUT = 120 NET_RETRY_INTERVAL = 30 - SYSTEM = 'koji' + SYSTEM = "koji" # Available log levels are: debug, info, warn, error. - LOG_LEVEL = 'info' + LOG_LEVEL = "info" - MESSAGING_TOPIC_PREFIX = ['org.fedoraproject.prod'] + MESSAGING_TOPIC_PREFIX = ["org.fedoraproject.prod"] # Base URL of git repository with source artifacts. GIT_BASE_URL = "git://pkgs.devel.redhat.com" @@ -49,9 +47,9 @@ class BaseConfiguration(object): # Read Koji configuration from profile instead of reading them from # configuration file directly. For staging Koji, it is stg. - KOJI_PROFILE = 'koji' + KOJI_PROFILE = "koji" KOJI_PROXYUSER = False - KOJI_BUILD_OWNER = 'freshmaker' + KOJI_BUILD_OWNER = "freshmaker" # Settings for docker image rebuild handler KOJI_CONTAINER_SCRATCH_BUILD = False @@ -121,12 +119,12 @@ class BaseConfiguration(object): # ODCS configs # URL to ODCS to call APIs - ODCS_SERVER_URL = 'https://odcs.localhost/' + ODCS_SERVER_URL = "https://odcs.localhost/" ODCS_VERIFY_SSL = True # Valid authentication method would be kerberos or openidc - ODCS_AUTH_MECH = 'kerberos' + ODCS_AUTH_MECH = "kerberos" # When use openidc authentcation, set the openidc token for accessing ODCS - ODCS_OPENIDC_TOKEN = '' + ODCS_OPENIDC_TOKEN = "" # Kerberos authentication Settings used to authenticated freshmaker itself # by other services @@ -137,65 +135,66 @@ class BaseConfiguration(object): # Principal used to acquire credential cache. When using a client keytab, # this value must be present in that keytab file. Otherwise, principal must # match the one in specified ccache file. - KRB_AUTH_PRINCIPAL = '' + KRB_AUTH_PRINCIPAL = "" # Path to freshmaker's client keytab file. - KRB_AUTH_CLIENT_KEYTAB = '' + KRB_AUTH_CLIENT_KEYTAB = "" # Path to credential cache file. This optional could be None when not using # a client keytab to acquire credential. KRB_AUTH_CCACHE_FILE = tempfile.mkstemp( - suffix=str(os.getpid()), prefix="freshmaker_cc_") # type: Union[Tuple[int, str], Optional[str]] + suffix=str(os.getpid()), prefix="freshmaker_cc_" + ) # type: Union[Tuple[int, str], Optional[str]] # Select which authentication backend to work with. There are 3 choices Tuple[int, str] # noauth: no authentication is enabled. Useful for development particularly. # kerberos: Kerberos authentication is enabled. # openidc: OpenIDC authentication is enabled. - AUTH_BACKEND = '' + AUTH_BACKEND = "" # Used for Kerberos authentication and to query user's groups. # Format: ldap://hostname[:port] # For example: ldap://ldap.example.com/ - AUTH_LDAP_SERVER = '' + AUTH_LDAP_SERVER = "" # The base to query for users in LDAP. For example, ou=users,dc=example,dc=com. 
- AUTH_LDAP_USER_BASE = '' + AUTH_LDAP_USER_BASE = "" # OIDC provider - AUTH_OPENIDC_USERINFO_URI = 'https://id.fedoraproject.org/openidc/UserInfo' + AUTH_OPENIDC_USERINFO_URI = "https://id.fedoraproject.org/openidc/UserInfo" # OIDC base namespace - OIDC_BASE_NAMESPACE = '' + OIDC_BASE_NAMESPACE = "" # Scope requested from Fedora Infra for permission of submitting request to # run a new compose. # See also: https://fedoraproject.org/wiki/Infrastructure/Authentication # Add additional required scope in following list AUTH_OPENIDC_REQUIRED_SCOPES = [ - 'openid', - 'https://id.fedoraproject.org/scope/groups', + "openid", + "https://id.fedoraproject.org/scope/groups", ] # Select which messaging backend will be used, that could be fedmsg, amq, # in_memory or rhmsg. - MESSAGING = 'fedmsg' + MESSAGING = "fedmsg" MESSAGING_BACKENDS = { - 'fedmsg': { - 'SERVICE': 'freshmaker', + "fedmsg": { + "SERVICE": "freshmaker", }, - 'rhmsg': { + "rhmsg": { # Brokers to connect, e.g. # ['amqps://host:5671', 'amqps://anotherhost:5671'] - 'BROKER_URLS': [], + "BROKER_URLS": [], # Path to certificate file used to authenticate freshmaker - 'CERT_FILE': '', + "CERT_FILE": "", # Path to private key file used to authenticate freshmaker - 'KEY_FILE': '', + "KEY_FILE": "", # Path to trusted CA certificate bundle. - 'CA_CERT': '', - 'TOPIC_PREFIX': 'VirtualTopic.eng.freshmaker', + "CA_CERT": "", + "TOPIC_PREFIX": "VirtualTopic.eng.freshmaker", + }, + "in_memory": { + "SERVICE": "freshmaker", }, - 'in_memory': { - 'SERVICE': 'freshmaker', - } } # repositories that should be searched for unpublished images, specifically because of EUS base images @@ -204,9 +203,9 @@ class BaseConfiguration(object): class DevConfiguration(BaseConfiguration): DEBUG = True - LOG_LEVEL = 'debug' + LOG_LEVEL = "debug" - MESSAGING_TOPIC_PREFIX = ['org.fedoraproject.dev', 'org.fedoraproject.stg'] + MESSAGING_TOPIC_PREFIX = ["org.fedoraproject.dev", "org.fedoraproject.stg"] # Global network-related values, in seconds NET_TIMEOUT = 5 @@ -220,24 +219,24 @@ class DevConfiguration(BaseConfiguration): # credential. Instead, kinit in default ccache with personal principal # often. 
KRB_AUTH_USE_KEYTAB = False - KRB_AUTH_PRINCIPAL = '' # Should be in form name@REAL + KRB_AUTH_PRINCIPAL = "" # Should be in form name@REAL # Use the default ccache KRB_AUTH_CCACHE_FILE = None - AUTH_BACKEND = 'noauth' - AUTH_OPENIDC_USERINFO_URI = 'https://iddev.fedorainfracloud.org/openidc/UserInfo' + AUTH_BACKEND = "noauth" + AUTH_OPENIDC_USERINFO_URI = "https://iddev.fedorainfracloud.org/openidc/UserInfo" class TestConfiguration(BaseConfiguration): - LOG_LEVEL = 'debug' + LOG_LEVEL = "debug" DEBUG = True FRESHMAKER_ROOT_URL = "https://localhost" # Root url of Freshmaker's endpoints - SQLALCHEMY_DATABASE_URI = 'sqlite://' + SQLALCHEMY_DATABASE_URI = "sqlite://" - MESSAGING = 'in_memory' - MESSAGING_SENDER = 'in_memory' + MESSAGING = "in_memory" + MESSAGING_SENDER = "in_memory" # Global network-related values, in seconds NET_TIMEOUT = 1 @@ -245,17 +244,17 @@ class TestConfiguration(BaseConfiguration): KOJI_CONTAINER_SCRATCH_BUILD = True - LIGHTBLUE_SERVER_URL = '' # replace with real dev server url + LIGHTBLUE_SERVER_URL = "" # replace with real dev server url LIGHTBLUE_VERIFY_SSL = False - PYXIS_SERVER_URL = 'https://localhost/' + PYXIS_SERVER_URL = "https://localhost/" # Disable caching for tests DOGPILE_CACHE_BACKEND = "dogpile.cache.null" - AUTH_BACKEND = 'noauth' - AUTH_LDAP_SERVER = 'ldap://ldap.example.com' - AUTH_LDAP_USER_BASE = 'ou=users,dc=example,dc=com' + AUTH_BACKEND = "noauth" + AUTH_LDAP_SERVER = "ldap://ldap.example.com" + AUTH_LDAP_USER_BASE = "ou=users,dc=example,dc=com" MAX_THREAD_WORKERS = 1 HANDLER_BUILD_ALLOWLIST = {} diff --git a/contrib/generate_report.py b/contrib/generate_report.py index 63abd185..e75479cf 100644 --- a/contrib/generate_report.py +++ b/contrib/generate_report.py @@ -22,13 +22,15 @@ """ -ERRATA_URL = 'https://errata.devel.redhat.com/api/v1/' -FRESHMAKER_URL = 'https://freshmaker.engineering.redhat.com/api/1/' +ERRATA_URL = "https://errata.devel.redhat.com/api/v1/" +FRESHMAKER_URL = "https://freshmaker.engineering.redhat.com/api/1/" def get_advisory(errata_id): krb_auth = HTTPKerberosAuth() - r = requests.get(ERRATA_URL + "erratum/%s" % str(errata_id), auth=krb_auth, timeout=conf.requests_timeout) + r = requests.get( + ERRATA_URL + "erratum/%s" % str(errata_id), auth=krb_auth, timeout=conf.requests_timeout + ) r.raise_for_status() data = r.json() return data["errata"].values()[0] @@ -45,7 +47,7 @@ def get_freshmaker_build(search_key, original_nvr): return None -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("SEARCH_KEY", help="Freshmaker's search_key") parser.add_argument("ORIGINAL_NVR", help="Freshmaker's original_nvr") @@ -61,7 +63,8 @@ def get_freshmaker_build(search_key, original_nvr): template_data = { "freshmaker_date": build["time_completed"].split("T")[0], "original_nvr": build["original_nvr"], - "freshmaker_brew_build": "https://brewweb.engineering.redhat.com/brew/taskinfo?taskID=%d" % build["build_id"], + "freshmaker_brew_build": "https://brewweb.engineering.redhat.com/brew/taskinfo?taskID=%d" + % build["build_id"], "rhsa_advisory": "https://errata.devel.redhat.com/advisory/%s" % event["search_key"], "container_advisory": "https://errata.devel.redhat.com/advisory/%s" % container_advisory, "container_advisory_date": errata["issue_date"].split("T")[0], diff --git a/contrib/get_freshmaker_stats.py b/contrib/get_freshmaker_stats.py index 46f1152f..e844d2d4 100644 --- a/contrib/get_freshmaker_stats.py +++ b/contrib/get_freshmaker_stats.py @@ -17,38 +17,33 @@ from lightblue.query 
import LightBlueQuery -LB_DATA_URL = 'https://datasvc.periwinkle.corp.redhat.com/rest/data' -LB_META_URL = 'https://datasvc.periwinkle.corp.redhat.com/rest/metadata' -ERRATA_URL = 'https://errata.devel.redhat.com/api/v1/' -FRESHMAKER_URL = 'https://freshmaker.engineering.redhat.com/api/1/' +LB_DATA_URL = "https://datasvc.periwinkle.corp.redhat.com/rest/data" +LB_META_URL = "https://datasvc.periwinkle.corp.redhat.com/rest/metadata" +ERRATA_URL = "https://errata.devel.redhat.com/api/v1/" +FRESHMAKER_URL = "https://freshmaker.engineering.redhat.com/api/1/" def get_images_fixing_rhsa(service, start, finish): - interface = LightBlueEntity(service, 'containerImage') - advisory_type = 'RHSA' + interface = LightBlueEntity(service, "containerImage") + advisory_type = "RHSA" query = LightBlueQuery( interface, - ('repositories.*.published', '=', True), - ('createdBy', '=', 'metaxor'), - ('creationDate', '$gte', '%sT00:00:00.000-0000' % start), - ('creationDate', '$lte', '%sT00:00:00.000-0000' % finish), + ("repositories.*.published", "=", True), + ("createdBy", "=", "metaxor"), + ("creationDate", "$gte", "%sT00:00:00.000-0000" % start), + ("creationDate", "$lte", "%sT00:00:00.000-0000" % finish), ) - query.add_raw_query({ - "field": "repositories.*.content_advisory_ids.*", - "regex": "%s.*" % advisory_type - }) - query._add_to_projection('repositories.*.content_advisory_ids.*') - query._add_to_projection('brew.build') - return query.find()['processed'] + query.add_raw_query( + {"field": "repositories.*.content_advisory_ids.*", "regex": "%s.*" % advisory_type} + ) + query._add_to_projection("repositories.*.content_advisory_ids.*") + query._add_to_projection("brew.build") + return query.find()["processed"] def get_important_critical_ids(service): - interface = LightBlueEntity(service, 'redHatContainerAdvisory') - query = { - "field": "severity", - "op": "$in", - "values": ["Important", "Critical"] - } + interface = LightBlueEntity(service, "redHatContainerAdvisory") + query = {"field": "severity", "op": "$in", "values": ["Important", "Critical"]} projection = {"field": "_id", "include": True} @@ -57,7 +52,7 @@ def get_important_critical_ids(service): if not interface.check_response(response): logging.warning(response) return [] - return set([adv['_id'] for adv in response['processed']]) + return set([adv["_id"] for adv in response["processed"]]) def group_images_by_content_advisory(images): @@ -89,10 +84,12 @@ def get_image_advisories_from_image_nvrs(grouped_images): if nvr in nvr_to_image_erratum: continue - r = requests.get(ERRATA_URL + "build/" + nvr, auth=krb_auth, timeout=conf.requests_timeout) + r = requests.get( + ERRATA_URL + "build/" + nvr, auth=krb_auth, timeout=conf.requests_timeout + ) r.raise_for_status() data = r.json() - if not data['all_errata']: + if not data["all_errata"]: # Super weird. This means we have a container that wasn't shipped via an advisory. 
logging.warn("Failed to find errata for %s at %s" % (nvr, r.request.url)) continue @@ -103,7 +100,11 @@ def get_image_advisories_from_image_nvrs(grouped_images): msg = "[%i/%i]: %s" % (nvrs_checks, nvrs_to_check, errata_name) sys.stdout.write(msg + chr(8) * len(msg)) sys.stdout.flush() - r = requests.get(ERRATA_URL + "erratum/%s/builds" % (errata_name), auth=krb_auth, timeout=conf.requests_timeout) + r = requests.get( + ERRATA_URL + "erratum/%s/builds" % (errata_name), + auth=krb_auth, + timeout=conf.requests_timeout, + ) r.raise_for_status() data = r.json() for builds_dict in data.values(): @@ -115,7 +116,9 @@ def get_image_advisories_from_image_nvrs(grouped_images): def is_content_advisory_rebuilt_by_freshmaker(errata_name): krb_auth = HTTPKerberosAuth() - r = requests.get(ERRATA_URL + "erratum/" + errata_name, auth=krb_auth, timeout=conf.requests_timeout) + r = requests.get( + ERRATA_URL + "erratum/" + errata_name, auth=krb_auth, timeout=conf.requests_timeout + ) r.raise_for_status() data = r.json() errata_id = str(data["content"]["content"]["errata_id"]) @@ -156,8 +159,8 @@ def show_advisories(security_images, freshmaker_images): # content advisory, filter them out to keep only those images # which have not been rebuilt by Freshmaker. non_freshmaker_nvrs = [ - nvr for nvr in nvrs - if nvr not in freshmaker_advisories[content_advisory]] + nvr for nvr in nvrs if nvr not in freshmaker_advisories[content_advisory] + ] else: # In case Freshmaker did not rebuild this advisory at all, keep # all the NVRs in the list. @@ -170,37 +173,54 @@ def show_advisories(security_images, freshmaker_images): continue errata_id, errata_name, products = nvr_to_image_erratum[nvr] if errata_name not in advisories: - freshmaker_url = is_content_advisory_rebuilt_by_freshmaker( - content_advisory) + freshmaker_url = is_content_advisory_rebuilt_by_freshmaker(content_advisory) advisories[errata_name] = { - "nvrs": [], "freshmaker_url": freshmaker_url, - "errata_id": errata_id, "products": products} + "nvrs": [], + "freshmaker_url": freshmaker_url, + "errata_id": errata_id, + "products": products, + } advisories[errata_name]["nvrs"].append(nvr) # Print the table table = [["Name", "Errata URL", "Freshmaker URL", "Products"]] for advisory, data in advisories.items(): - table.append([ - advisory, - "https://errata.devel.redhat.com/advisory/" + str(data["errata_id"]), - str(data["freshmaker_url"]), data["products"]]) + table.append( + [ + advisory, + "https://errata.devel.redhat.com/advisory/" + str(data["errata_id"]), + str(data["freshmaker_url"]), + data["products"], + ] + ) print(tabulate(sorted(table, key=lambda x: x[0]), headers="firstrow")) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("FROM", help="Date to start report from (YYYYMMDD)") - parser.add_argument("-T", "--to", help="Date to end report at (YYYYMMDD)", - default=date.today().strftime("%Y%m%d")) + parser.add_argument( + "-T", + "--to", + help="Date to end report at (YYYYMMDD)", + default=date.today().strftime("%Y%m%d"), + ) parser.add_argument("-c", "--lb-cert", help="Path to lightblue cert") - parser.add_argument("-A", "--all", help="Include all RHSA in report " - "(not just important/critical", action='store_true', - default=False) - parser.add_argument("-d", "--debug", help="Debugging info", - action='store_true', default=False) - parser.add_argument("-s", "--show-advisories", - help="Show list of advisories together with stats", - action='store_true', default=False) + 
parser.add_argument( + "-A", + "--all", + help="Include all RHSA in report " "(not just important/critical", + action="store_true", + default=False, + ) + parser.add_argument("-d", "--debug", help="Debugging info", action="store_true", default=False) + parser.add_argument( + "-s", + "--show-advisories", + help="Show list of advisories together with stats", + action="store_true", + default=False, + ) args = parser.parse_args() @@ -231,15 +251,15 @@ def show_advisories(security_images, freshmaker_images): for image in images: if image in security_images: continue - for repo in image['repositories']: - ids = set(repo['content_advisory_ids']) + for repo in image["repositories"]: + ids = set(repo["content_advisory_ids"]) if ids.intersection(important_ids): security_images.append(image) break freshmaker_images = [] for image in security_images: - if re.match(r'.*\d{10}$', image['brew']['build']): + if re.match(r".*\d{10}$", image["brew"]["build"]): freshmaker_images.append(image) logging.debug("All shipped containers with security fixes: ") diff --git a/docs/conf.py b/docs/conf.py index 9fd47f1f..e2bffe44 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -19,7 +19,8 @@ import os import sys import pkg_resources -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))) + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../"))) # -- General configuration ------------------------------------------------ @@ -27,58 +28,58 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - 'sphinx.ext.napoleon', - 'sphinxcontrib.autohttp.flask', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", + "sphinx.ext.napoleon", + "sphinxcontrib.autohttp.flask", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Freshmaker' -copyright = '2020, Red Hat, Inc. and others' -author = 'Red Hat, Inc. and others' +project = "Freshmaker" +copyright = "2020, Red Hat, Inc. and others" +author = "Red Hat, Inc. and others" try: - version = pkg_resources.get_distribution('freshmaker').version + version = pkg_resources.get_distribution("freshmaker").version except pkg_resources.DistributionNotFound: - version = 'unknown' + version = "unknown" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = False # -- Options for HTML output ---------------------------------------------- -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" html_static_path = [] # -- Options for HTMLHelp output ------------------------------------------ -htmlhelp_basename = 'Freshmakerdoc' +htmlhelp_basename = "Freshmakerdoc" # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'python': ('https://docs.python.org/3/', 'python-intersphinx.inv')} +intersphinx_mapping = {"python": ("https://docs.python.org/3/", "python-intersphinx.inv")} diff --git a/fedmsg.d/freshmaker-logging.py b/fedmsg.d/freshmaker-logging.py index a6228d4c..a50b8c3e 100644 --- a/fedmsg.d/freshmaker-logging.py +++ b/fedmsg.d/freshmaker-logging.py @@ -1,17 +1,17 @@ config = dict( logging=dict( handlers={ - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'bare', - 'level': 'INFO', - 'stream': 'ext://sys.stdout', + "console": { + "class": "logging.StreamHandler", + "formatter": "bare", + "level": "INFO", + "stream": "ext://sys.stdout", }, }, formatters={ - 'bare': { - 'datefmt': '%Y-%m-%d %H:%M:%S', - 'format': '[%(asctime)s][%(name)10s %(levelname)7s] %(message)s' + "bare": { + "datefmt": "%Y-%m-%d %H:%M:%S", + "format": "[%(asctime)s][%(name)10s %(levelname)7s] %(message)s", }, }, loggers=dict( diff --git a/fedmsg.d/freshmaker-scheduler.py b/fedmsg.d/freshmaker-scheduler.py index 094ba3f5..d11f1c8e 100644 --- a/fedmsg.d/freshmaker-scheduler.py +++ b/fedmsg.d/freshmaker-scheduler.py @@ -1,4 +1,4 @@ config = { - 'freshmakerconsumer': True, - 'freshmakerproducer': True, + "freshmakerconsumer": True, + "freshmakerproducer": True, } diff --git a/fedmsg.d/freshmaker.py b/fedmsg.d/freshmaker.py index a7379476..208d4f7d 100644 --- a/fedmsg.d/freshmaker.py +++ b/fedmsg.d/freshmaker.py @@ -5,10 +5,8 @@ config = { # Just for dev. "validate_signatures": False, - # Talk to the relay, so things also make it to composer.stg in our dev env "active": True, - # Since we're in active mode, we don't need to declare any of our own # passive endpoints. This placeholder value needs to be here for the tests # to pass in Jenkins, though. \o/ @@ -19,7 +17,6 @@ # "tcp://stg.fedoraproject.org:9940", # ], }, - # Start of code signing configuration # 'sign_messages': True, # 'validate_signatures': True, @@ -39,12 +36,16 @@ } # developer's instance (docker/vagrant/...) -if 'FRESHMAKER_DEVELOPER_ENV' in os.environ and \ - os.environ['FRESHMAKER_DEVELOPER_ENV'].lower() in ( - '1', 'on', 'true', 'y', 'yes'): - config['endpoints']['relay_outbound'] = ["tcp://127.0.0.1:2001"] - config['relay_inbound'] = ["tcp://127.0.0.1:2003"] +if "FRESHMAKER_DEVELOPER_ENV" in os.environ and os.environ["FRESHMAKER_DEVELOPER_ENV"].lower() in ( + "1", + "on", + "true", + "y", + "yes", +): + config["endpoints"]["relay_outbound"] = ["tcp://127.0.0.1:2001"] + config["relay_inbound"] = ["tcp://127.0.0.1:2003"] else: # These configuration values are reasonable for most other configurations. 
- config['endpoints']['relay_outbound'] = ["tcp://127.0.0.1:4001"] - config['relay_inbound'] = ["tcp://127.0.0.1:2003"] + config["endpoints"]["relay_outbound"] = ["tcp://127.0.0.1:4001"] + config["relay_inbound"] = ["tcp://127.0.0.1:2003"] diff --git a/freshmaker/__init__.py b/freshmaker/__init__.py index 166e32ee..75bb2153 100644 --- a/freshmaker/__init__.py +++ b/freshmaker/__init__.py @@ -37,9 +37,9 @@ from freshmaker.proxy import ReverseProxy try: - version = pkg_resources.get_distribution('freshmaker').version + version = pkg_resources.get_distribution("freshmaker").version except pkg_resources.DistributionNotFound: - version = 'unknown' + version = "unknown" app = Flask(__name__) # type: Any app.wsgi_app = ReverseProxy(app.wsgi_app) @@ -56,9 +56,11 @@ login_manager.init_app(app) from freshmaker.auth import init_auth # noqa + init_auth(login_manager, conf.auth_backend) from freshmaker import views # noqa from freshmaker.monitor import db_hook_event_listeners # noqa + db_hook_event_listeners(target=db.engine) diff --git a/freshmaker/api_utils.py b/freshmaker/api_utils.py index 9b063daa..44ba7042 100644 --- a/freshmaker/api_utils.py +++ b/freshmaker/api_utils.py @@ -101,7 +101,7 @@ def _order_by(flask_request, query, base_class, allowed_keys, default_key): If "order_by" argument starts with minus sign ('-'), the descending order is used. """ - order_by = flask_request.args.get('order_by', default_key, type=str) + order_by = flask_request.args.get("order_by", default_key, type=str) if order_by and len(order_by) > 1 and order_by[0] == "-": order_asc = False order_by = order_by[1:] @@ -110,8 +110,8 @@ def _order_by(flask_request, query, base_class, allowed_keys, default_key): if order_by not in allowed_keys: raise ValueError( - 'An invalid order_by key was suplied, allowed keys are: ' - '%r' % allowed_keys) + "An invalid order_by key was suplied, allowed keys are: " "%r" % allowed_keys + ) order_by_attr = getattr(base_class, order_by) if not order_asc: @@ -127,34 +127,33 @@ def filter_artifact_builds(flask_request): """ search_query = dict() - artifact_type = flask_request.args.get('type', None) + artifact_type = flask_request.args.get("type", None) if artifact_type: if artifact_type.isdigit(): if int(artifact_type) in [t.value for t in list(ArtifactType)]: - search_query['type'] = artifact_type + search_query["type"] = artifact_type else: - raise ValueError('An invalid artifact type was supplied') + raise ValueError("An invalid artifact type was supplied") else: if str(artifact_type).upper() in [t.name for t in list(ArtifactType)]: - search_query['type'] = ArtifactType[artifact_type.upper()].value + search_query["type"] = ArtifactType[artifact_type.upper()].value else: - raise ValueError('An invalid artifact type was supplied') + raise ValueError("An invalid artifact type was supplied") - state = flask_request.args.get('state', None) + state = flask_request.args.get("state", None) if state: if state.isdigit(): if int(state) in [s.value for s in list(ArtifactBuildState)]: - search_query['state'] = state + search_query["state"] = state else: - raise ValueError('An invalid state was supplied') + raise ValueError("An invalid state was supplied") else: if str(state).upper() in [s.name for s in list(ArtifactBuildState)]: - search_query['state'] = ArtifactBuildState[state.upper()].value + search_query["state"] = ArtifactBuildState[state.upper()].value else: - raise ValueError('An invalid state was supplied') + raise ValueError("An invalid state was supplied") - for key in ['name', 'event_id', 
'dep_on_id', 'build_id', 'original_nvr', - 'rebuilt_nvr']: + for key in ["name", "event_id", "dep_on_id", "build_id", "original_nvr", "rebuilt_nvr"]: if flask_request.args.get(key, None): search_query[key] = flask_request.args[key] @@ -163,23 +162,27 @@ def filter_artifact_builds(flask_request): if search_query: query = query.filter_by(**search_query) - event_type_id = flask_request.args.get('event_type_id', None) + event_type_id = flask_request.args.get("event_type_id", None) if event_type_id: query = query.join(Event).filter(Event.event_type_id == event_type_id) - event_search_key = flask_request.args.get('event_search_key', None) + event_search_key = flask_request.args.get("event_search_key", None) if event_search_key: # use alias to avoid 'ambiguous column name' error when we have both # event_type_id and event_search_key specified. ea = db.aliased(Event) query = query.join(ea).filter(ea.search_key == event_search_key) - query = _order_by(flask_request, query, ArtifactBuild, - ["id", "name", "event_id", "dep_on_id", "build_id", - "original_nvr", "rebuilt_nvr"], "-id") + query = _order_by( + flask_request, + query, + ArtifactBuild, + ["id", "name", "event_id", "dep_on_id", "build_id", "original_nvr", "rebuilt_nvr"], + "-id", + ) - page = flask_request.args.get('page', 1, type=int) - per_page = flask_request.args.get('per_page', 10, type=int) + page = flask_request.args.get("page", 1, type=int) + per_page = flask_request.args.get("per_page", 10, type=int) return query.paginate(page, per_page, False) @@ -192,7 +195,7 @@ def filter_events(flask_request): query = Event.query - for key in ['message_id', 'search_key', 'event_type_id', 'requester']: + for key in ["message_id", "search_key", "event_type_id", "requester"]: values = flask_request.args.getlist(key) if not values: continue @@ -216,17 +219,14 @@ def filter_events(flask_request): if search_states: query = query.filter(Event.state.in_(search_states)) - query = _order_by(flask_request, query, Event, - ["id", "message_id"], "-id") + query = _order_by(flask_request, query, Event, ["id", "message_id"], "-id") - page = flask_request.args.get('page', 1, type=int) - per_page = flask_request.args.get('per_page', 10, type=int) + page = flask_request.args.get("page", 1, type=int) + per_page = flask_request.args.get("per_page", 10, type=int) return query.paginate(page, per_page, False) def json_error(status, error, message): - response = jsonify({'status': status, - 'error': error, - 'message': message}) + response = jsonify({"status": status, "error": error, "message": message}) response.status_code = status return response diff --git a/freshmaker/auth.py b/freshmaker/auth.py index ca4d31c9..45eca352 100644 --- a/freshmaker/auth.py +++ b/freshmaker/auth.py @@ -43,13 +43,17 @@ def _validate_kerberos_config(): """ errors = [] if not conf.auth_ldap_server: - errors.append("kerberos authentication enabled with no LDAP server " - "configured, check AUTH_LDAP_SERVER in your config.") + errors.append( + "kerberos authentication enabled with no LDAP server " + "configured, check AUTH_LDAP_SERVER in your config." + ) if not conf.auth_ldap_user_base: - errors.append("kerberos authentication enabled with no LDAP user " - "base configured, check AUTH_LDAP_USER_BASE in your " - "config.") + errors.append( + "kerberos authentication enabled with no LDAP user " + "base configured, check AUTH_LDAP_USER_BASE in your " + "config." 
+ ) if errors: for error in errors: @@ -64,11 +68,11 @@ def load_krb_user_from_request(request): REMOTE_USER needs to be set in environment variable, that is set by frontend Apache authentication module. """ - remote_user = request.environ.get('REMOTE_USER') + remote_user = request.environ.get("REMOTE_USER") if not remote_user: - raise Unauthorized('REMOTE_USER is not present in request.') + raise Unauthorized("REMOTE_USER is not present in request.") - username, realm = remote_user.split('@') + username, realm = remote_user.split("@") user = User.find_user_by_name(username) if not user: @@ -77,8 +81,7 @@ def load_krb_user_from_request(request): try: groups = query_ldap_groups(username) except ldap.SERVER_DOWN as e: - log.error('Cannot query groups of %s from LDAP. Error: %s', - username, e.args[0]['desc']) + log.error("Cannot query groups of %s from LDAP. Error: %s", username, e.args[0]["desc"]) groups = [] g.groups = groups @@ -94,13 +97,13 @@ def load_ssl_user_from_request(request): SSL_CLIENT_VERIFY and SSL_CLIENT_S_DN needs to be set in request.environ. This is set by frontend httpd mod_ssl module. """ - ssl_client_verify = request.environ.get('SSL_CLIENT_VERIFY') - if ssl_client_verify != 'SUCCESS': - raise Unauthorized('Cannot verify client: %s' % ssl_client_verify) + ssl_client_verify = request.environ.get("SSL_CLIENT_VERIFY") + if ssl_client_verify != "SUCCESS": + raise Unauthorized("Cannot verify client: %s" % ssl_client_verify) - username = request.environ.get('SSL_CLIENT_S_DN') + username = request.environ.get("SSL_CLIENT_S_DN") if not username: - raise Unauthorized('Unable to get user information (DN) from client certificate') + raise Unauthorized("Unable to get user information (DN) from client certificate") user = User.find_user_by_name(username) if not user: @@ -115,7 +118,7 @@ def load_krb_or_ssl_user_from_request(request): """ Loads User using Kerberos or SSL auth. 
""" - if request.environ.get('REMOTE_USER'): + if request.environ.get("REMOTE_USER"): return load_krb_user_from_request(request) else: return load_ssl_user_from_request(request) @@ -133,8 +136,8 @@ def query_ldap_groups(uid): users = client.search_s( conf.auth_ldap_user_base, ldap.SCOPE_ONELEVEL, - attrlist=['memberOf'], - filterstr=f'(&(uid={uid})(objectClass=posixAccount))', + attrlist=["memberOf"], + filterstr=f"(&(uid={uid})(objectClass=posixAccount))", ) group_distinguished_names = set() @@ -144,8 +147,8 @@ def query_ldap_groups(uid): _, user_attributes = users[0] group_distinguished_names = { # The value of group is the entire distinguished name of the group - group.decode('utf-8') - for group in user_attributes.get('memberOf', []) + group.decode("utf-8") + for group in user_attributes.get("memberOf", []) } return group_distinguished_names @@ -154,17 +157,17 @@ def query_ldap_groups(uid): @commit_on_success def load_openidc_user(request): """Load FAS user from current request""" - username = request.environ.get('REMOTE_USER') + username = request.environ.get("REMOTE_USER") if not username: - raise Unauthorized('REMOTE_USER is not present in request.') + raise Unauthorized("REMOTE_USER is not present in request.") - token = request.environ.get('OIDC_access_token') + token = request.environ.get("OIDC_access_token") if not token: - raise Unauthorized('Missing token passed to Freshmaker.') + raise Unauthorized("Missing token passed to Freshmaker.") - scope = request.environ.get('OIDC_CLAIM_scope') + scope = request.environ.get("OIDC_CLAIM_scope") if not scope: - raise Unauthorized('Missing OIDC_CLAIM_scope.') + raise Unauthorized("Missing OIDC_CLAIM_scope.") validate_scopes(scope) user_info = get_user_info(token) @@ -173,9 +176,9 @@ def load_openidc_user(request): if not user: user = User.create_user(username=username) - g.groups = user_info.get('groups', []) + g.groups = user_info.get("groups", []) g.user = user - g.oidc_scopes = scope.split(' ') + g.oidc_scopes = scope.split(" ") return user @@ -185,45 +188,45 @@ def validate_scopes(scope): :param str scope: scope passed in from. :raises: Unauthorized if any of required scopes is not present. 
""" - scopes = scope.split(' ') + scopes = scope.split(" ") required_scopes = conf.auth_openidc_required_scopes for scope in required_scopes: if scope not in scopes: - raise Unauthorized( - 'Required OIDC scope {0} not present.'.format(scope)) + raise Unauthorized("Required OIDC scope {0} not present.".format(scope)) def require_oidc_scope(scope): """Check if required scopes is in OIDC scopes within request""" - full_scope = '{0}{1}'.format(conf.oidc_base_namespace, scope) + full_scope = "{0}{1}".format(conf.oidc_base_namespace, scope) if conf.auth_backend == "openidc" and full_scope not in g.oidc_scopes: - message = 'Request does not have required scope %s' % scope + message = "Request does not have required scope %s" % scope log.error(message) raise Forbidden(message) def require_scopes(*scopes): """Check if required scopes is in OIDC scopes within request""" + def wrapper(f): @wraps(f) def decorator(*args, **kwargs): for scope in scopes: require_oidc_scope(scope) return f(*args, **kwargs) + return decorator + return wrapper def get_user_info(token): """Query FAS groups from Fedora""" - headers = { - 'authorization': 'Bearer {0}'.format(token) - } + headers = {"authorization": "Bearer {0}".format(token)} r = requests.get(conf.auth_openidc_userinfo_uri, headers=headers, timeout=conf.requests_timeout) if r.status_code != 200: raise Unauthorized( - 'Cannot get user information from {0} endpoint.'.format( - conf.auth_openidc_userinfo_uri)) + "Cannot get user information from {0} endpoint.".format(conf.auth_openidc_userinfo_uri) + ) return r.json() @@ -234,30 +237,29 @@ def init_auth(login_manager, backend): Enable and initialize authentication backend to work with frontend authentication module running in Apache. """ - if backend == 'noauth': + if backend == "noauth": # Do not enable any authentication backend working with frontend # authentication module in Apache. 
log.warning("Authorization is disabled in Freshmaker configuration.") return - if backend == 'kerberos': + if backend == "kerberos": _validate_kerberos_config() global load_krb_user_from_request - load_krb_user_from_request = login_manager.request_loader( - load_krb_user_from_request) - elif backend == 'openidc': + load_krb_user_from_request = login_manager.request_loader(load_krb_user_from_request) + elif backend == "openidc": global load_openidc_user load_openidc_user = login_manager.request_loader(load_openidc_user) - elif backend == 'kerberos_or_ssl': + elif backend == "kerberos_or_ssl": _validate_kerberos_config() global load_krb_or_ssl_user_from_request load_krb_or_ssl_user_from_request = login_manager.request_loader( - load_krb_or_ssl_user_from_request) - elif backend == 'ssl': + load_krb_or_ssl_user_from_request + ) + elif backend == "ssl": global load_ssl_user_from_request - load_ssl_user_from_request = login_manager.request_loader( - load_ssl_user_from_request) + load_ssl_user_from_request = login_manager.request_loader(load_ssl_user_from_request) else: - raise ValueError('Unknown backend name {0}.'.format(backend)) + raise ValueError("Unknown backend name {0}.".format(backend)) def user_has_role(role): @@ -268,11 +270,11 @@ def user_has_role(role): :return: a boolean determining if the user has the role :rtype: bool """ - if conf.auth_backend == 'noauth': + if conf.auth_backend == "noauth": return True - groups = conf.permissions[role]['groups'] - users = conf.permissions[role]['users'] + groups = conf.permissions[role]["groups"] + users = conf.permissions[role]["users"] in_groups = bool(set(flask.g.groups) & set(groups)) in_users = flask.g.user.username in users return in_groups or in_users @@ -285,6 +287,7 @@ def requires_roles(roles): :param list roles: the list of role names to verify :raises freshmaker.errors.Forbidden: if the user is not in the role """ + def wrapper(f): @wraps(f) def wrapped(*args, **kwargs): @@ -292,10 +295,12 @@ def wrapped(*args, **kwargs): return f(*args, **kwargs) raise Forbidden( - f'User {flask.g.user.username} does not have any of the following ' + f"User {flask.g.user.username} does not have any of the following " f'roles: {", ".join(roles)}' ) + return wrapped + return wrapper @@ -304,9 +309,11 @@ def login_required(f): Wrapper of flask_login's login_required to ingore auth check when auth backend is 'noauth'. 
""" + @wraps(f) def wrapped(*args, **kwargs): - if conf.auth_backend == 'noauth': + if conf.auth_backend == "noauth": return f(*args, **kwargs) return _login_required(f)(*args, **kwargs) + return wrapped diff --git a/freshmaker/config.py b/freshmaker/config.py index bc194d85..7ee4bb97 100644 --- a/freshmaker/config.py +++ b/freshmaker/config.py @@ -32,7 +32,7 @@ from os import sys # type: ignore from freshmaker import logger -PROD_CONFIG_FILE = '/etc/freshmaker/config.py' +PROD_CONFIG_FILE = "/etc/freshmaker/config.py" def any_(*rules): @@ -66,30 +66,35 @@ def get_config_section_module(): """ from conf import config - config_section = 'DevConfiguration' + config_section = "DevConfiguration" config_module = None # TestConfiguration shall only be used for running tests - test_env = os.environ.get('FRESHMAKER_TESTING_ENV', '').lower() - test_executables = {'py.test', 'pytest', 'pytest.py'} - if (os.path.basename(sys.argv[0]) in test_executables or - test_env in ('1', 'on', 'true', 'y', 'yes')): - config_section = 'TestConfiguration' + test_env = os.environ.get("FRESHMAKER_TESTING_ENV", "").lower() + test_executables = {"py.test", "pytest", "pytest.py"} + if os.path.basename(sys.argv[0]) in test_executables or test_env in ( + "1", + "on", + "true", + "y", + "yes", + ): + config_section = "TestConfiguration" config_module = config - elif os.environ.get('FRESHMAKER_DEVELOPER_ENV', '').lower() in ('1', 'on', 'true', 'y', 'yes'): - config_section = 'DevConfiguration' - if 'FRESHMAKER_CONFIG_FILE' not in os.environ: + elif os.environ.get("FRESHMAKER_DEVELOPER_ENV", "").lower() in ("1", "on", "true", "y", "yes"): + config_section = "DevConfiguration" + if "FRESHMAKER_CONFIG_FILE" not in os.environ: config_module = config # Try getting config_section from os.environ - elif 'FRESHMAKER_CONFIG_SECTION' in os.environ: - config_section = os.environ['FRESHMAKER_CONFIG_SECTION'] + elif "FRESHMAKER_CONFIG_SECTION" in os.environ: + config_section = os.environ["FRESHMAKER_CONFIG_SECTION"] # Automagically detect production environment: # - existing and readable config_file presets ProdConfiguration elif os.path.exists(PROD_CONFIG_FILE) and os.access(PROD_CONFIG_FILE, os.O_RDONLY): - config_section = 'ProdConfiguration' + config_section = "ProdConfiguration" return (config_section, config_module) @@ -99,7 +104,7 @@ def get_config_module_from_file(config_file): Try loading configuration module from a file """ try: - config_module = imp.load_source('freshmaker_runtime_config', config_file) + config_module = imp.load_source("freshmaker_runtime_config", config_file) except IOError: raise SystemError("Configuration file {} was not found.".format(config_file)) @@ -121,8 +126,8 @@ def init_config(app): Configure Freshmaker """ config_file = PROD_CONFIG_FILE - if 'FRESHMAKER_CONFIG_FILE' in os.environ: - config_file = os.environ['FRESHMAKER_CONFIG_FILE'] + if "FRESHMAKER_CONFIG_FILE" in os.environ: + config_file = os.environ["FRESHMAKER_CONFIG_FILE"] config_section, config_module = get_config_section_module() @@ -134,310 +139,316 @@ def init_config(app): class Config(object): """Class representing the freshmaker configuration.""" + _defaults = { - 'debug': { - 'type': bool, - 'default': False, - 'desc': 'Debug mode'}, - 'log_level': { - 'type': str, - 'default': 0, - 'desc': 'Log level'}, - 'messaging': { - 'type': str, - 'default': 'fedmsg', - 'desc': 'The messaging system to use.'}, - 'messaging_sender': { - 'type': str, - 'default': 'fedmsg', - 'desc': 'The messaging system to use for sending msgs.'}, - 
'messaging_topic_prefix': { - 'type': list, - 'default': ['org.fedoraproject.prod'], - 'desc': 'The messaging system topic prefixes which we are interested in.'}, - 'net_timeout': { - 'type': int, - 'default': 120, - 'desc': 'Global network timeout for read/write operations, in seconds.'}, - 'requests_timeout': { - 'type': int, - 'default': 120, - 'desc': ' Global timeout for HTTP requests in Freshmaker.'}, - 'net_retry_interval': { - 'type': int, - 'default': 30, - 'desc': 'Global network retry interval for read/write operations, in seconds.'}, - 'parsers': { - 'type': list, - 'default': [ - 'freshmaker.parsers.koji:FreshmakerAsyncManualbuildParser', - 'freshmaker.parsers.internal:FreshmakerManualRebuildParser', - 'freshmaker.parsers.brew:BrewTaskStateChangeParser', - 'freshmaker.parsers.errata:ErrataAdvisorySigningChangedParser', - 'freshmaker.parsers.errata:ErrataAdvisoryStateChangedParser', - 'freshmaker.parsers.odcs:ComposeStateChangeParser', + "debug": {"type": bool, "default": False, "desc": "Debug mode"}, + "log_level": {"type": str, "default": 0, "desc": "Log level"}, + "messaging": {"type": str, "default": "fedmsg", "desc": "The messaging system to use."}, + "messaging_sender": { + "type": str, + "default": "fedmsg", + "desc": "The messaging system to use for sending msgs.", + }, + "messaging_topic_prefix": { + "type": list, + "default": ["org.fedoraproject.prod"], + "desc": "The messaging system topic prefixes which we are interested in.", + }, + "net_timeout": { + "type": int, + "default": 120, + "desc": "Global network timeout for read/write operations, in seconds.", + }, + "requests_timeout": { + "type": int, + "default": 120, + "desc": " Global timeout for HTTP requests in Freshmaker.", + }, + "net_retry_interval": { + "type": int, + "default": 30, + "desc": "Global network retry interval for read/write operations, in seconds.", + }, + "parsers": { + "type": list, + "default": [ + "freshmaker.parsers.koji:FreshmakerAsyncManualbuildParser", + "freshmaker.parsers.internal:FreshmakerManualRebuildParser", + "freshmaker.parsers.brew:BrewTaskStateChangeParser", + "freshmaker.parsers.errata:ErrataAdvisorySigningChangedParser", + "freshmaker.parsers.errata:ErrataAdvisoryStateChangedParser", + "freshmaker.parsers.odcs:ComposeStateChangeParser", ], - 'desc': 'Parsers defined for parse specific messages.'}, - 'handlers': { - 'type': list, - 'default': [ - 'freshmaker.handlers.koji:RebuildImagesOnParentImageBuild', - 'freshmaker.handlers.koji:RebuildImagesOnRPMAdvisoryChange', - 'freshmaker.handlers.koji:RebuildImagesOnODCSComposeDone', - 'freshmaker.handlers.koji:RebuildImagesOnAsyncManualBuild', - 'freshmaker.handlers.botas:HandleBotasAdvisory' + "desc": "Parsers defined for parse specific messages.", + }, + "handlers": { + "type": list, + "default": [ + "freshmaker.handlers.koji:RebuildImagesOnParentImageBuild", + "freshmaker.handlers.koji:RebuildImagesOnRPMAdvisoryChange", + "freshmaker.handlers.koji:RebuildImagesOnODCSComposeDone", + "freshmaker.handlers.koji:RebuildImagesOnAsyncManualBuild", + "freshmaker.handlers.botas:HandleBotasAdvisory", ], - 'desc': 'List of enabled handlers.'}, - 'polling_interval': { - 'type': int, - 'default': 60, - 'desc': 'Polling interval, in seconds.'}, - 'git_base_url': { - 'type': str, - 'default': "git://pkgs.devel.redhat.com", - 'desc': 'Dist-git base URL.'}, - 'git_ssh_base_url': { - 'type': str, - 'default': "ssh://%s@pkgs.devel.redhat.com/", - 'desc': 'Dist-git ssh base URL.'}, - 'git_user': { - 'type': str, - 'default': '', - 'desc': 'User for 
git operations.'}, - 'git_author': { - 'type': str, - 'default': 'Freshmaker ', - 'desc': 'Author for git commit.'}, - 'koji_profile': { - 'type': str, - 'default': 'koji', - 'desc': 'Koji Profile from where to load Koji configuration.'}, - 'koji_container_scratch_build': { - 'type': bool, - 'default': False, - 'desc': 'Whether to make a scratch build to rebuild the image.'}, - 'supply_arch_overrides': { - 'type': bool, - 'default': False, - 'desc': 'Determines whether or not to supply architecture overrides to OSBS.', - }, - 'manifest_v2_arch_map': { - 'type': dict, - 'default': { + "desc": "List of enabled handlers.", + }, + "polling_interval": {"type": int, "default": 60, "desc": "Polling interval, in seconds."}, + "git_base_url": { + "type": str, + "default": "git://pkgs.devel.redhat.com", + "desc": "Dist-git base URL.", + }, + "git_ssh_base_url": { + "type": str, + "default": "ssh://%s@pkgs.devel.redhat.com/", + "desc": "Dist-git ssh base URL.", + }, + "git_user": {"type": str, "default": "", "desc": "User for git operations."}, + "git_author": { + "type": str, + "default": "Freshmaker ", + "desc": "Author for git commit.", + }, + "koji_profile": { + "type": str, + "default": "koji", + "desc": "Koji Profile from where to load Koji configuration.", + }, + "koji_container_scratch_build": { + "type": bool, + "default": False, + "desc": "Whether to make a scratch build to rebuild the image.", + }, + "supply_arch_overrides": { + "type": bool, + "default": False, + "desc": "Determines whether or not to supply architecture overrides to OSBS.", + }, + "manifest_v2_arch_map": { + "type": dict, + "default": { # Someday, somebody please tell me why these names are different. - 'amd64': 'x86_64', - 'arm64': 'aarch64', + "amd64": "x86_64", + "arm64": "aarch64", }, - 'desc': 'A map of manifest api v2 architectures to brew architectures.'}, - 'dry_run': { - 'type': bool, - 'default': False, - 'desc': 'When True, no builds will be submitted and only log ' - 'messages will be logged instead. Freshmaker will also ' - 'generate fake "build succeeded" events to mark fake ' - 'artifact rebuild as done.', - }, - 'handler_build_allowlist': { - 'type': dict, - 'default': {}, - 'desc': 'Allowlist for build targets of handlers', - }, - 'handler_build_blocklist': { - 'type': dict, - 'default': {}, - 'desc': 'Blocklist for build targets of handlers', - }, - 'image_extra_repo': { - 'type': dict, - 'default': {}, - 'desc': 'Dict with base container "name-version" as key and URL ' - 'to extra .repo file to include in a rebuild', - }, - 'sfm2_api_url': { - 'type': str, - 'default': '', - 'desc': 'SFM2 API URl' - }, - 'container_released_dependencies_only': { - 'type': bool, - 'default': False, - 'desc': 'When True, only released images will be used as dependencies ' - 'for other images. 
WARN: This may lead to downgrade to older ' - 'release as result of rebuild when image to rebuild depends ' - 'on unreleased release of the parent image.'}, - 'container_repo_vendors': { - 'type': tuple, - 'default': ("redhat",), - 'desc': 'Allowed vendors for Container Repositories'}, - 'image_build_repository_registries': { - 'type': list, - 'default': [], - 'desc': 'List of image build repository registries.'}, - 'errata_tool_server_url': { - 'type': str, - 'default': '', - 'desc': 'Server URL of Errata Tool.'}, - 'errata_rhel_release_prefix': { - 'type': str, - 'default': '', - 'desc': 'When set, only builds based on this RHEL release ' - 'will be included in rebuilds.'}, - 'pulp_server_url': { - 'type': str, - 'default': '', - 'desc': 'Server URL of Pulp.'}, - 'pulp_crt_path': { - 'type': str, - 'default': '', - 'desc': 'Path to certificate file to authenticate to Pulp.'}, - 'pulp_key_path': { - 'type': str, - 'default': '', - 'desc': 'Path to key file to authenticate to Pulp.'}, - 'odcs_server_url': { - 'type': str, - 'default': '', - 'desc': 'Server URL to ODCS'}, - 'odcs_auth_mech': { - 'type': str, - 'default': 'kerberos', - 'desc': 'ODCS authentication mechanism.'}, - 'odcs_verify_ssl': { - 'type': bool, - 'default': True, - 'desc': 'Whether to enable SSL verification over HTTP with ODCS.'}, - 'odcs_openidc_token': { - 'type': str, - 'default': '', - 'desc': 'OpenIDC token used to access ODCS.'}, - 'odcs_sigkeys': { - 'type': list, - 'default': [], - 'desc': 'List of sigkeys IDs to use when requesting compose.'}, - 'krb_auth_using_keytab': { - 'type': bool, - 'default': True, - 'desc': 'Whether to acquire credential cache from a client keytab.'}, - 'krb_auth_principal': { - 'type': str, - 'default': "", - 'desc': 'Principal used to acquire credential cache, which must be' - ' present in specified client keytab.'}, - 'krb_auth_client_keytab': { - 'type': str, - 'default': '', - 'desc': 'Path to a client keytab.'}, - 'krb_auth_ccache_file': { - 'type': str, - 'default': '', - 'desc': 'Path to credential cache file. ' - 'The "$pid" is replaced by process ID. ' - 'The "$tid" is replaced by thread ID'}, - 'oidc_base_namespace': { - 'type': str, - 'default': '', - 'desc': 'Base namespace of OIDC scopes.'}, - 'dogpile_cache_backend': { - 'type': str, - 'default': 'dogpile.cache.memory', - 'desc': 'Name of dogpile.cache backend to use.'}, - 'messaging_backends': { - 'type': dict, - 'default': {}, - 'desc': 'Configuration for each supported messaging backend.'}, - 'max_thread_workers': { - 'type': int, - 'default': 10, - 'desc': 'Maximum number of thread workers used by Freshmaker.'}, - 'permissions': { - 'type': dict, - 'default': {}, - 'desc': 'The permissions with keys as role names and the values as dictionaries with ' - 'the keys "groups" and "users" which have values that are lists. Any roles not ' - 'provided as keys, will contain defaut empty values.' - }, - 'rebuilt_nvr_release_suffix': { - 'type': str, - 'default': '', - 'desc': 'A suffix to add to the rebuilt_nvr release in addition to the timestamp.', - }, - 'bundle_include_previous_rebuilds': { - 'type': bool, - 'default': True, - 'desc': 'When True, enables an automatic search in bundle rebuilds for previous' - 'Freshmaker builds of the current operator/operand images, and replace them' - 'in the bundle.' 
- }, - 'container_release_categories': { - 'type': tuple, - 'default': ("Generally Available", "Tech Preview", "Beta",), - 'desc': 'Release categories', - }, - 'pyxis_server_url': { - 'type': str, - 'default': '', - 'desc': 'Server URL of Pyxis Rest API.' - }, - 'pyxis_graphql_url': { - 'type': str, - 'default': '', - 'desc': 'Server URL of Pyxis GraphQL API.' - }, - 'pyxis_certificate': { - 'type': str, - 'default': '', - 'desc': 'Path to Pyxis certificate file.'}, - 'pyxis_private_key': { - 'type': str, - 'default': '', - 'desc': 'Path to Pyxis private key file.'}, - 'pyxis_index_image_organizations': { - 'type': list, - 'default': [], - 'desc': 'Query Pyxis for index images only with these organizations' - }, - 'pyxis_default_page_size': { - 'type': int, - 'default': 200, - 'desc': 'Default page size to be used in Pyxis requests' - }, - 'pyxis_small_page_size': { - 'type': int, - 'default': 50, - 'desc': 'Small page size to be used in Pyxis requests' - }, - 'product_pages_api_url': { - 'type': str, - 'default': '', - 'desc': 'The API URL of the Product Pages service' - }, - 'unpublished_exceptions': { - 'type': list, - 'default': [], - 'desc': 'List of dictionaries with unpublished repos, containing ' - '"registry" and "repository" keys that should not be ignored ' - 'when searching for images to rebuild.' - }, - 'freshmaker_root_url': { - 'type': str, - 'default': '', - 'desc': 'Root of the API URL of Freshmaker' - }, - 'bundle_autorebuild_tag_exceptions': { - 'type': list, - 'default': [], - 'desc': 'A list of bundle name-version entries that do not need to have an auto-rebuild ' - 'tag to be rebuilt. This only applies to the HandleBotasAdvisory handler' - }, - 'flatpak_server_url': { - 'type': str, - 'default': '', - 'desc': 'Root url of Flatpak index service' - }, - 'exclude_content_sets_pattern': { - 'type': str, - 'default': '-hidden-rpms$', - 'desc': 'Pattern for content sets which will be excluded while generating composes' + "desc": "A map of manifest api v2 architectures to brew architectures.", + }, + "dry_run": { + "type": bool, + "default": False, + "desc": "When True, no builds will be submitted and only log " + "messages will be logged instead. Freshmaker will also " + 'generate fake "build succeeded" events to mark fake ' + "artifact rebuild as done.", + }, + "handler_build_allowlist": { + "type": dict, + "default": {}, + "desc": "Allowlist for build targets of handlers", + }, + "handler_build_blocklist": { + "type": dict, + "default": {}, + "desc": "Blocklist for build targets of handlers", + }, + "image_extra_repo": { + "type": dict, + "default": {}, + "desc": 'Dict with base container "name-version" as key and URL ' + "to extra .repo file to include in a rebuild", + }, + "sfm2_api_url": {"type": str, "default": "", "desc": "SFM2 API URl"}, + "container_released_dependencies_only": { + "type": bool, + "default": False, + "desc": "When True, only released images will be used as dependencies " + "for other images. 
WARN: This may lead to downgrade to older " + "release as result of rebuild when image to rebuild depends " + "on unreleased release of the parent image.", + }, + "container_repo_vendors": { + "type": tuple, + "default": ("redhat",), + "desc": "Allowed vendors for Container Repositories", + }, + "image_build_repository_registries": { + "type": list, + "default": [], + "desc": "List of image build repository registries.", + }, + "errata_tool_server_url": { + "type": str, + "default": "", + "desc": "Server URL of Errata Tool.", + }, + "errata_rhel_release_prefix": { + "type": str, + "default": "", + "desc": "When set, only builds based on this RHEL release " + "will be included in rebuilds.", + }, + "pulp_server_url": {"type": str, "default": "", "desc": "Server URL of Pulp."}, + "pulp_crt_path": { + "type": str, + "default": "", + "desc": "Path to certificate file to authenticate to Pulp.", + }, + "pulp_key_path": { + "type": str, + "default": "", + "desc": "Path to key file to authenticate to Pulp.", + }, + "odcs_server_url": {"type": str, "default": "", "desc": "Server URL to ODCS"}, + "odcs_auth_mech": { + "type": str, + "default": "kerberos", + "desc": "ODCS authentication mechanism.", + }, + "odcs_verify_ssl": { + "type": bool, + "default": True, + "desc": "Whether to enable SSL verification over HTTP with ODCS.", + }, + "odcs_openidc_token": { + "type": str, + "default": "", + "desc": "OpenIDC token used to access ODCS.", + }, + "odcs_sigkeys": { + "type": list, + "default": [], + "desc": "List of sigkeys IDs to use when requesting compose.", + }, + "krb_auth_using_keytab": { + "type": bool, + "default": True, + "desc": "Whether to acquire credential cache from a client keytab.", + }, + "krb_auth_principal": { + "type": str, + "default": "", + "desc": "Principal used to acquire credential cache, which must be" + " present in specified client keytab.", + }, + "krb_auth_client_keytab": {"type": str, "default": "", "desc": "Path to a client keytab."}, + "krb_auth_ccache_file": { + "type": str, + "default": "", + "desc": "Path to credential cache file. " + 'The "$pid" is replaced by process ID. ' + 'The "$tid" is replaced by thread ID', + }, + "oidc_base_namespace": { + "type": str, + "default": "", + "desc": "Base namespace of OIDC scopes.", + }, + "dogpile_cache_backend": { + "type": str, + "default": "dogpile.cache.memory", + "desc": "Name of dogpile.cache backend to use.", + }, + "messaging_backends": { + "type": dict, + "default": {}, + "desc": "Configuration for each supported messaging backend.", + }, + "max_thread_workers": { + "type": int, + "default": 10, + "desc": "Maximum number of thread workers used by Freshmaker.", + }, + "permissions": { + "type": dict, + "default": {}, + "desc": "The permissions with keys as role names and the values as dictionaries with " + 'the keys "groups" and "users" which have values that are lists. 
Any roles not ' + "provided as keys, will contain defaut empty values.", + }, + "rebuilt_nvr_release_suffix": { + "type": str, + "default": "", + "desc": "A suffix to add to the rebuilt_nvr release in addition to the timestamp.", + }, + "bundle_include_previous_rebuilds": { + "type": bool, + "default": True, + "desc": "When True, enables an automatic search in bundle rebuilds for previous" + "Freshmaker builds of the current operator/operand images, and replace them" + "in the bundle.", + }, + "container_release_categories": { + "type": tuple, + "default": ( + "Generally Available", + "Tech Preview", + "Beta", + ), + "desc": "Release categories", + }, + "pyxis_server_url": {"type": str, "default": "", "desc": "Server URL of Pyxis Rest API."}, + "pyxis_graphql_url": { + "type": str, + "default": "", + "desc": "Server URL of Pyxis GraphQL API.", + }, + "pyxis_certificate": { + "type": str, + "default": "", + "desc": "Path to Pyxis certificate file.", + }, + "pyxis_private_key": { + "type": str, + "default": "", + "desc": "Path to Pyxis private key file.", + }, + "pyxis_index_image_organizations": { + "type": list, + "default": [], + "desc": "Query Pyxis for index images only with these organizations", + }, + "pyxis_default_page_size": { + "type": int, + "default": 200, + "desc": "Default page size to be used in Pyxis requests", + }, + "pyxis_small_page_size": { + "type": int, + "default": 50, + "desc": "Small page size to be used in Pyxis requests", + }, + "product_pages_api_url": { + "type": str, + "default": "", + "desc": "The API URL of the Product Pages service", + }, + "unpublished_exceptions": { + "type": list, + "default": [], + "desc": "List of dictionaries with unpublished repos, containing " + '"registry" and "repository" keys that should not be ignored ' + "when searching for images to rebuild.", + }, + "freshmaker_root_url": { + "type": str, + "default": "", + "desc": "Root of the API URL of Freshmaker", + }, + "bundle_autorebuild_tag_exceptions": { + "type": list, + "default": [], + "desc": "A list of bundle name-version entries that do not need to have an auto-rebuild " + "tag to be rebuilt. This only applies to the HandleBotasAdvisory handler", + }, + "flatpak_server_url": { + "type": str, + "default": "", + "desc": "Root url of Flatpak index service", + }, + "exclude_content_sets_pattern": { + "type": str, + "default": "-hidden-rpms$", + "desc": "Pattern for content sets which will be excluded while generating composes", }, } @@ -449,11 +460,11 @@ def __init__(self, conf_section_obj): # Set defaults for key, values in self._defaults.items(): - self.set_item(key, values['default']) + self.set_item(key, values["default"]) # Override defaults for key in dir(conf_section_obj): - if key.startswith('_'): + if key.startswith("_"): continue self.set_item(key.lower(), getattr(conf_section_obj, key)) @@ -462,7 +473,7 @@ def set_item(self, key, value): Set value for configuration item. Creates the self._key = value attribute and self.key property to set/get/del the attribute. """ - if key == 'set_item' or key.startswith('_'): + if key == "set_item" or key.startswith("_"): raise Exception("Configuration item's name is not allowed: %s" % key) # Create the empty self._key attribute, so we can assign to it. @@ -470,12 +481,12 @@ def set_item(self, key, value): # Create self.key property to access the self._key attribute. # Use the setifok_func if available for the attribute. 
- setifok_func = '_setifok_{}'.format(key) + setifok_func = "_setifok_{}".format(key) if hasattr(self, setifok_func): setx = lambda self, val: getattr(self, setifok_func)(val) else: setx = lambda self, val: setattr(self, "_" + key, val) - get_func = '_get_{}'.format(key) + get_func = "_get_{}".format(key) if hasattr(self, get_func): getx = lambda self: getattr(self, get_func)() else: @@ -486,7 +497,7 @@ def set_item(self, key, value): # Managed/registered configuration items if key in self._defaults: # Type conversion for configuration item - convert = self._defaults[key]['type'] + convert = self._defaults[key]["type"] if convert in [bool, int, list, str, set, dict, tuple]: try: # Do no try to convert None... @@ -496,7 +507,9 @@ def set_item(self, key, value): raise TypeError("Configuration value conversion failed for name: %s" % key) # unknown type/unsupported conversion elif convert is not None: - raise TypeError("Unsupported type %s for configuration item name: %s" % (convert, key)) + raise TypeError( + "Unsupported type %s for configuration item name: %s" % (convert, key) + ) # Set the attribute to the correct value setattr(self, key, value) @@ -523,7 +536,7 @@ def _setifok_messaging_sender(self, s): def _setifok_permissions(self, permissions): invalid_value = ValueError( - 'The permissions configuration must be a dictionary with the keys as role names and ' + "The permissions configuration must be a dictionary with the keys as role names and " 'the values as dictionaries with the keys "users" and "groups", which must have values ' 'that are lists. For example, {"admin": {"groups": [], "users": ["user"]}}.' ) @@ -534,7 +547,7 @@ def _setifok_permissions(self, permissions): if not isinstance(mapping, dict): raise invalid_value - allowed_keys = {'users', 'groups'} + allowed_keys = {"users", "groups"} if mapping.keys() - allowed_keys: raise invalid_value @@ -552,7 +565,7 @@ def _setifok_permissions(self, permissions): # Use a default dict where any missing key will return {'groups': [], 'users': []}. This # Allows Freshmaker developers to add roles without needing to check if they key is set. 
- fixed_permissions = defaultdict(lambda: {'groups': [], 'users': []}) + fixed_permissions = defaultdict(lambda: {"groups": [], "users": []}) fixed_permissions.update(permissions) self._permissions = fixed_permissions @@ -560,19 +573,19 @@ def _get_krb_auth_ccache_file(self): if not self._krb_auth_ccache_file: return self._krb_auth_ccache_file ccache_file = str(self._krb_auth_ccache_file) - ccache_file = ccache_file.replace( - "$tid", str(threading.current_thread().ident)) - ccache_file = ccache_file.replace( - "$pid", str(os.getpid())) + ccache_file = ccache_file.replace("$tid", str(threading.current_thread().ident)) + ccache_file = ccache_file.replace("$pid", str(os.getpid())) return ccache_file def _setifok_unpublished_exceptions(self, exceptions): for exception in exceptions: if not exception.get("registry", ""): - raise ValueError("There is no 'registry' or it's empty in one" - " of the UNPUBLISHED_EXCEPTIONS") + raise ValueError( + "There is no 'registry' or it's empty in one" " of the UNPUBLISHED_EXCEPTIONS" + ) if not exception.get("repository", ""): - raise ValueError("There is no 'repository' or it's empty in one" - " of the UNPUBLISHED_EXCEPTIONS") + raise ValueError( + "There is no 'repository' or it's empty in one" " of the UNPUBLISHED_EXCEPTIONS" + ) self._unpublished_exceptions = exceptions diff --git a/freshmaker/consumer.py b/freshmaker/consumer.py index 68d48b43..8b8462f3 100644 --- a/freshmaker/consumer.py +++ b/freshmaker/consumer.py @@ -30,8 +30,11 @@ from freshmaker import log, conf, messaging, events, app from freshmaker.monitor import ( - messaging_rx_counter, messaging_rx_ignored_counter, - messaging_rx_processed_ok_counter, messaging_rx_failed_counter) + messaging_rx_counter, + messaging_rx_ignored_counter, + messaging_rx_processed_ok_counter, + messaging_rx_failed_counter, +) from freshmaker.utils import load_classes @@ -40,7 +43,8 @@ class FreshmakerConsumer(fedmsg.consumers.FedmsgConsumer): This is triggered by running fedmsg-hub. This class is responsible for ingesting and processing messages from the message bus. """ - config_key = 'freshmakerconsumer' + + config_key = "freshmakerconsumer" def __init__(self, hub): # set topic before super, otherwise topic will not be subscribed @@ -49,8 +53,8 @@ def __init__(self, hub): # These two values are typically provided either by the unit tests or # by the local build command. They are empty in the production environ - self.stop_condition = hub.config.get('freshmaker.stop_condition') - initial_messages = hub.config.get('freshmaker.initial_messages', []) + self.stop_condition = hub.config.get("freshmaker.stop_condition") + initial_messages = hub.config.get("freshmaker.initial_messages", []) for msg in initial_messages: self.incoming.put(msg) @@ -67,16 +71,17 @@ def register_parsers(self): log.debug("Parser classes: %r", events.BaseEvent._parsers) self.topic = events.BaseEvent.get_parsed_topics() - log.debug('Setting topics: {}'.format(', '.join(self.topic))) + log.debug("Setting topics: {}".format(", ".join(self.topic))) def shutdown(self): log.info("Scheduling shutdown.") from moksha.hub.reactor import reactor + reactor.callFromThread(self.hub.stop) reactor.callFromThread(reactor.stop) def validate(self, message): - if conf.messaging == 'fedmsg': + if conf.messaging == "fedmsg": # If this is a faked internal message, don't bother. 
if isinstance(message, events.BaseEvent): return @@ -95,7 +100,7 @@ def consume(self, message): if isinstance(message, events.BaseEvent): msg = message else: - msg = self.get_abstracted_msg(message['body']) + msg = self.get_abstracted_msg(message["body"]) if not msg: # We do not log here anything, because it would create lot of @@ -118,33 +123,33 @@ def consume(self, message): messaging_rx_processed_ok_counter.inc() except Exception: messaging_rx_failed_counter.inc() - log.exception('Failed while handling {0!r}'.format(msg)) + log.exception("Failed while handling {0!r}".format(msg)) if self.stop_condition and self.stop_condition(message): self.shutdown() def get_abstracted_msg(self, message): # Convert the message to an abstracted message - if 'topic' not in message: - raise ValueError( - 'The messaging format "{}" is not supported'.format(conf.messaging)) + if "topic" not in message: + raise ValueError('The messaging format "{}" is not supported'.format(conf.messaging)) # Fallback to message['headers']['message-id'] if msg_id not defined. - if ('msg_id' not in message and - 'headers' in message and - "message-id" in message['headers']): - message['msg_id'] = message['headers']['message-id'] + if "msg_id" not in message and "headers" in message and "message-id" in message["headers"]: + message["msg_id"] = message["headers"]["message-id"] - if 'msg_id' not in message: + if "msg_id" not in message: raise ValueError( - 'Received message does not contain "msg_id" or "message-id": ' - '%r' % (message)) + 'Received message does not contain "msg_id" or "message-id": ' "%r" % (message) + ) - return events.BaseEvent.from_fedmsg(message['topic'], message) + return events.BaseEvent.from_fedmsg(message["topic"], message) def process_event(self, msg): - log.debug('Received a message with an ID of "{0}" and of type "{1}"' - .format(getattr(msg, 'msg_id', None), type(msg).__name__)) + log.debug( + 'Received a message with an ID of "{0}" and of type "{1}"'.format( + getattr(msg, "msg_id", None), type(msg).__name__ + ) + ) handlers = load_classes(conf.handlers) handlers = sorted(handlers, key=lambda handler: getattr(handler, "order", 50)) @@ -159,7 +164,7 @@ def process_event(self, msg): try: further_work = handler.handle(msg) or [] except Exception: - err = 'Could not process message handler. See the traceback.' + err = "Could not process message handler. See the traceback." log.exception(err) else: # Handlers can *optionally* return a list of fake messages that @@ -177,7 +182,7 @@ def process_event(self, msg): def get_global_consumer(): - """ Return a handle to the active consumer object, if it exists. """ + """Return a handle to the active consumer object, if it exists.""" hub = moksha.hub._hub if not hub: raise ValueError("No global moksha-hub obj found.") @@ -190,6 +195,6 @@ def get_global_consumer(): def work_queue_put(msg): - """ Artificially put a message into the work queue of the consumer. 
""" + """Artificially put a message into the work queue of the consumer.""" consumer = get_global_consumer() consumer.incoming.put(msg) diff --git a/freshmaker/container.py b/freshmaker/container.py index d7d31d97..b773b6bc 100644 --- a/freshmaker/container.py +++ b/freshmaker/container.py @@ -94,13 +94,9 @@ def load(cls, data: dict[str, Any]): def _convert_rpm(rpm): """Convert rpm data to dict of rpm names and nvr""" parsed_nvra = kobo.rpmlib.parse_nvra(rpm["nvra"]) - nvr = "-".join( - [parsed_nvra["name"], parsed_nvra["version"], parsed_nvra["release"]] - ) + nvr = "-".join([parsed_nvra["name"], parsed_nvra["version"], parsed_nvra["release"]]) parsed_nvra = kobo.rpmlib.parse_nvra(rpm["srpm_nevra"]) - srpm_nvr = "-".join( - [parsed_nvra["name"], parsed_nvra["version"], parsed_nvra["release"]] - ) + srpm_nvr = "-".join([parsed_nvra["name"], parsed_nvra["version"], parsed_nvra["release"]]) return { "name": rpm["name"], "nvr": nvr, diff --git a/freshmaker/errata.py b/freshmaker/errata.py index e133bca2..da1cfb16 100644 --- a/freshmaker/errata.py +++ b/freshmaker/errata.py @@ -26,9 +26,7 @@ import dogpile.cache from requests_kerberos import HTTPKerberosAuth, OPTIONAL -from freshmaker.events import ( - BrewSignRPMEvent, ErrataBaseEvent, - FreshmakerManualRebuildEvent) +from freshmaker.events import BrewSignRPMEvent, ErrataBaseEvent, FreshmakerManualRebuildEvent from freshmaker import conf, log from freshmaker.utils import retry @@ -38,9 +36,17 @@ class ErrataAdvisory(object): Represents Errata advisory. """ - def __init__(self, errata_id, name, state, content_types, - security_impact=None, product_short_name=None, - cve_list=None, has_hightouch_bug=None): + def __init__( + self, + errata_id, + name, + state, + content_types, + security_impact=None, + product_short_name=None, + cve_list=None, + has_hightouch_bug=None, + ): """ Initializes the ErrataAdvisory instance. """ @@ -73,15 +79,14 @@ def reporter(self): errata = Errata() advisory_data = errata._get_advisory_legacy(self.errata_id) - self._reporter = advisory_data['people']['reporter'] + self._reporter = advisory_data["people"]["reporter"] return self._reporter @property def builds(self): if self._builds is None: errata = Errata() - self._builds = errata._errata_rest_get(f"erratum/{self.errata_id}" - "/builds") + self._builds = errata._errata_rest_get(f"erratum/{self.errata_id}" "/builds") return self._builds @@ -117,13 +122,18 @@ def from_advisory_id(cls, errata, errata_id): break return ErrataAdvisory( - erratum_data["id"], erratum_data["fulladvisory"], erratum_data["status"], - erratum_data['content_types'], security_impact, - product_data["product"]["short_name"], cve_list, - has_hightouch_bug) + erratum_data["id"], + erratum_data["fulladvisory"], + erratum_data["status"], + erratum_data["content_types"], + security_impact, + product_data["product"]["short_name"], + cve_list, + has_hightouch_bug, + ) def is_flatpak_module_advisory_ready(self): - """ Returns True only if a Flatpaks can be rebuilt from module advisory. + """Returns True only if a Flatpaks can be rebuilt from module advisory. Flatpaks can be rebuilt only if all of the following are true: - Advisory must contain modules. 
@@ -133,33 +143,32 @@ def is_flatpak_module_advisory_ready(self): """ errata = Errata() return ( - self.state == "QE" and - "module" in self.content_types and - all( - "-hidden-" in repo_id - for repo_id in errata.get_pulp_repository_ids(self.errata_id) - ) and - errata.builds_signed(self.errata_id) and - errata.is_zstream(self.errata_id) + self.state == "QE" + and "module" in self.content_types + and all( + "-hidden-" in repo_id for repo_id in errata.get_pulp_repository_ids(self.errata_id) + ) + and errata.builds_signed(self.errata_id) + and errata.is_zstream(self.errata_id) ) class Errata(object): - """ Interface to Errata. """ + """Interface to Errata.""" # Cache for `advisories_from_event` related methods. The main reason # of this cache is lookup of BrewSignRPMEvents which came in waves. # Therefore the short 10 seconds timeout. We don't want to cache it for # too long to keep the data in sync with Errata tool. - region = dogpile.cache.make_region().configure( - conf.dogpile_cache_backend, expiration_time=10) + region = dogpile.cache.make_region().configure(conf.dogpile_cache_backend, expiration_time=10) # Change for _rhel_release_from_product_version. # Big expiration_time is OK here, because once we start rebuilding # something for particular product version, its rhel_release version # should not change. product_region = dogpile.cache.make_region().configure( - conf.dogpile_cache_backend, expiration_time=24 * 3600) + conf.dogpile_cache_backend, expiration_time=24 * 3600 + ) def __init__(self, server_url=None): """ @@ -167,11 +176,11 @@ def __init__(self, server_url=None): :param str server_url: Base URL of Errata server. """ - self._rest_api_ver = 'api/v1' + self._rest_api_ver = "api/v1" if server_url is not None: - self.server_url = server_url.rstrip('/') + self.server_url = server_url.rstrip("/") else: - self.server_url = conf.errata_tool_server_url.rstrip('/') + self.server_url = conf.errata_tool_server_url.rstrip("/") @retry(wait_on=(requests.exceptions.RequestException,), logger=log) def _errata_authorized_get(self, *args, **kwargs): @@ -181,7 +190,9 @@ def _errata_authorized_get(self, *args, **kwargs): auth=HTTPKerberosAuth( mutual_authentication=OPTIONAL, principal=conf.krb_auth_principal ), - **kwargs, timeout=conf.requests_timeout) + **kwargs, + timeout=conf.requests_timeout, + ) r.raise_for_status() except requests.exceptions.RequestException as e: if e.response is not None and e.response.status_code == 401: @@ -196,22 +207,21 @@ def _errata_rest_get(self, endpoint): Document: /developer-guide/api-http-api.html """ return self._errata_authorized_get( - "%s/%s/%s" % (self.server_url, self._rest_api_ver, - endpoint.lstrip('/'))) + "%s/%s/%s" % (self.server_url, self._rest_api_ver, endpoint.lstrip("/")) + ) def _errata_http_get(self, endpoint): """Request Errata legacy HTTP API See also Legacy section in /developer-guide/api-http-api.html """ - return self._errata_authorized_get( - '{}/{}'.format(self.server_url, endpoint)) + return self._errata_authorized_get("{}/{}".format(self.server_url, endpoint)) def _get_advisory(self, errata_id): - return self._errata_rest_get('erratum/{0}'.format(errata_id)) + return self._errata_rest_get("erratum/{0}".format(errata_id)) def _get_advisory_legacy(self, errata_id): - return self._errata_http_get('advisory/{0}.json'.format(errata_id)) + return self._errata_http_get("advisory/{0}.json".format(errata_id)) def _get_product(self, product_id): return self._errata_http_get("products/%s.json" % str(product_id)) @@ -297,8 +307,7 @@ def 
_rhel_release_from_product_version(self, errata_id, product_version): product_id = data["product"]["id"] # Get all the product versions associated with this product ID. - data = self._errata_http_get("products/%s/product_versions.json" - % str(product_id)) + data = self._errata_http_get("products/%s/product_versions.json" % str(product_id)) # Find out the product version ID for the input `product_version` # name. @@ -311,12 +320,14 @@ def _rhel_release_from_product_version(self, errata_id, product_version): if not pr_version_id: raise ValueError( "Cannot get RHEL release from Errata advisory %s, product " - "version %s" % (str(errata_id), product_version)) + "version %s" % (str(errata_id), product_version) + ) # Get the additional product version info to find out the RHEL release # name. - data = self._errata_http_get("products/%s/product_versions/%s.json" - % (str(product_id), str(pr_version_id))) + data = self._errata_http_get( + "products/%s/product_versions/%s.json" % (str(product_id), str(pr_version_id)) + ) return data["rhel_release"]["name"] @@ -353,12 +364,16 @@ def _get_rpms(self, errata_id, rhel_release_prefix=None): rhel_release = Errata.product_region.get(product_version) if not rhel_release: rhel_release = self._rhel_release_from_product_version( - errata_id, product_version) + errata_id, product_version + ) Errata.product_region.set(product_version, rhel_release) if not rhel_release.startswith(rhel_release_prefix): - log.info("Skipping builds for %s - not based on RHEL %s", - product_version, rhel_release_prefix) + log.info( + "Skipping builds for %s - not based on RHEL %s", + product_version, + rhel_release_prefix, + ) continue for build in builds: @@ -372,7 +387,7 @@ def _get_rpms(self, errata_id, rhel_release_prefix=None): return {"source_rpms": source_rpms, "binary_rpms": binary_rpms} def get_srpm_nvrs(self, errata_id, rhel_release_prefix=None): - """" + """ " Returns list with nvrs of SRPMs attached to the advisory :param number errata_id: ID of advisory. @@ -387,11 +402,11 @@ def get_srpm_nvrs(self, errata_id, rhel_release_prefix=None): """ rpms = self._get_rpms(errata_id, rhel_release_prefix) source_rpms = rpms.get("source_rpms", []) - srpm_nvrs = {nvr.rsplit('.', 2)[0] for nvr in source_rpms} + srpm_nvrs = {nvr.rsplit(".", 2)[0] for nvr in source_rpms} return list(srpm_nvrs) def get_binary_rpm_nvrs(self, errata_id, rhel_release_prefix=None): - """" + """ " Returns list with nvrs of all binary RPMs attached to the advisory :param number errata_id: ID of advisory. @@ -406,7 +421,7 @@ def get_binary_rpm_nvrs(self, errata_id, rhel_release_prefix=None): """ rpms = self._get_rpms(errata_id, rhel_release_prefix) binary_rpms = rpms.get("binary_rpms", []) - nvrs = {nvr.rsplit('.', 2)[0] for nvr in binary_rpms} + nvrs = {nvr.rsplit(".", 2)[0] for nvr in binary_rpms} return list(nvrs) def get_pulp_repository_ids(self, errata_id): @@ -417,12 +432,11 @@ def get_pulp_repository_ids(self, errata_id): :return: a list of strings each of them represents a pulp repository ID :rtype: list """ - data = self._errata_http_get( - '/errata/get_pulp_packages/{}.json'.format(errata_id)) + data = self._errata_http_get("/errata/get_pulp_packages/{}.json".format(errata_id)) return data.keys() def get_cve_affected_rpm_nvrs(self, errata_id): - """ Get RPM nvrs which are affected by the CVEs in errata + """Get RPM nvrs which are affected by the CVEs in errata :param errata_id: Errata advisory ID, e.g. 25713. 
:type errata_id: str or int @@ -442,14 +456,14 @@ def get_cve_affected_rpm_nvrs(self, errata_id): for arch, rpms in variant_data.items(): # Remove '.arch.....' part from rpm's name # and make a list from them - if arch != 'SRPMS': - just_nvrs = [rpm.rsplit('.', 2)[0] for rpm in rpms] + if arch != "SRPMS": + just_nvrs = [rpm.rsplit(".", 2)[0] for rpm in rpms] nvrs.update(just_nvrs) return list(nvrs) def get_blocking_advisories_builds(self, errata_id): - """ Get all advisories that block given advisory id, and fetch all builds from it + """Get all advisories that block given advisory id, and fetch all builds from it :param number errata_id: ID of advisory :return: NVRs of builds attached to all dependent advisories @@ -467,7 +481,7 @@ def get_blocking_advisories_builds(self, errata_id): return nvrs def get_attached_build_nvrs(self, errata_id): - """ Get all attached builds' NVRs + """Get all attached builds' NVRs :param number errata_id: ID of advisory :return: NVRs of attached builds @@ -475,10 +489,7 @@ def get_attached_build_nvrs(self, errata_id): """ product_builds = self._get_attached_builds(errata_id) return { - nvr - for builds in product_builds.values() - for build in builds - for nvr in build.keys() + nvr for builds in product_builds.values() for build in builds for nvr in build.keys() } def _get_release(self, errata_id): diff --git a/freshmaker/errors.py b/freshmaker/errors.py index 2506284f..cb796210 100644 --- a/freshmaker/errors.py +++ b/freshmaker/errors.py @@ -52,10 +52,7 @@ class Forbidden(ValueError): def json_error(status, error, message): - response = jsonify( - {'status': status, - 'error': error, - 'message': message}) + response = jsonify({"status": status, "error": error, "message": message}) response.status_code = status return response @@ -63,30 +60,30 @@ def json_error(status, error, message): @app.errorhandler(NotFound) def notfound_error(e): """Flask error handler for NotFound exceptions""" - return json_error(404, 'Not Found', e.args[0]) + return json_error(404, "Not Found", e.args[0]) @app.errorhandler(Unauthorized) def unauthorized_error(e): """Flask error handler for Unauthorized exceptions""" - return json_error(401, 'Unauthorized', e.description) + return json_error(401, "Unauthorized", e.description) @app.errorhandler(Forbidden) def forbidden_error(e): """Flask error handler for Forbidden exceptions""" - return json_error(403, 'Forbidden', e.args[0]) + return json_error(403, "Forbidden", e.args[0]) @app.errorhandler(ValueError) def validationerror_error(e): """Flask error handler for ValueError exceptions""" - log.exception('Bad Request: %s', e) - return json_error(400, 'Bad Request', str(e)) + log.exception("Bad Request: %s", e) + return json_error(400, "Bad Request", str(e)) @app.errorhandler(Exception) def internal_server_error(e): """Flask error handler for RuntimeError exceptions""" - log.exception('Internal server error: %s', e) - return json_error(500, 'Internal Server Error', str(e)) + log.exception("Internal server error: %s", e) + return json_error(500, "Internal Server Error", str(e)) diff --git a/freshmaker/events.py b/freshmaker/events.py index 7f2b419a..4905413c 100644 --- a/freshmaker/events.py +++ b/freshmaker/events.py @@ -31,7 +31,6 @@ class BaseEvent(object): - _parsers = {} # type: dict[Any, Any] def __init__(self, msg_id, manual=False, dry_run=False): @@ -75,11 +74,10 @@ def get_parsed_topics(cls): topic_suffixes = [] for parser in BaseEvent._parsers.values(): topic_suffixes.extend(parser.topic_suffixes) - return 
['{}.{}'.format(pref.rstrip('.'), cat) - for pref, cat - in itertools.product( - conf.messaging_topic_prefix, - topic_suffixes)] + return [ + "{}.{}".format(pref.rstrip("."), cat) + for pref, cat in itertools.product(conf.messaging_topic_prefix, topic_suffixes) + ] def __repr__(self): init_sig = signature(self.__init__) @@ -88,20 +86,21 @@ def __repr__(self): "{}={!r}".format(name, getattr(self, name)) if param.default != param.empty else repr(getattr(self, name, {})) - for name, param in init_sig.parameters.items()) + for name, param in init_sig.parameters.items() + ) - return "{}({})".format(type(self).__name__, ', '.join(args_strs)) + return "{}({})".format(type(self).__name__, ", ".join(args_strs)) def __getitem__(self, key): - """ Used to trick moksha into thinking we are a dict. """ + """Used to trick moksha into thinking we are a dict.""" return getattr(self, key) def __setitem__(self, key, value): - """ Used to trick moksha into thinking we are a dict. """ + """Used to trick moksha into thinking we are a dict.""" return setattr(self, key, value) def get(self, key, value=None): - """ Used to trick moksha into thinking we are a dict. """ + """Used to trick moksha into thinking we are a dict.""" return getattr(self, key, value) def __json__(self): @@ -145,17 +144,18 @@ def is_allowed(self, handler, artifact_type, **kwargs): :param kwargs: Extra kwargs to be passed to `handler.allow_build()`. """ return handler.allow_build( - artifact_type, dry_run=self.dry_run, - manual=self.manual, **kwargs) + artifact_type, dry_run=self.dry_run, manual=self.manual, **kwargs + ) class MBSModuleStateChangeEvent(BaseEvent): - """ A class that inherits from BaseEvent to provide an event + """A class that inherits from BaseEvent to provide an event object for a module event generated by module-build-service :param msg_id: the id of the msg (e.g. 2016-SomeGUID) :param module_build_id: the id of the module build :param module_build_state: the state of the module build """ + def __init__(self, msg_id, module, stream, build_id, build_state, **kwargs): super(MBSModuleStateChangeEvent, self).__init__(msg_id, **kwargs) self.module = module @@ -174,6 +174,7 @@ class GitModuleMetadataChangeEvent(BaseEvent): :param scm_url: SCM URL of a updated module. :param branch: Branch of updated module. """ + def __init__(self, msg_id, module, branch, rev, **kwargs): super(GitModuleMetadataChangeEvent, self).__init__(msg_id, **kwargs) self.module = module @@ -193,6 +194,7 @@ class GitRPMSpecChangeEvent(BaseEvent): :param branch: Branch of updated RPM spec. :param rev: revision. """ + def __init__(self, msg_id, rpm, branch, rev, **kwargs): super(GitRPMSpecChangeEvent, self).__init__(msg_id, **kwargs) self.rpm = rpm @@ -208,6 +210,7 @@ class TestingEvent(BaseEvent): """ Event used in unit-tests. 
""" + def __init__(self, msg_id, **kwargs): super(TestingEvent, self).__init__(msg_id, **kwargs) @@ -262,6 +265,7 @@ class KojiTaskStateChangeEvent(BaseEvent): """ Provides an event object for "the state of task changed in koji" """ + def __init__(self, msg_id, task_id, task_state, **kwargs): super(KojiTaskStateChangeEvent, self).__init__(msg_id, **kwargs) self.task_id = task_id @@ -287,14 +291,16 @@ def search_key(self): def is_allowed(self, handler, **kwargs): return super(ErrataBaseEvent, self).is_allowed( - handler, ArtifactType.IMAGE, + handler, + ArtifactType.IMAGE, advisory_state=self.advisory.state, advisory_name=self.advisory.name, advisory_security_impact=self.advisory.security_impact, advisory_product_short_name=self.advisory.product_short_name, advisory_has_hightouch_bug=self.advisory.has_hightouch_bug, - advisory_content_types=' '.join(self.advisory.content_types), - **kwargs) + advisory_content_types=" ".join(self.advisory.content_types), + **kwargs + ) class ErrataAdvisoryStateChangedEvent(ErrataBaseEvent): @@ -321,9 +327,15 @@ class ManualRebuildWithAdvisoryEvent(ErrataRPMAdvisoryShippedEvent): from advisory. """ - def __init__(self, msg_id, advisory, container_images, - requester_metadata_json=None, - requester=None, **kwargs): + def __init__( + self, + msg_id, + advisory, + container_images, + requester_metadata_json=None, + requester=None, + **kwargs + ): """ Creates new ManualRebuildWithAdvisoryEvent. @@ -334,8 +346,7 @@ def __init__(self, msg_id, advisory, container_images, :param requester_metadata_json: JSON of additional information about rebuild :param requester: name of requester of rebuild """ - super(ManualRebuildWithAdvisoryEvent, self).__init__( - msg_id, advisory, **kwargs) + super(ManualRebuildWithAdvisoryEvent, self).__init__(msg_id, advisory, **kwargs) self.manual = True self.container_images = container_images self.requester_metadata_json = requester_metadata_json @@ -346,6 +357,7 @@ class BrewSignRPMEvent(BaseEvent): """ Represents the message sent by Brew when RPM is signed. """ + def __init__(self, msg_id, nvr, **kwargs): super(BrewSignRPMEvent, self).__init__(msg_id, **kwargs) self.nvr = nvr @@ -359,8 +371,8 @@ class BrewContainerTaskStateChangeEvent(BaseEvent): """ Represents the message sent by Brew when a container task state is changed. """ - def __init__(self, msg_id, container, branch, target, task_id, old_state, - new_state, **kwargs): + + def __init__(self, msg_id, container, branch, target, task_id, old_state, new_state, **kwargs): super(BrewContainerTaskStateChangeEvent, self).__init__(msg_id, **kwargs) self.container = container self.branch = branch @@ -387,9 +399,9 @@ class FreshmakerManualRebuildEvent(BaseEvent): NOTE: This event is deprecated and not used anymore, but we have to keep it around, because we have instances of this event stored in database. """ + def __init__(self, msg_id, errata_id=None, dry_run=False): - super(FreshmakerManualRebuildEvent, self).__init__( - msg_id, dry_run=dry_run) + super(FreshmakerManualRebuildEvent, self).__init__(msg_id, dry_run=dry_run) self.errata_id = errata_id @@ -397,6 +409,7 @@ class FreshmakerManageEvent(BaseEvent): """ Event triggered by an internal message for managing Freshmaker itself. """ + _max_tries = 3 def __init__(self, msg_body, **kwargs): @@ -412,8 +425,8 @@ def __new__(cls, msg_body, *args, **kwargs): # then the unhandleable `None` is returned here as last resort, # instead of `FreshmakerManageEvent`. 
instance = super(FreshmakerManageEvent, cls).__new__(cls) - instance.action = msg_body['action'] - instance.try_count = msg_body['try'] + instance.action = msg_body["action"] + instance.try_count = msg_body["try"] instance.try_count += 1 instance.last_try = instance.try_count == FreshmakerManageEvent._max_tries @@ -425,9 +438,17 @@ def __new__(cls, msg_body, *args, **kwargs): class FreshmakerAsyncManualBuildEvent(BaseEvent): """Event triggered via API endpoint /async-builds""" - def __init__(self, msg_id, dist_git_branch, container_images, - freshmaker_event_id=None, brew_target=None, dry_run=False, - requester=None, requester_metadata_json=None): + def __init__( + self, + msg_id, + dist_git_branch, + container_images, + freshmaker_event_id=None, + brew_target=None, + dry_run=False, + requester=None, + requester_metadata_json=None, + ): """Initialize this event :param str msg_id: the message id. @@ -449,8 +470,7 @@ def __init__(self, msg_id, dist_git_branch, container_images, :param requester: name of requester of rebuild :param requester_metadata_json: JSON of additional information about rebuild """ - super(FreshmakerAsyncManualBuildEvent, self).__init__( - msg_id, manual=True, dry_run=dry_run) + super(FreshmakerAsyncManualBuildEvent, self).__init__(msg_id, manual=True, dry_run=dry_run) self.dist_git_branch = dist_git_branch self.container_images = container_images self.freshmaker_event_id = freshmaker_event_id @@ -460,7 +480,7 @@ def __init__(self, msg_id, dist_git_branch, container_images, class BotasErrataShippedEvent(ErrataBaseEvent): - """ Event triggered, when BOTAS pushes advisory to SHIPPED_LIVE state """ + """Event triggered, when BOTAS pushes advisory to SHIPPED_LIVE state""" def __init__(self, msg_id, advisory, dry_run=False): super().__init__(msg_id, advisory, dry_run=dry_run) @@ -471,12 +491,20 @@ class ManualBundleRebuildEvent(ErrataBaseEvent): Event triggered when Release Driver requests manual rebuild OR when manual rebuild of bundles requested by person """ - def __init__(self, msg_id, advisory, container_images, - requester_metadata_json=None, freshmaker_event_id=None, - requester=None, dry_run=False, **kwargs): + + def __init__( + self, + msg_id, + advisory, + container_images, + requester_metadata_json=None, + freshmaker_event_id=None, + requester=None, + dry_run=False, + **kwargs + ): super().__init__( - msg_id, advisory, - freshmaker_event_id=freshmaker_event_id, dry_run=dry_run, **kwargs + msg_id, advisory, freshmaker_event_id=freshmaker_event_id, dry_run=dry_run, **kwargs ) self.manual = True self.container_images = container_images diff --git a/freshmaker/handlers/__init__.py b/freshmaker/handlers/__init__.py index 5c029744..8db591c9 100644 --- a/freshmaker/handlers/__init__.py +++ b/freshmaker/handlers/__init__.py @@ -43,6 +43,7 @@ class ODCSComposeNotReady(Exception): Raised when ODCS compose is still generating and therefore not ready to be used to build an image. """ + pass @@ -54,6 +55,7 @@ def fail_event_on_handler_exception(func): The exception is re-raised by this decorator once its finished. """ + @wraps(func) def decorator(handler, *args, **kwargs): try: @@ -66,7 +68,7 @@ def decorator(handler, *args, **kwargs): raise handler._last_handled_exception = e - err = 'Could not process message handler. See the traceback.' + err = "Could not process message handler. See the traceback." log.exception(err) # In case the exception interrupted the database transaction, @@ -75,14 +77,14 @@ def decorator(handler, *args, **kwargs): # Mark the event as failed. 
db_event_id = handler.current_db_event_id - db_event = db.session.query(Event).filter_by( - id=db_event_id).first() + db_event = db.session.query(Event).filter_by(id=db_event_id).first() if db_event: msg = "Handling of event failed with traceback: %s" % (str(e)) db_event.transition(EventState.FAILED, msg) db_event.builds_transition(ArtifactBuildState.FAILED.value, msg) db.session.commit() raise + return decorator @@ -98,6 +100,7 @@ def fail_artifact_build_on_handler_exception(allowlist=None): subclasses which do not cause the ArtifactBuild to fail but are instead just re-raised. """ + def wrapper(func): @wraps(func) def decorator(handler, *args, **kwargs): @@ -114,7 +117,7 @@ def decorator(handler, *args, **kwargs): if allowlist and type(e) in allowlist: raise - err = 'Could not process message handler. See the traceback.' + err = "Could not process message handler. See the traceback." log.exception(err) # In case the exception interrupted the database transaction, @@ -123,15 +126,17 @@ def decorator(handler, *args, **kwargs): # Mark the event as failed. build_id = handler.current_db_artifact_build_id - build = db.session.query(ArtifactBuild).filter_by( - id=build_id).first() + build = db.session.query(ArtifactBuild).filter_by(id=build_id).first() if build: build.transition( - ArtifactBuildState.FAILED.value, "Handling of " - "build failed with traceback: %s" % (str(e))) + ArtifactBuildState.FAILED.value, + "Handling of " "build failed with traceback: %s" % (str(e)), + ) db.session.commit() raise + return decorator + return wrapper @@ -139,6 +144,7 @@ class BaseHandler(object): """ Abstract base class for event handlers. """ + __metaclass__ = abc.ABCMeta # Defines the order of this handler when evaluating multiple handlers. @@ -248,8 +254,7 @@ def set_context(self, db_object): # Prefix logs with " ():". self._log_prefix = "%s (%s): " % (str(db_object.event), str(db_object)) else: - raise ProgrammingError( - "Unsupported context type passed to BaseHandler.set_context()") + raise ProgrammingError("Unsupported context type passed to BaseHandler.set_context()") @abc.abstractmethod def can_handle(self, event): @@ -270,10 +275,18 @@ def handle(self, event): """ raise NotImplementedError() - def record_build(self, event, name, artifact_type, - build_id=None, dep_on=None, state=None, - original_nvr=None, rebuilt_nvr=None, - rebuild_reason=0): + def record_build( + self, + event, + name, + artifact_type, + build_id=None, + dep_on=None, + state=None, + original_nvr=None, + rebuilt_nvr=None, + rebuild_reason=0, + ): """ Record build in db. 
@@ -298,12 +311,20 @@ def record_build(self, event, name, artifact_type, ev = event else: ev = models.Event.get_or_create( - db.session, event.msg_id, event.search_key, event.__class__) - build = models.ArtifactBuild.create(db.session, ev, name, - artifact_type.name.lower(), - build_id, dep_on, state, - original_nvr, rebuilt_nvr, - rebuild_reason) + db.session, event.msg_id, event.search_key, event.__class__ + ) + build = models.ArtifactBuild.create( + db.session, + ev, + name, + artifact_type.name.lower(), + build_id, + dep_on, + state, + original_nvr, + rebuilt_nvr, + rebuild_reason, + ) db.session.commit() return build @@ -347,23 +368,22 @@ def _match_allow_build_rule(self, criteria, rule): if not isinstance(rule[0], str): raise TypeError( "Rule does not have any operator, use any_() or all_() " - "methods to construct the rule: %r" % rule) + "methods to construct the rule: %r" % rule + ) if rule[0] == "any": operator = any elif rule[0] == "all": operator = all else: - raise ValueError( - "Invalid operator %s in rule: %r." % (rule[0], rule)) + raise ValueError("Invalid operator %s in rule: %r." % (rule[0], rule)) - return operator([ - self._match_allow_build_rule(criteria, subrule) - for subrule in rule[1]]) + return operator( + [self._match_allow_build_rule(criteria, subrule) for subrule in rule[1]] + ) if not isinstance(rule, dict): - raise TypeError( - "Rebuild rule must be dict or list, got %r." % rule) + raise TypeError("Rebuild rule must be dict or list, got %r." % rule) # If none of passed criteria matches configured rule, build is not allowed if not (set(rule.keys()) & set(criteria.keys())): @@ -399,10 +419,8 @@ def allow_build(self, artifact_type, **criteria): :rtype: bool """ # Global rules - allowlist_rules = copy.deepcopy( - conf.handler_build_allowlist.get("global", {})) - blocklist_rules = copy.deepcopy( - conf.handler_build_blocklist.get("global", {})) + allowlist_rules = copy.deepcopy(conf.handler_build_allowlist.get("global", {})) + blocklist_rules = copy.deepcopy(conf.handler_build_blocklist.get("global", {})) # This handler rules handler_name = self.name @@ -414,24 +432,23 @@ def allow_build(self, artifact_type, **criteria): if self._match_allow_build_rule(criteria, allowlist): blocklist = blocklist_rules.get(artifact_type.name.lower(), []) if self._match_allow_build_rule(criteria, blocklist): - self.log_debug('%r, type=%r is blocked.', - criteria, artifact_type.name.lower()) + self.log_debug("%r, type=%r is blocked.", criteria, artifact_type.name.lower()) return False - self.log_debug('%r, type=%r is allowed.', - criteria, artifact_type.name.lower()) - self.log_debug('name=%r, allowlist=%r', handler_name, allowlist) + self.log_debug("%r, type=%r is allowed.", criteria, artifact_type.name.lower()) + self.log_debug("name=%r, allowlist=%r", handler_name, allowlist) return True except re.error as exc: - err_msg = ("Error while compiling whilelist rule " - "for :\n" - "Incorrect regular expression: %s\n" - "Allowlist will not take effect" % - (handler_name, artifact_type.name.lower(), str(exc))) + err_msg = ( + "Error while compiling whilelist rule " + "for :\n" + "Incorrect regular expression: %s\n" + "Allowlist will not take effect" + % (handler_name, artifact_type.name.lower(), str(exc)) + ) self.log_error(err_msg) raise UnprocessableEntity(err_msg) - self.log_debug('%r, type=%r is not allowed.', - criteria, artifact_type.name.lower()) + self.log_debug("%r, type=%r is not allowed.", criteria, artifact_type.name.lower()) return False def 
_mark_event_complete_when_all_builds_done(self, db_event): @@ -455,23 +472,41 @@ def _mark_event_complete_when_all_builds_done(self, db_event): if num_failed: db_event.transition( EventState.COMPLETE, - 'Advisory %s: %d of %d container image(s) failed to rebuild.' % ( - db_event.search_key, num_failed, len(db_event.builds.all()),)) + "Advisory %s: %d of %d container image(s) failed to rebuild." + % ( + db_event.search_key, + num_failed, + len(db_event.builds.all()), + ), + ) else: db_event.transition( EventState.COMPLETE, - 'Advisory %s: All %s container images have been rebuilt.' % ( - db_event.search_key, len(db_event.builds.all()),)) + "Advisory %s: All %s container images have been rebuilt." + % ( + db_event.search_key, + len(db_event.builds.all()), + ), + ) class ContainerBuildHandler(BaseHandler): """Handler for building containers""" - def build_container(self, scm_url, branch, target, - repo_urls=None, flatpak=False, isolated=False, - release=None, koji_parent_build=None, - arch_override=None, compose_ids=None, - operator_csv_modifications_url=None): + def build_container( + self, + scm_url, + branch, + target, + repo_urls=None, + flatpak=False, + isolated=False, + release=None, + koji_parent_build=None, + arch_override=None, + compose_ids=None, + operator_csv_modifications_url=None, + ): """ Build a container in Koji. @@ -489,13 +524,17 @@ def build_container(self, scm_url, branch, target, :return: task id returned from Koji buildContainer API. :rtype: int """ - with koji_service( - profile=conf.koji_profile, logger=log, - dry_run=self.dry_run) as service: - log.info('Building container from source: %s, ' - 'release=%r, parent=%r, target=%r, arch=%r, compose_ids=%r', - scm_url, release, koji_parent_build, target, arch_override, - compose_ids) + with koji_service(profile=conf.koji_profile, logger=log, dry_run=self.dry_run) as service: + log.info( + "Building container from source: %s, " + "release=%r, parent=%r, target=%r, arch=%r, compose_ids=%r", + scm_url, + release, + koji_parent_build, + target, + arch_override, + compose_ids, + ) return service.build_container( scm_url, @@ -525,20 +564,21 @@ def build_image_artifact_build(self, build, repo_urls=None): """ if build.state != ArtifactBuildState.PLANNED.value: build.transition( - ArtifactBuildState.FAILED.value, - "Container image build is not in PLANNED state.") + ArtifactBuildState.FAILED.value, "Container image build is not in PLANNED state." + ) return if not build.build_args: build.transition( ArtifactBuildState.FAILED.value, - "Container image does not have 'build_args' filled in.") + "Container image does not have 'build_args' filled in.", + ) return if not build.original_nvr: build.transition( - ArtifactBuildState.FAILED.value, - "Container image does not have original_nvr set.") + ArtifactBuildState.FAILED.value, "Container image does not have original_nvr set." 
+ ) return # If this is a bundle rebuild, check original build's OpenShift versions @@ -549,19 +589,18 @@ def build_image_artifact_build(self, build, repo_urls=None): # check ocp versions range of if build.event.event_type in build_bundle_event_types: with koji_service( - profile=conf.koji_profile, logger=log, - dry_run=self.dry_run, login=False + profile=conf.koji_profile, logger=log, dry_run=self.dry_run, login=False ) as service: ocp_versions_range = service.get_ocp_versions_range(build.original_nvr) if ocp_versions_range and not is_valid_ocp_versions_range(ocp_versions_range): build.transition( ArtifactBuildState.FAILED.value, - "Original image has invalid openshift versions range") + "Original image has invalid openshift versions range", + ) return args = json.loads(build.build_args) - scm_url = "%s/%s#%s" % (conf.git_base_url, args["repository"], - args["commit"]) + scm_url = "%s/%s#%s" % (conf.git_base_url, args["repository"], args["commit"]) branch = args["branch"] target = args["target"] @@ -592,12 +631,12 @@ def build_image_artifact_build(self, build, repo_urls=None): for compose_id in compose_ids: odcs_compose = self.odcs_get_compose(compose_id) - if odcs_compose["state"] in [COMPOSE_STATES['wait'], - COMPOSE_STATES['generating']]: + if odcs_compose["state"] in [COMPOSE_STATES["wait"], COMPOSE_STATES["generating"]]: # In case the ODCS compose is still generating, raise an # exception. - msg = ("Compose %s has not been generated yet. Waiting with " - "rebuild." % (str(compose_id))) + msg = "Compose %s has not been generated yet. Waiting with " "rebuild." % ( + str(compose_id) + ) self.log_info(msg) raise ODCSComposeNotReady(msg) # OSBS can renew a compose if it needs to, so we can just pass @@ -608,13 +647,18 @@ def build_image_artifact_build(self, build, repo_urls=None): self.log_debug( "Artifact build %s has rebuilt_nvr %s already. " "It will be replaced with a new one %s to be rebuilt.", - build, build.rebuilt_nvr, rebuilt_nvr) + build, + build.rebuilt_nvr, + rebuilt_nvr, + ) build.rebuilt_nvr = rebuilt_nvr db.session.commit() return self.build_container( - scm_url, branch, target, + scm_url, + branch, + target, repo_urls=repo_urls, flatpak=flatpak, isolated=isolated, @@ -633,9 +677,9 @@ def odcs_get_compose(self, compose_id): """ if self.dry_run: return { - 'id': compose_id, - 'result_repofile': "http://localhost/%d.repo" % compose_id, - 'state': COMPOSE_STATES['done'], + "id": compose_id, + "result_repofile": "http://localhost/%d.repo" % compose_id, + "state": COMPOSE_STATES["done"], } return create_odcs_client().get_compose(compose_id) @@ -680,24 +724,24 @@ def build_image(build): return except Exception: self.log_except( - "While processing the event with id {} exception occurred" - .format(self._db_event_id)) + "While processing the event with id {} exception occurred".format( + self._db_event_id + ) + ) unknown_exception_occurred = True if unknown_exception_occurred: - build.transition( - ArtifactBuildState.FAILED.value, - "An unknown error occurred.") + build.transition(ArtifactBuildState.FAILED.value, "An unknown error occurred.") elif build.state == ArtifactBuildState.FAILED.value: log.debug(f"Build {build.id} failed: {build.state_reason}") elif not build.build_id: build.transition( - ArtifactBuildState.FAILED.value, - "Error while building container image in Koji.") + ArtifactBuildState.FAILED.value, "Error while building container image in Koji." 
+ ) else: build.transition( - ArtifactBuildState.BUILD.value, - "Building container image in Koji.") + ArtifactBuildState.BUILD.value, "Building container image in Koji." + ) db.session.add(build) db.session.commit() diff --git a/freshmaker/handlers/botas/__init__.py b/freshmaker/handlers/botas/__init__.py index d32dd0c9..4da93c23 100644 --- a/freshmaker/handlers/botas/__init__.py +++ b/freshmaker/handlers/botas/__init__.py @@ -19,4 +19,4 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -from .botas_shipped_advisory import HandleBotasAdvisory # noqa +from .botas_shipped_advisory import HandleBotasAdvisory # noqa diff --git a/freshmaker/handlers/botas/botas_shipped_advisory.py b/freshmaker/handlers/botas/botas_shipped_advisory.py index 8f881cec..e547fd9a 100644 --- a/freshmaker/handlers/botas/botas_shipped_advisory.py +++ b/freshmaker/handlers/botas/botas_shipped_advisory.py @@ -110,7 +110,7 @@ def handle(self, event): return [] def _get_bundles_to_rebuild(self): - """ Get the impacted bundle to rebuild + """Get the impacted bundle to rebuild :return: a tuple of bundles and reason :rtype: tuple ([dict], str) @@ -148,8 +148,9 @@ def _get_bundles_to_rebuild(self): # not present previously. E.g., if nvr_mapping={or_nvr1:new_nvr1, or_nvr2:new_nvr2} and # manual_remapping = {or_nvr2:man_nvr2, or_nvr3:man_nvr3}, then after the manual remapping # nvr_mapping={or_nvr1:new_nvr1, or_nvr2:man_nvr2, or_nvr3:man_nvr3}. - if (self.event.get("requester_metadata_json", False) and - self.event.requester_metadata_json.get("bundle_related_image_overrides", False)): + if self.event.get( + "requester_metadata_json", False + ) and self.event.requester_metadata_json.get("bundle_related_image_overrides", False): log.info("Performing manual NVR remapping") manual_remapping = self.event.requester_metadata_json["bundle_related_image_overrides"] for x in manual_remapping: @@ -334,9 +335,7 @@ def _get_bundles_to_rebuild(self): csv_data = self._get_bundle_csv(bundle_nvr) substitutes_for = csv_data["metadata"]["annotations"].get("olm.substitutesFor") - bundle_data.update( - self._get_csv_updates(csv_name, version, substitutes_for) - ) + bundle_data.update(self._get_csv_updates(csv_name, version, substitutes_for)) bundles_to_rebuild.append(bundle_data) if not bundles_to_rebuild: return ( diff --git a/freshmaker/handlers/internal/cancel_event_on_freshmaker_manage_request.py b/freshmaker/handlers/internal/cancel_event_on_freshmaker_manage_request.py index e8a2e2ce..e464337d 100644 --- a/freshmaker/handlers/internal/cancel_event_on_freshmaker_manage_request.py +++ b/freshmaker/handlers/internal/cancel_event_on_freshmaker_manage_request.py @@ -33,7 +33,7 @@ class CancelEventOnFreshmakerManageRequest(BaseHandler): order = 0 def can_handle(self, event): - if isinstance(event, FreshmakerManageEvent) and event.action == 'eventcancel': + if isinstance(event, FreshmakerManageEvent) and event.action == "eventcancel": return True return False @@ -49,26 +49,32 @@ def handle(self, event): failed_to_cancel_builds_id = [] log_fail = log.error if event.last_try else log.warning - with koji_service( - conf.koji_profile, log, dry_run=event.dry_run) as session: - builds = db.session.query(ArtifactBuild).filter( - ArtifactBuild.id.in_(event.body['builds_id'])).all() + with koji_service(conf.koji_profile, log, dry_run=event.dry_run) as session: + builds = ( + db.session.query(ArtifactBuild) + .filter(ArtifactBuild.id.in_(event.body["builds_id"])) + .all() + ) for build in builds: if 
session.cancel_build(build.build_id): - build.state_reason = 'Build canceled in external build system.' + build.state_reason = "Build canceled in external build system." continue if event.last_try: - build.state_reason = ('Build was NOT canceled in external build system.' - ' Max number of tries reached!') + build.state_reason = ( + "Build was NOT canceled in external build system." + " Max number of tries reached!" + ) failed_to_cancel_builds_id.append(build.id) db.session.commit() if failed_to_cancel_builds_id: - log_fail("Builds which failed to cancel in external build system," - " by DB id: %s; try #%s", - failed_to_cancel_builds_id, event.try_count) + log_fail( + "Builds which failed to cancel in external build system," " by DB id: %s; try #%s", + failed_to_cancel_builds_id, + event.try_count, + ) if event.last_try or not failed_to_cancel_builds_id: return [] - event.body['builds_id'] = failed_to_cancel_builds_id + event.body["builds_id"] = failed_to_cancel_builds_id return [event] diff --git a/freshmaker/handlers/internal/update_db_on_odcs_compose_fail.py b/freshmaker/handlers/internal/update_db_on_odcs_compose_fail.py index a3b8c853..fe441d2b 100644 --- a/freshmaker/handlers/internal/update_db_on_odcs_compose_fail.py +++ b/freshmaker/handlers/internal/update_db_on_odcs_compose_fail.py @@ -51,22 +51,22 @@ def can_handle(self, event): @fail_event_on_handler_exception def handle(self, event): - compose_id = event.compose["id"] self.log_error("ODCS compose %s failed", compose_id) # Get all the builds waiting for this compose. - builds_with_compose = db.session.query(ArtifactBuild).join( - ArtifactBuildCompose).join(Compose) + builds_with_compose = ( + db.session.query(ArtifactBuild).join(ArtifactBuildCompose).join(Compose) + ) builds_with_compose = builds_with_compose.filter( - Compose.odcs_compose_id == compose_id, - ArtifactBuildCompose.compose_id == Compose.id) + Compose.odcs_compose_id == compose_id, ArtifactBuildCompose.compose_id == Compose.id + ) for build in builds_with_compose: build.transition( - ArtifactBuildState.FAILED.value, - "ODCS compose %r is in failed state." % compose_id) + ArtifactBuildState.FAILED.value, "ODCS compose %r is in failed state." 
% compose_id + ) db.session.commit() db_event = builds_with_compose[0].event diff --git a/freshmaker/handlers/koji/__init__.py b/freshmaker/handlers/koji/__init__.py index 687774a7..af73ae61 100644 --- a/freshmaker/handlers/koji/__init__.py +++ b/freshmaker/handlers/koji/__init__.py @@ -23,4 +23,6 @@ from .rebuild_images_on_parent_image_build import RebuildImagesOnParentImageBuild # noqa from .rebuild_images_on_rpm_advisory_change import RebuildImagesOnRPMAdvisoryChange # noqa from .rebuild_images_on_async_manual_build import RebuildImagesOnAsyncManualBuild # noqa -from .rebuild_flatpak_application_on_module_ready import RebuildFlatpakApplicationOnModuleReady # noqa +from .rebuild_flatpak_application_on_module_ready import ( # noqa + RebuildFlatpakApplicationOnModuleReady, +) diff --git a/freshmaker/handlers/koji/rebuild_flatpak_application_on_module_ready.py b/freshmaker/handlers/koji/rebuild_flatpak_application_on_module_ready.py index d6068346..55025f1b 100644 --- a/freshmaker/handlers/koji/rebuild_flatpak_application_on_module_ready.py +++ b/freshmaker/handlers/koji/rebuild_flatpak_application_on_module_ready.py @@ -80,7 +80,6 @@ def can_handle(self, event): @fail_event_on_handler_exception def handle(self, event): - if event.dry_run: self.force_dry_run() @@ -90,9 +89,7 @@ def handle(self, event): self.set_context(db_event) self.errata = Errata() - self.advisory_module_nvrs = self.errata.get_attached_build_nvrs( - event.advisory.errata_id - ) + self.advisory_module_nvrs = self.errata.get_attached_build_nvrs(event.advisory.errata_id) try: builds = self._handle_or_skip(event) @@ -180,9 +177,7 @@ def _image_modules_mapping(self): image_modules_mapping = defaultdict(set) req_session = self._get_requests_session() - with koji_service( - conf.koji_profile, log, login=False, dry_run=self.dry_run - ) as session: + with koji_service(conf.koji_profile, log, login=False, dry_run=self.dry_run) as session: for advisory_module_nvr in self.advisory_module_nvrs: mmd = session.get_modulemd(advisory_module_nvr) content_index_url = "{}/released/contents/modules/{}:{}.json".format( @@ -197,9 +192,7 @@ def _image_modules_mapping(self): image_nvr = image_info["ImageNvr"] image_modules_mapping[image_nvr].add(advisory_module_nvr) else: - self.log_error( - "Fetching module %s data failed.", advisory_module_nvr - ) + self.log_error("Fetching module %s data failed.", advisory_module_nvr) return image_modules_mapping @@ -212,9 +205,7 @@ def _filter_images_with_higher_rpm_nvr(self, images): :return: a list of ContainerImage instances which can be auto rebuilt. 
:rtype: list """ - errata_rpm_nvrs = self.errata.get_binary_rpm_nvrs( - self.event.advisory.errata_id - ) + errata_rpm_nvrs = self.errata.get_binary_rpm_nvrs(self.event.advisory.errata_id) pyxis = PyxisAPI(server_url=conf.pyxis_graphql_url) if errata_rpm_nvrs: @@ -266,20 +257,15 @@ def _updated_compose_source( compose = self.odcs.get_compose(compose_id) source_type = compose.get("source_type") if source_type == PungiSourceType.MODULE: - name_stream_set = { - f"{n}:{s}" for n, s, v, c in _compose_sources(compose) - } + name_stream_set = {f"{n}:{s}" for n, s, v, c in _compose_sources(compose)} mapping = { - f"{n}:{s}": f"{n}:{s}:{v}:{c}" - for n, s, v, c in _compose_sources(compose) + f"{n}:{s}": f"{n}:{s}:{v}:{c}" for n, s, v, c in _compose_sources(compose) } if not name_stream_set.isdisjoint(module_name_stream_set): updated_composes.update( mapping[name_stream] - for name_stream in name_stream_set.difference( - module_name_stream_set - ) + for name_stream in name_stream_set.difference(module_name_stream_set) ) updated_composes.update(module_nsvc_set) @@ -307,9 +293,7 @@ def _record_builds(self, images, image_modules_mapping): # Dict with {brew_build_nvr: ArtifactBuild, ...} mapping. builds = {} - with koji_service( - conf.koji_profile, log, login=False, dry_run=self.dry_run - ) as session: + with koji_service(conf.koji_profile, log, login=False, dry_run=self.dry_run) as session: for image in images: self.set_context(db_event) diff --git a/freshmaker/handlers/koji/rebuild_images_on_async_manual_build.py b/freshmaker/handlers/koji/rebuild_images_on_async_manual_build.py index c9937d52..5ba2874c 100644 --- a/freshmaker/handlers/koji/rebuild_images_on_async_manual_build.py +++ b/freshmaker/handlers/koji/rebuild_images_on_async_manual_build.py @@ -36,7 +36,7 @@ class RebuildImagesOnAsyncManualBuild(ContainerBuildHandler): """Rebuild images on async.manual.build""" - name = 'RebuildImagesOnAsyncManualBuild' + name = "RebuildImagesOnAsyncManualBuild" def can_handle(self, event): return isinstance(event, FreshmakerAsyncManualBuildEvent) @@ -58,8 +58,10 @@ def handle(self, event): # Check if we are allowed to build this image. if not self.event.is_allowed(self, ArtifactType.IMAGE): - msg = ("This image rebuild is not allowed by internal policy. " - f"message_id: {event.msg_id}") + msg = ( + "This image rebuild is not allowed by internal policy. " + f"message_id: {event.msg_id}" + ) db_event.transition(EventState.SKIPPED, msg) db.session.commit() self.log_info(msg) @@ -96,18 +98,16 @@ def handle(self, event): db.session.commit() return [] - if all([build.state == ArtifactBuildState.FAILED.value - for build in builds.values()]): + if all([build.state == ArtifactBuildState.FAILED.value for build in builds.values()]): db_event.transition( - EventState.COMPLETE, - "No container images to rebuild, all are in failed state.") + EventState.COMPLETE, "No container images to rebuild, all are in failed state." + ) db.session.commit() return [] - self.start_to_build_images( - db_event.get_image_builds_in_first_batch(db.session)) + self.start_to_build_images(db_event.get_image_builds_in_first_batch(db.session)) - msg = 'Rebuilding %d container images.' % (len(db_event.builds.all())) + msg = "Rebuilding %d container images." 
% (len(db_event.builds.all())) db_event.transition(EventState.BUILDING, msg) return [] @@ -139,14 +139,14 @@ def filter_out_unrelated_images(self, batches): for batch in batches: # We expect the first item in the list to always be in the requested images # if not, there must be something wrong... maybe we should return an error. - if batch[0]['brew']['package'] not in self.event.container_images: - self.log_info('Unexpected error identifying images to rebuild.') + if batch[0]["brew"]["package"] not in self.event.container_images: + self.log_info("Unexpected error identifying images to rebuild.") return [] filtered_batch = [] maybe_batch = [] for image in batch: maybe_batch.append(image) - if image['brew']['package'] in self.event.container_images: + if image["brew"]["package"] in self.event.container_images: filtered_batch.extend(maybe_batch) maybe_batch = [] new_batches.append(filtered_batch) @@ -155,9 +155,7 @@ def filter_out_unrelated_images(self, batches): def generate_batches(self, to_rebuild, images, pyxis): # Get all the directly affected images so that any parents that are not marked as # directly affected can be set in _images_to_rebuild_to_batches - directly_affected_nvrs = { - image.nvr for image in images if image.get("directly_affected") - } + directly_affected_nvrs = {image.nvr for image in images if image.get("directly_affected")} # Now generate batches from deduplicated list and return it. return pyxis._images_to_rebuild_to_batches(to_rebuild, directly_affected_nvrs) @@ -194,7 +192,7 @@ def get_image_tree(self, pyxis, image, tree): if parent: parent = parent[0] parent.resolve(pyxis) - image['parent'] = parent + image["parent"] = parent tree.append(parent) return self.get_image_tree(pyxis, parent, tree) return tree @@ -211,10 +209,7 @@ def filter_images_based_on_dist_git_branch(self, images, db_event): :return: list of images to rebuild. If the event gets skipped, return empty list. :rtype: list """ - with koji_service( - conf.koji_profile, log, dry_run=conf.dry_run, - login=False) as session: - + with koji_service(conf.koji_profile, log, dry_run=conf.dry_run, login=False) as session: # Sort images by nvr images = sorted_by_nvr(images, reverse=True) @@ -237,7 +232,7 @@ def filter_images_based_on_dist_git_branch(self, images, db_event): build = None git_branch = None - package = image['brew']['package'] + package = image["brew"]["package"] # if package is already in images_to_rebuild we don't need to keep searching # since the images were sorted by NVR in the beginning if package not in images_to_rebuild: @@ -250,8 +245,12 @@ def filter_images_based_on_dist_git_branch(self, images, db_event): task_info = task[2] git_branch = task_info.get("git_branch") if len(task_info) else None - if (build and task_id and git_branch and - self.event.dist_git_branch == git_branch): + if ( + build + and task_id + and git_branch + and self.event.dist_git_branch == git_branch + ): images_to_rebuild[package] = image if not images_to_rebuild or len(images_to_rebuild) < len(self.event.container_images): @@ -261,9 +260,11 @@ def filter_images_based_on_dist_git_branch(self, images, db_event): # that was never built before. # We cannot return to the API with an error, because the request already completed # at this point. Let's mark this build as FAILED then. - msg = ("One or more of the requested image was never built before for the " - f"requested branch: {self.event.dist_git_branch}. 
" - "Cannot build it, please change your request.") + msg = ( + "One or more of the requested image was never built before for the " + f"requested branch: {self.event.dist_git_branch}. " + "Cannot build it, please change your request." + ) missing_images = set(self.event.container_images) - set(images_to_rebuild.keys()) if missing_images: msg += f" Problematic images are {missing_images}" @@ -353,10 +354,13 @@ def _record_batches(self, batches, db_event, pyxis): # We don't need to rebuild the nvr this time. The release value # will be automatically generated by OSBS. build = self.record_build( - self.event, image_name, ArtifactType.IMAGE, + self.event, + image_name, + ArtifactType.IMAGE, dep_on=dep_on, state=ArtifactBuildState.PLANNED.value, - original_nvr=nvr) + original_nvr=nvr, + ) # Set context to particular build so logging shows this build # in case of error. @@ -364,18 +368,19 @@ def _record_batches(self, batches, db_event, pyxis): image.resolve(pyxis) build.transition(state, state_reason) - build_target = ( - self.event.brew_target if self.event.brew_target else image["target"]) - build.build_args = json.dumps({ - "repository": image["repository"], - "commit": image["commit"], - "original_parent": parent_nvr, - "target": build_target, - "branch": image["git_branch"], - "arches": image["arches"], - "flatpak": image.get("flatpak", False), - "isolated": image.get("isolated", True), - }) + build_target = self.event.brew_target if self.event.brew_target else image["target"] + build.build_args = json.dumps( + { + "repository": image["repository"], + "commit": image["commit"], + "original_parent": parent_nvr, + "target": build_target, + "branch": image["git_branch"], + "arches": image["arches"], + "flatpak": image.get("flatpak", False), + "isolated": image.get("isolated", True), + } + ) db.session.commit() builds[nvr] = build diff --git a/freshmaker/handlers/koji/rebuild_images_on_odcs_compose_done.py b/freshmaker/handlers/koji/rebuild_images_on_odcs_compose_done.py index 01019673..225d3646 100644 --- a/freshmaker/handlers/koji/rebuild_images_on_odcs_compose_done.py +++ b/freshmaker/handlers/koji/rebuild_images_on_odcs_compose_done.py @@ -22,15 +22,13 @@ # Written by Chenxiong Qi from freshmaker import db -from freshmaker.models import ( - ArtifactBuild, ArtifactBuildState, Compose, ArtifactBuildCompose) -from freshmaker.handlers import ( - ContainerBuildHandler, fail_event_on_handler_exception) +from freshmaker.models import ArtifactBuild, ArtifactBuildState, Compose, ArtifactBuildCompose +from freshmaker.handlers import ContainerBuildHandler, fail_event_on_handler_exception from freshmaker.events import ODCSComposeStateChangeEvent from odcs.common.types import COMPOSE_STATES -__all__ = ('RebuildImagesOnODCSComposeDone',) +__all__ = ("RebuildImagesOnODCSComposeDone",) class RebuildImagesOnODCSComposeDone(ContainerBuildHandler): @@ -40,44 +38,50 @@ def can_handle(self, event): if not isinstance(event, ODCSComposeStateChangeEvent): return False - compose_id = event.compose['id'] + compose_id = event.compose["id"] # check db to see whether this compose exists in db - found_compose = Compose.query.filter_by(odcs_compose_id=compose_id).first() # db.session.query(ArtifactBuild).filter_by(odcs_compose_id=compose_id).first() + found_compose = Compose.query.filter_by( + odcs_compose_id=compose_id + ).first() # db.session.query(ArtifactBuild).filter_by(odcs_compose_id=compose_id).first() if not found_compose: return False - return event.compose['state'] == COMPOSE_STATES['done'] + return 
event.compose["state"] == COMPOSE_STATES["done"] @fail_event_on_handler_exception def handle(self, event): if event.dry_run: self.force_dry_run() - compose_id = event.compose['id'] + compose_id = event.compose["id"] - self.log_info('ODCS compose %s finished', compose_id) + self.log_info("ODCS compose %s finished", compose_id) - builds_ready_to_rebuild = db.session.query(ArtifactBuild).join( - ArtifactBuildCompose).join(Compose) + builds_ready_to_rebuild = ( + db.session.query(ArtifactBuild).join(ArtifactBuildCompose).join(Compose) + ) # Get all the builds waiting for this compose in PLANNED state ... builds_ready_to_rebuild = builds_ready_to_rebuild.filter( ArtifactBuild.state == ArtifactBuildState.PLANNED.value, Compose.odcs_compose_id == compose_id, - ArtifactBuildCompose.compose_id == Compose.id) + ArtifactBuildCompose.compose_id == Compose.id, + ) # ... and depending on DONE parent image or parent image which is # not planned to be built in this Event (dep_on == None). builds_ready_to_rebuild = [ - b for b in builds_ready_to_rebuild if - b.dep_on is None or b.dep_on.state == ArtifactBuildState.DONE.value + b + for b in builds_ready_to_rebuild + if b.dep_on is None or b.dep_on.state == ArtifactBuildState.DONE.value ] if not self.dry_run: # In non-dry-run mode, check that all the composes are ready. # In dry-run mode, the composes are fake, so they are always ready. builds_ready_to_rebuild = filter( - lambda build: build.composes_ready, builds_ready_to_rebuild) + lambda build: build.composes_ready, builds_ready_to_rebuild + ) # Start the rebuild. self.start_to_build_images(builds_ready_to_rebuild) diff --git a/freshmaker/handlers/koji/rebuild_images_on_parent_image_build.py b/freshmaker/handlers/koji/rebuild_images_on_parent_image_build.py index 2d6105ee..d0fe3f11 100644 --- a/freshmaker/handlers/koji/rebuild_images_on_parent_image_build.py +++ b/freshmaker/handlers/koji/rebuild_images_on_parent_image_build.py @@ -27,12 +27,16 @@ from freshmaker import db from freshmaker.errata import Errata from freshmaker.events import ( - BrewContainerTaskStateChangeEvent, ErrataRPMAdvisoryShippedEvent, - ManualRebuildWithAdvisoryEvent) + BrewContainerTaskStateChangeEvent, + ErrataRPMAdvisoryShippedEvent, + ManualRebuildWithAdvisoryEvent, +) from freshmaker.models import ArtifactBuild, EVENT_TYPES -from freshmaker.handlers import (ContainerBuildHandler, - fail_artifact_build_on_handler_exception, - fail_event_on_handler_exception) +from freshmaker.handlers import ( + ContainerBuildHandler, + fail_artifact_build_on_handler_exception, + fail_event_on_handler_exception, +) from freshmaker.kojiservice import koji_service from freshmaker.types import ArtifactType, ArtifactBuildState, EventState @@ -40,7 +44,7 @@ class RebuildImagesOnParentImageBuild(ContainerBuildHandler): """Rebuild container when a dependecy container is built in Brew""" - name = 'RebuildImagesOnParentImageBuild' + name = "RebuildImagesOnParentImageBuild" def can_handle(self, event): if not isinstance(event, BrewContainerTaskStateChangeEvent): @@ -49,10 +53,11 @@ def can_handle(self, event): build_id = event.task_id # check db to see whether this build exists in db - found_build = db.session.query(ArtifactBuild).filter_by( - type=ArtifactType.IMAGE.value, - build_id=build_id - ).first() + found_build = ( + db.session.query(ArtifactBuild) + .filter_by(type=ArtifactType.IMAGE.value, build_id=build_id) + .first() + ) if not found_build: return False @@ -70,35 +75,38 @@ def handle(self, event): build_id = event.task_id # check db to see 
whether this build exists in db - found_build = db.session.query(ArtifactBuild).filter_by( - type=ArtifactType.IMAGE.value, - build_id=build_id - ).first() + found_build = ( + db.session.query(ArtifactBuild) + .filter_by(type=ArtifactType.IMAGE.value, build_id=build_id) + .first() + ) self.set_context(found_build) - if found_build.event.state not in [EventState.INITIALIZED.value, - EventState.BUILDING.value]: + if found_build.event.state not in [EventState.INITIALIZED.value, EventState.BUILDING.value]: return self.update_db_build_state(build_id, found_build, event) self.rebuild_dependent_containers(found_build) @fail_artifact_build_on_handler_exception() def update_db_build_state(self, build_id, found_build, event): - """ Update build state in db. """ - if event.new_state == 'CLOSED': + """Update build state in db.""" + if event.new_state == "CLOSED": # if build is triggered by an advisory, verify the container # contains latest RPMs from the advisory if found_build.event.event_type_id in ( - EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], - EVENT_TYPES[ManualRebuildWithAdvisoryEvent]): + EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], + EVENT_TYPES[ManualRebuildWithAdvisoryEvent], + ): errata_id = found_build.event.search_key # build_id is actually task id in build system, find out the actual build first with koji_service( - conf.koji_profile, log, login=False, - dry_run=self.dry_run) as session: + conf.koji_profile, log, login=False, dry_run=self.dry_run + ) as session: container_build_id = session.get_container_build_id_from_task(build_id) - ret, msg = self._verify_advisory_rpms_in_container_build(errata_id, container_build_id) + ret, msg = self._verify_advisory_rpms_in_container_build( + errata_id, container_build_id + ) if ret: found_build.transition(ArtifactBuildState.DONE.value, "Built successfully.") else: @@ -107,7 +115,7 @@ def update_db_build_state(self, build_id, found_build, event): # for other builds, mark them as DONE else: found_build.transition(ArtifactBuildState.DONE.value, "Built successfully.") - if event.new_state == 'FAILED': + if event.new_state == "FAILED": args = json.loads(found_build.build_args) if "retry_count" not in args: args["retry_count"] = 0 @@ -116,28 +124,30 @@ def update_db_build_state(self, build_id, found_build, event): if args["retry_count"] < 3: found_build.transition( ArtifactBuildState.PLANNED.value, - "Retrying failed build %s" % (str(found_build.build_id))) + "Retrying failed build %s" % (str(found_build.build_id)), + ) self.start_to_build_images([found_build]) else: - found_build.transition( - ArtifactBuildState.FAILED.value, - "Failed to build in Koji.") + found_build.transition(ArtifactBuildState.FAILED.value, "Failed to build in Koji.") db.session.commit() @fail_artifact_build_on_handler_exception() def rebuild_dependent_containers(self, found_build): - """ Rebuild containers depend on the success build as necessary. 
""" + """Rebuild containers depend on the success build as necessary.""" if found_build.state == ArtifactBuildState.DONE.value: # check db to see whether there is any planned image build # depends on this build - planned_builds = db.session.query(ArtifactBuild).filter_by( - type=ArtifactType.IMAGE.value, - state=ArtifactBuildState.PLANNED.value, - dep_on=found_build - ).all() - - log.info("Found following PLANNED builds to rebuild that " - "depends on %r", found_build) + planned_builds = ( + db.session.query(ArtifactBuild) + .filter_by( + type=ArtifactType.IMAGE.value, + state=ArtifactBuildState.PLANNED.value, + dep_on=found_build, + ) + .all() + ) + + log.info("Found following PLANNED builds to rebuild that " "depends on %r", found_build) for build in planned_builds: log.info(" %r", build) @@ -153,7 +163,7 @@ def _verify_advisory_rpms_in_container_build(self, errata_id, container_build_id verify container built on brew has the latest rpms from an advisory """ if self.dry_run: - return (True, '') + return (True, "") # Get rpms in advisory. There can be multiple versions of RPMs with # the same name, so we group them by a name in `advisory_rpms_by_name` @@ -164,17 +174,14 @@ def _verify_advisory_rpms_in_container_build(self, errata_id, container_build_id if binary_rpm_nvrs: for nvr in binary_rpm_nvrs: parsed_nvr = rpmlib.parse_nvr(nvr) - if parsed_nvr['name'] not in advisory_rpms_by_name: - advisory_rpms_by_name[parsed_nvr['name']] = set() - advisory_rpms_by_name[parsed_nvr['name']].add(nvr) + if parsed_nvr["name"] not in advisory_rpms_by_name: + advisory_rpms_by_name[parsed_nvr["name"]] = set() + advisory_rpms_by_name[parsed_nvr["name"]].add(nvr) # get rpms in container - with koji_service( - conf.koji_profile, log, login=False, - dry_run=self.dry_run) as session: + with koji_service(conf.koji_profile, log, login=False, dry_run=self.dry_run) as session: container_rpms = session.get_rpms_in_container(container_build_id) - container_rpms_by_name = { - rpmlib.parse_nvr(x)['name']: x for x in container_rpms} + container_rpms_by_name = {rpmlib.parse_nvr(x)["name"]: x for x in container_rpms} # For each RPM name in advisory, check that the RPM exists in the # built container and its version is the same as one RPM in the @@ -188,8 +195,10 @@ def _verify_advisory_rpms_in_container_build(self, errata_id, container_build_id unmatched_rpms.append(rpm_name) if unmatched_rpms: - msg = ("The following RPMs in container build (%s) do not match " - "with the latest RPMs in advisory (%s):\n%s" % - (container_build_id, errata_id, unmatched_rpms)) + msg = ( + "The following RPMs in container build (%s) do not match " + "with the latest RPMs in advisory (%s):\n%s" + % (container_build_id, errata_id, unmatched_rpms) + ) return (False, msg) return (True, "") diff --git a/freshmaker/handlers/koji/rebuild_images_on_rpm_advisory_change.py b/freshmaker/handlers/koji/rebuild_images_on_rpm_advisory_change.py index 52d10563..e4f76eba 100644 --- a/freshmaker/handlers/koji/rebuild_images_on_rpm_advisory_change.py +++ b/freshmaker/handlers/koji/rebuild_images_on_rpm_advisory_change.py @@ -27,14 +27,12 @@ import re from freshmaker import conf, db -from freshmaker.events import ( - ErrataRPMAdvisoryShippedEvent, ManualRebuildWithAdvisoryEvent) +from freshmaker.events import ErrataRPMAdvisoryShippedEvent, ManualRebuildWithAdvisoryEvent from freshmaker.handlers import ContainerBuildHandler, fail_event_on_handler_exception from freshmaker.image import PyxisAPI from freshmaker.pulp import Pulp from freshmaker.errata import 
Errata -from freshmaker.types import ( - ArtifactType, ArtifactBuildState, EventState, RebuildReason) +from freshmaker.types import ArtifactType, ArtifactBuildState, EventState, RebuildReason from freshmaker.models import Event, Compose, ArtifactBuild @@ -44,14 +42,14 @@ class RebuildImagesOnRPMAdvisoryChange(ContainerBuildHandler): advisory. """ - name = 'RebuildImagesOnRPMAdvisoryChange' + name = "RebuildImagesOnRPMAdvisoryChange" def can_handle(self, event): if not isinstance(event, ErrataRPMAdvisoryShippedEvent): return False - if not {'rpm', 'module'} & set(event.advisory.content_types): - self.log_info('Skip non-RPM and non-module advisory %s.', event.advisory.errata_id) + if not {"rpm", "module"} & set(event.advisory.content_types): + self.log_info("Skip non-RPM and non-module advisory %s.", event.advisory.errata_id) return False return True @@ -78,8 +76,10 @@ def handle(self, event): # Check if we are allowed to build this advisory. if not self.event.is_allowed(self): - msg = ("Errata advisory {0} is not allowed by internal policy " - "to trigger rebuilds.".format(event.advisory.errata_id)) + msg = ( + "Errata advisory {0} is not allowed by internal policy " + "to trigger rebuilds.".format(event.advisory.errata_id) + ) db_event.transition(EventState.SKIPPED, msg) db.session.commit() self.log_info(msg) @@ -102,21 +102,20 @@ def handle(self, event): builds = self._record_batches(batches, event) if not builds: - msg = 'No container images to rebuild for advisory %r' % event.advisory.name + msg = "No container images to rebuild for advisory %r" % event.advisory.name self.log_info(msg) db_event.transition(EventState.SKIPPED, msg) db.session.commit() return [] - if all([build.state == ArtifactBuildState.FAILED.value - for build in builds.values()]): + if all([build.state == ArtifactBuildState.FAILED.value for build in builds.values()]): db_event.transition( - EventState.COMPLETE, - "No container images to rebuild, all are in failed state.") + EventState.COMPLETE, "No container images to rebuild, all are in failed state." + ) db.session.commit() return [] - if event.advisory.state != 'SHIPPED_LIVE': + if event.advisory.state != "SHIPPED_LIVE": # If freshmaker is configured to rebuild images only when advisory # moves to SHIPPED_LIVE state, there is no need to generate new # composes for rebuild as all signed RPMs should already be @@ -124,18 +123,18 @@ def handle(self, event): # # Generate the ODCS compose with RPMs from the current advisory. repo_urls = self.odcs.prepare_yum_repos_for_rebuilds(db_event) - self.log_info( - "Following repositories will be used for the rebuild:") + self.log_info("Following repositories will be used for the rebuild:") for url in repo_urls: self.log_info(" - %s", url) # Log what we are going to rebuild self._check_images_to_rebuild(db_event, builds) - self.start_to_build_images( - db_event.get_image_builds_in_first_batch(db.session)) + self.start_to_build_images(db_event.get_image_builds_in_first_batch(db.session)) - msg = 'Advisory %s: Rebuilding %d container images.' % ( - db_event.search_key, len(db_event.builds.all())) + msg = "Advisory %s: Rebuilding %d container images." % ( + db_event.search_key, + len(db_event.builds.all()), + ) db_event.transition(EventState.BUILDING, msg) return [] @@ -147,7 +146,7 @@ def _check_images_to_rebuild(self, db_event, builds): :param builds dict: list of docker images to build as returned by _find_images_to_rebuild(...). 
""" - self.log_info('Found container images to rebuild in following order:') + self.log_info("Found container images to rebuild in following order:") batch = 0 printed = [] printed_cnt = 0 @@ -155,7 +154,7 @@ def _check_images_to_rebuild(self, db_event, builds): db_event_builds_cnt = len(db_event.builds.all()) while printed_cnt != builds_cnt or printed_cnt != db_event_builds_cnt: - self.log_info(' Batch %d:', batch) + self.log_info(" Batch %d:", batch) old_printed_count = printed_cnt @@ -167,18 +166,22 @@ def _check_images_to_rebuild(self, db_event, builds): # batch 0 - this handles the base images # In call cases, print only builds which have not been printed # so far. - if (build.original_nvr not in printed and - ((build.dep_on and build.dep_on.original_nvr in printed) or - (not build.dep_on and batch == 0))): + if build.original_nvr not in printed and ( + (build.dep_on and build.dep_on.original_nvr in printed) + or (not build.dep_on and batch == 0) + ): args = json.loads(build.build_args) if build.dep_on: based_on = "based on %s" % build.dep_on.rebuilt_nvr else: - based_on = "based on %s" % args["original_parent"] \ - if args["original_parent"] else "base image" + based_on = ( + "based on %s" % args["original_parent"] + if args["original_parent"] + else "base image" + ) self.log_info( - ' - %s#%s (%s)' % - (args["repository"], args["commit"], based_on)) + " - %s#%s (%s)" % (args["repository"], args["commit"], based_on) + ) printed.append(build.original_nvr) printed_cnt = len(printed) @@ -188,8 +191,8 @@ def _check_images_to_rebuild(self, db_event, builds): # print error and stop the rebuild. if old_printed_count == printed_cnt: db_event.builds_transition( - ArtifactBuildState.FAILED.value, - "No image to be built in batch %d." % (batch)) + ArtifactBuildState.FAILED.value, "No image to be built in batch %d." % (batch) + ) self.log_error("Dumping the builds:") for build in builds.values(): self.log_error(" %r", build.original_nvr) @@ -236,20 +239,20 @@ def _record_batches(self, batches, event, builds=None): nvr = image.nvr if nvr in builds: - self.log_debug("Skipping recording build %s, " - "it is already in db", nvr) + self.log_debug("Skipping recording build %s, " "it is already in db", nvr) continue parent_build = db_event.get_artifact_build_from_event_dependencies(nvr) if parent_build: self.log_debug( - "Skipping recording build %s, " - "it is already built in dependant event %r", nvr, parent_build[0].event_id) + "Skipping recording build %s, " "it is already built in dependant event %r", + nvr, + parent_build[0].event_id, + ) continue self.log_debug("Recording %s", nvr) - parent_nvr = image["parent"].nvr \ - if "parent" in image and image["parent"] else None + parent_nvr = image["parent"].nvr if "parent" in image and image["parent"] else None dep_on = builds[parent_nvr] if parent_nvr in builds else None if parent_nvr: @@ -264,8 +267,9 @@ def _record_batches(self, batches, event, builds=None): elif dep_on and dep_on.state == ArtifactBuildState.FAILED.value: # If this artifact build depends on a build which cannot # be built by Freshmaker, mark this one as failed too. - state_reason = "Cannot build artifact, because its " \ - "dependency cannot be built." + state_reason = ( + "Cannot build artifact, because its " "dependency cannot be built." + ) state = ArtifactBuildState.FAILED.value else: state_reason = "" @@ -277,17 +281,20 @@ def _record_batches(self, batches, event, builds=None): # rebuild. 
If some image is not in the latest released version and # it is included in a rebuild, it must be just a dependency of # other image. - if image.get('directly_affected'): + if image.get("directly_affected"): rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value else: rebuild_reason = RebuildReason.DEPENDENCY.value build = self.record_build( - event, image_name, ArtifactType.IMAGE, + event, + image_name, + ArtifactType.IMAGE, dep_on=dep_on, state=ArtifactBuildState.PLANNED.value, original_nvr=nvr, - rebuild_reason=rebuild_reason) + rebuild_reason=rebuild_reason, + ) # Set context to particular build so logging shows this build # in case of error. @@ -295,17 +302,19 @@ def _record_batches(self, batches, event, builds=None): build.transition(state, state_reason) - build.build_args = json.dumps({ - "repository": image["repository"], - "commit": image["commit"], - "original_parent": parent_nvr, - "target": image["target"], - "branch": image["git_branch"], - "arches": image["arches"], - "renewed_odcs_compose_ids": image["odcs_compose_ids"], - "flatpak": image.get("flatpak", False), - "isolated": image.get("isolated", True), - }) + build.build_args = json.dumps( + { + "repository": image["repository"], + "commit": image["commit"], + "original_parent": parent_nvr, + "target": image["target"], + "branch": image["git_branch"], + "arches": image["arches"], + "renewed_odcs_compose_ids": image["odcs_compose_ids"], + "flatpak": image.get("flatpak", False), + "isolated": image.get("isolated", True), + } + ) db.session.commit() @@ -330,11 +339,10 @@ def _record_batches(self, batches, event, builds=None): if cache_key in odcs_cache: db_compose = odcs_cache[cache_key] else: - compose = self.odcs.prepare_pulp_repo( - build, list(missing_content_sets)) + compose = self.odcs.prepare_pulp_repo(build, list(missing_content_sets)) if build.state != ArtifactBuildState.FAILED.value: - db_compose = Compose(odcs_compose_id=compose['id']) + db_compose = Compose(odcs_compose_id=compose["id"]) db.session.add(db_compose) db.session.commit() odcs_cache[cache_key] = db_compose @@ -351,7 +359,7 @@ def _record_batches(self, batches, event, builds=None): if not image["published"]: compose = self.odcs.prepare_odcs_compose_with_image_rpms(image) if compose: - db_compose = Compose(odcs_compose_id=compose['id']) + db_compose = Compose(odcs_compose_id=compose["id"]) db.session.add(db_compose) db.session.commit() build.add_composes(db.session, [db_compose]) @@ -377,12 +385,12 @@ def _filter_out_not_allowed_builds(self, image): parsed_nvr = koji.parse_NVR(image.nvr) if not self.event.is_allowed( - self, image_name=parsed_nvr["name"], - image_version=parsed_nvr["version"], - image_release=parsed_nvr["release"]): - self.log_info( - "Skipping rebuild of image %s, not allowed by configuration", - image.nvr) + self, + image_name=parsed_nvr["name"], + image_version=parsed_nvr["version"], + image_release=parsed_nvr["release"], + ): + self.log_info("Skipping rebuild of image %s, not allowed by configuration", image.nvr) return True return False @@ -405,17 +413,14 @@ def _find_images_to_rebuild(self, errata_id, skip_nvrs=None): # up eventually when advisories are shipped. 
pulp_repo_ids = list(set(errata.get_pulp_repository_ids(errata_id))) - pulp = Pulp( - server_url=conf.pulp_server_url, cert=(conf.pulp_crt_path, conf.pulp_key_path) - ) + pulp = Pulp(server_url=conf.pulp_server_url, cert=(conf.pulp_crt_path, conf.pulp_key_path)) content_sets = pulp.get_content_set_by_repo_ids(pulp_repo_ids) # Some container builds declare Pulp repos directly instead of content # sets, but they are stored in the same location as content sets so they # can be treated the same content_sets.extend(pulp_repo_ids) - self.log_info('RPMs from advisory ends up in following content sets: ' - '%s', content_sets) + self.log_info("RPMs from advisory ends up in following content sets: " "%s", content_sets) # Query images from Pyxis by signed RPM's srpm name and found # content sets @@ -443,12 +448,16 @@ def _find_images_to_rebuild(self, errata_id, skip_nvrs=None): affected_nvrs = errata.get_binary_rpm_nvrs(errata_id) self.log_info( - "Going to find all the container images to rebuild as " - "result of %r update.", affected_nvrs) + "Going to find all the container images to rebuild as " "result of %r update.", + affected_nvrs, + ) batches = pyxis.find_images_to_rebuild( - affected_nvrs, content_sets, + affected_nvrs, + content_sets, filter_fnc=self._filter_out_not_allowed_builds, - published=published, release_categories=release_categories, + published=published, + release_categories=release_categories, leaf_container_images=leaf_container_images, - skip_nvrs=skip_nvrs) + skip_nvrs=skip_nvrs, + ) return batches diff --git a/freshmaker/image.py b/freshmaker/image.py index 9ea54b51..4a908d7b 100644 --- a/freshmaker/image.py +++ b/freshmaker/image.py @@ -50,9 +50,7 @@ def __init__(self, image, pyxis_api_instance): def __eq__(self, other): return ( - self.name == other.name and - self.version == other.version and - self.repos == other.repos + self.name == other.name and self.version == other.version and self.repos == other.repos ) def __str__(self): @@ -60,19 +58,21 @@ def __str__(self): def issubset(self, other): return ( - self.name == other.name and - self.version == other.version and - self.repos.issubset(other.repos) + self.name == other.name + and self.version == other.version + and self.repos.issubset(other.repos) ) class KojiLookupError(ValueError): - """ Koji lookup error """ + """Koji lookup error""" + pass class ExtraRepoNotConfiguredError(ValueError): - """ Extra repo required but missing in config """ + """Extra repo required but missing in config""" + pass @@ -96,11 +96,11 @@ def create(cls, data): image = cls() image.update(data) - arch = data.get('architecture') - image['multi_arch_rpm_manifest'] = {} - rpm_manifest = data.get('rpm_manifest') + arch = data.get("architecture") + image["multi_arch_rpm_manifest"] = {} + rpm_manifest = data.get("rpm_manifest") if arch and rpm_manifest: - image['multi_arch_rpm_manifest'][arch] = rpm_manifest + image["multi_arch_rpm_manifest"][arch] = rpm_manifest return image @@ -109,7 +109,7 @@ def __hash__(self): @property def nvr(self): - return self['brew']['build'] + return self["brew"]["build"] def log_error(self, err): """ @@ -118,13 +118,13 @@ def log_error(self, err): to self['error'] with ';' separator. 
""" prefix = "" - if 'brew' in self and 'build' in self['brew']: + if "brew" in self and "build" in self["brew"]: prefix = self.nvr + ": " log.error("%s%s", prefix, err) - if 'error' not in self or not self['error']: - self['error'] = str(err) + if "error" not in self or not self["error"]: + self["error"] = str(err) else: - self['error'] += "; " + str(err) + self["error"] += "; " + str(err) def update_multi_arch(self, image): """ @@ -134,13 +134,13 @@ def update_multi_arch(self, image): arch attributes from :rtype: None """ - image_arch = image.get('architecture') + image_arch = image.get("architecture") if not image_arch: return - image_rpm_manifest = image.get('rpm_manifest') + image_rpm_manifest = image.get("rpm_manifest") if image_rpm_manifest: - self['multi_arch_rpm_manifest'][image_arch] = image_rpm_manifest + self["multi_arch_rpm_manifest"][image_arch] = image_rpm_manifest @staticmethod def _get_default_additional_data(): @@ -167,31 +167,33 @@ def get_additional_data_from_koji(cls, nvr): """ data = cls._get_default_additional_data() - with koji_service( - conf.koji_profile, log, dry_run=conf.dry_run, - login=False) as session: + with koji_service(conf.koji_profile, log, dry_run=conf.dry_run, login=False) as session: build = session.get_build(nvr) if not build: - raise KojiLookupError( - "Cannot find Koji build with nvr %s in Koji" % nvr) - - if 'task_id' not in build or not build['task_id']: - if ("extra" in build and - "container_koji_task_id" in build["extra"] and - build["extra"]["container_koji_task_id"]): - build['task_id'] = build["extra"]['container_koji_task_id'] + raise KojiLookupError("Cannot find Koji build with nvr %s in Koji" % nvr) + + if "task_id" not in build or not build["task_id"]: + if ( + "extra" in build + and "container_koji_task_id" in build["extra"] + and build["extra"]["container_koji_task_id"] + ): + build["task_id"] = build["extra"]["container_koji_task_id"] else: raise KojiLookupError( "Cannot find task_id or container_koji_task_id " - "in the Koji build %r" % build) + "in the Koji build %r" % build + ) - fs_koji_task_id = build.get('extra', {}).get('filesystem_koji_task_id') + fs_koji_task_id = build.get("extra", {}).get("filesystem_koji_task_id") if fs_koji_task_id: parsed_nvr = koji.parse_NVR(nvr) name_version = f'{parsed_nvr["name"]}-{parsed_nvr["version"]}' if name_version not in conf.image_extra_repo: - msg = (f'{name_version} is a base image, but extra image repo for it ' - f'is not specified in the Freshmaker configuration.') + msg = ( + f"{name_version} is a base image, but extra image repo for it " + f"is not specified in the Freshmaker configuration." 
+ ) raise ExtraRepoNotConfiguredError(msg) extra_image = build.get("extra", {}).get("image", {}) @@ -206,8 +208,7 @@ def get_additional_data_from_koji(cls, nvr): if flatpak: data["flatpak"] = flatpak - brew_task = session.get_task_request( - build['task_id']) + brew_task = session.get_task_request(build["task_id"]) source = brew_task[0] data["target"] = brew_task[1] extra_data = brew_task[2] @@ -240,14 +241,13 @@ def get_additional_data_from_koji(cls, nvr): data["commit"] = m.group("commit") break - if not data['commit']: - raise KojiLookupError( - "Cannot find valid source of Koji build %r" % build) + if not data["commit"]: + raise KojiLookupError("Cannot find valid source of Koji build %r" % build) if not conf.supply_arch_overrides: - data['arches'] = None + data["arches"] = None else: - data['arches'] = cls._get_arches_from_koji(session, build['build_id']) + data["arches"] = cls._get_arches_from_koji(session, build["build_id"]) return data @@ -255,9 +255,9 @@ def get_additional_data_from_koji(cls, nvr): def _get_arches_from_koji(koji_session, build_id): archives = koji_session.list_archives(build_id=build_id) arches = [ - archive['extra']['image']['arch'] - for archive in archives if archive['btype'] == 'image'] - return ' '.join(sorted(arches)) + archive["extra"]["image"]["arch"] for archive in archives if archive["btype"] == "image" + ] + return " ".join(sorted(arches)) def resolve_commit(self): """ @@ -305,8 +305,11 @@ def resolve_compose_sources(self): compose_sources.update(source.split()) self["compose_sources"] = list(compose_sources) - log.info("Container image %s uses following compose sources: %r", - self.nvr, self["compose_sources"]) + log.info( + "Container image %s uses following compose sources: %r", + self.nvr, + self["compose_sources"], + ) def resolve_content_sets(self, pyxis_api_instance, children=None): """ @@ -322,8 +325,9 @@ def resolve_content_sets(self, pyxis_api_instance, children=None): # ContainerImage now has content_sets field, so use it if available. if "content_sets" in self and self["content_sets"]: - log.info("Container image %s uses following content sets: %r", - self.nvr, self["content_sets"]) + log.info( + "Container image %s uses following content sets: %r", self.nvr, self["content_sets"] + ) if "content_sets_source" not in self: self["content_sets_source"] = "pyxis_container_image" return @@ -332,9 +336,12 @@ def resolve_content_sets(self, pyxis_api_instance, children=None): # try to get them from children image. self["content_sets_source"] = "child_image" if not children: - log.warning("Container image %s does not have 'content_sets' set " - "in Pyxis and also does not have any children, " - "this is suspicious.", self.nvr) + log.warning( + "Container image %s does not have 'content_sets' set " + "in Pyxis and also does not have any children, " + "this is suspicious.", + self.nvr, + ) self.update({"content_sets": []}) return @@ -346,16 +353,22 @@ def resolve_content_sets(self, pyxis_api_instance, children=None): if not child["content_sets"]: continue - log.info("Container image %s does not have 'content-sets' set " - "in Pyxis. Using child image %s content_sets: %r", - self.nvr, child.nvr, - child["content_sets"]) + log.info( + "Container image %s does not have 'content-sets' set " + "in Pyxis. 
Using child image %s content_sets: %r", + self.nvr, + child.nvr, + child["content_sets"], + ) self.update({"content_sets": child["content_sets"]}) return - log.warning("Container image %s does not have 'content_sets' set " - "in Pyxis as well as its children, this " - "is suspicious.", self.nvr) + log.warning( + "Container image %s does not have 'content_sets' set " + "in Pyxis as well as its children, this " + "is suspicious.", + self.nvr, + ) self.update({"content_sets": []}) def resolve_published(self, pyxis_api_instance): @@ -400,8 +413,9 @@ def get_rpms(self): """ if "rpm_manifest" not in self or not self["rpm_manifest"]: # Do not filter if we are not sure what RPMs are in the image. - log.info(("Not filtering out this image because we " - "are not sure what RPMs are in there.")) + log.info( + ("Not filtering out this image because we " "are not sure what RPMs are in there.") + ) return # There is always just single "rpm_manifest". Pyxis returns # this as a list, because it is reference to @@ -409,30 +423,32 @@ def get_rpms(self): rpm_manifest = self["rpm_manifest"][0] if "rpms" not in rpm_manifest: # Do not filter if we are not sure what RPMs are in the image. - log.info(("Not filtering out this image because we " - "are not sure what RPMs are in there.")) + log.info( + ("Not filtering out this image because we " "are not sure what RPMs are in there.") + ) return return rpm_manifest["rpms"] def get_registry_repositories(self, pyxis_api_instance): - if self['repositories']: - return self['repositories'] + if self["repositories"]: + return self["repositories"] parsed_nvr = kobo.rpmlib.parse_nvr(self.nvr) - if '.' not in parsed_nvr['release']: - log.debug('There are no repositories for %s', self.nvr) + if "." not in parsed_nvr["release"]: + log.debug("There are no repositories for %s", self.nvr) return [] - original_release = parsed_nvr['release'].rsplit('.', 1)[0] - parsed_nvr['release'] = original_release - original_nvr = '{name}-{version}-{release}'.format(**parsed_nvr) - log.debug('Finding repositories for %s through %s', self.nvr, original_nvr) + original_release = parsed_nvr["release"].rsplit(".", 1)[0] + parsed_nvr["release"] = original_release + original_nvr = "{name}-{version}-{release}".format(**parsed_nvr) + log.debug("Finding repositories for %s through %s", self.nvr, original_nvr) previous_images = pyxis_api_instance.get_images_by_nvrs( - [original_nvr], published=None, include_rpm_manifest=False) + [original_nvr], published=None, include_rpm_manifest=False + ) if not previous_images: - log.warning('original_nvr %s not found in Pyxis', original_nvr) + log.warning("original_nvr %s not found in Pyxis", original_nvr) return [] return previous_images[0].get_registry_repositories(pyxis_api_instance) @@ -441,8 +457,7 @@ def get_registry_repositories(self, pyxis_api_instance): class PyxisAPI(object): """Interface to query Pyxis""" - region = dogpile.cache.make_region().configure( - conf.dogpile_cache_backend, expiration_time=120) + region = dogpile.cache.make_region().configure(conf.dogpile_cache_backend, expiration_time=120) def __init__(self, server_url): """Initialize PyxisAPI instance @@ -450,10 +465,7 @@ def __init__(self, server_url): :param str server_url: Pyxis GraphQL url """ self.server_url = server_url - self.pyxis = PyxisGQL( - url=server_url, - cert=(conf.pyxis_certificate, conf.pyxis_private_key) - ) + self.pyxis = PyxisGQL(url=server_url, cert=(conf.pyxis_certificate, conf.pyxis_private_key)) def _dicts_to_images(self, image_dicts): """Convert image dictionaries to 
list of ContainerImage""" @@ -487,10 +499,10 @@ def _dicts_to_images(self, image_dicts): for k, temp_images in groupby(sorted_images, key=lambda item: item.nvr): temp_images = list(temp_images) img = temp_images[0] - if 'content_sets' in img and len(temp_images) > 1: - new_content_sets = set(img.get('content_sets')) + if "content_sets" in img and len(temp_images) > 1: + new_content_sets = set(img.get("content_sets")) for i in temp_images[1:]: - new_content_sets.update(i.get('content_sets', [])) + new_content_sets.update(i.get("content_sets", [])) img["content_sets"] = list(new_content_sets) images.append(img) @@ -530,9 +542,11 @@ def find_repositories( repos = [] for repo_data in repositories: - if auto_rebuild and not repo_data.get('auto_rebuild_tags'): - log.info('"auto_rebuild_tags" not set for %s repository, ignoring repository', - repo_data["repository"]) + if auto_rebuild and not repo_data.get("auto_rebuild_tags"): + log.info( + '"auto_rebuild_tags" not set for %s repository, ignoring repository', + repo_data["repository"], + ) continue repo = ContainerRepository() repo.update(repo_data) @@ -577,8 +591,10 @@ def filter_out_images_with_higher_rpm_nvr(self, images, rpm_name_to_nvrs): # - nvr1 older: -1 # We want to rebuild only images with RPM NVR lower than # input RPM NVR, therefore we check for -1. - if kobo.rpmlib.compare_nvr( - image_rpm_nvra, input_rpm_nvr, ignore_epoch=True) == -1: + if ( + kobo.rpmlib.compare_nvr(image_rpm_nvra, input_rpm_nvr, ignore_epoch=True) + == -1 + ): ret.append(image) image_included = True break @@ -589,9 +605,11 @@ def filter_out_images_with_higher_rpm_nvr(self, images, rpm_name_to_nvrs): # The else clause executes after the loop completes normally. # This means that the loop did not encounter a break statement. # In our case, this means that we filtered out the image. 
- log.info("Will not rebuild %s, because it does not contain " - "older version of any input package: %r" % ( - image.nvr, rpm_name_to_nvrs.values())) + log.info( + "Will not rebuild %s, because it does not contain " + "older version of any input package: %r" + % (image.nvr, rpm_name_to_nvrs.values()) + ) return ret def filter_out_modularity_mismatch(self, images, rpm_name_to_nvrs): @@ -627,8 +645,8 @@ def filter_out_modularity_mismatch(self, images, rpm_name_to_nvrs): else: log.info( "Filtered out %s because there is a modularity mismatch between the RPMs " - "from the image and the advisory: %r" % ( - image.nvr, rpm_name_to_nvrs.values())) + "from the image and the advisory: %r" % (image.nvr, rpm_name_to_nvrs.values()) + ) return ret def filter_out_images_based_on_content_set(self, images, content_sets): @@ -652,17 +670,19 @@ def filter_out_images_based_on_content_set(self, images, content_sets): ret = [] for image in images: if not content_sets & set(image["content_sets"]): - log.info(f"Will not rebuild {image.nvr} because its content_sets " - "({image['content_sets']}) are not related to the requested content_sets" - " ({content_sets})") + log.info( + f"Will not rebuild {image.nvr} because its content_sets " + "({image['content_sets']}) are not related to the requested content_sets" + " ({content_sets})" + ) else: ret.append(image) return ret @retry(wait_on=requests.exceptions.ConnectionError, logger=log) def find_images_with_included_rpms( - self, content_sets, rpm_nvrs, repositories, published=True, - include_rpm_manifest=True): + self, content_sets, rpm_nvrs, repositories, published=True, include_rpm_manifest=True + ): """ Query Pyxis and find the containerImages in the given containerRepositories. @@ -692,7 +712,9 @@ def find_images_with_included_rpms( name = koji.parse_NVR(rpm_nvr)["name"] rpm_name_to_nvrs.setdefault(name, []).append(rpm_nvr) - images = self.pyxis.find_images_by_installed_rpms(rpm_name_to_nvrs, content_sets, repositories, published, auto_rebuild_tags) + images = self.pyxis.find_images_by_installed_rpms( + rpm_name_to_nvrs, content_sets, repositories, published, auto_rebuild_tags + ) if not images: return [] @@ -702,7 +724,11 @@ def find_images_with_included_rpms( for image in image_dicts: # modify Pyxis image data to simulate the data structure returned from LightBlue - rpms = [rpm for rpm in image["edges"]["rpm_manifest"]["data"]["rpms"] if rpm["name"] in rpm_name_to_nvrs] + rpms = [ + rpm + for rpm in image["edges"]["rpm_manifest"]["data"]["rpms"] + if rpm["name"] in rpm_name_to_nvrs + ] image["rpm_manifest"] = [{"rpms": rpms}] del image["edges"] @@ -752,9 +778,16 @@ def find_images_with_included_rpms( images = self.filter_out_images_based_on_content_set(images, set(content_sets)) return images - def get_images_by_nvrs(self, nvrs, published=True, content_sets=None, - rpm_nvrs=None, include_rpm_manifest=True, - rpm_names=None, pyxis_api_instance=None): + def get_images_by_nvrs( + self, + nvrs, + published=True, + content_sets=None, + rpm_nvrs=None, + include_rpm_manifest=True, + rpm_names=None, + pyxis_api_instance=None, + ): """Query Pyxis and returns containerImages defined by list of `nvrs`. 
@@ -829,6 +862,7 @@ def _image_has_rpm(image, rpms): image_dicts = list(filter(lambda x: _image_has_rpm(x, rpm_names), image_dicts)) if published is not None: + def _image_visibility_is(image, published): # published: boolean value, True or False for repo in image["repositories"]: @@ -837,6 +871,7 @@ def _image_visibility_is(image, published): if repo["published"] is published: return True return False + image_dicts = list(filter(lambda x: _image_visibility_is(x, published), image_dicts)) images = self._dicts_to_images(image_dicts) @@ -896,8 +931,10 @@ def find_parent_brew_build_nvr_from_child(self, child_image, pyxis_api_instance= # it means we found a base image and there's no parent image. if child_image["parent_image_builds"]: parent_brew_build = [ - i["nvr"] for i in child_image["parent_image_builds"].values() - if i["id"] == child_image["parent_build_id"]][0] + i["nvr"] + for i in child_image["parent_image_builds"].values() + if i["id"] == child_image["parent_build_id"] + ][0] return parent_brew_build @@ -929,8 +966,10 @@ def find_parent_images_with_package( if not parent_brew_build: return images parent_image = self.get_images_by_nvrs( - [parent_brew_build], rpm_names=[rpm_name], published=None, - pyxis_api_instance=pyxis_api_instance + [parent_brew_build], + rpm_names=[rpm_name], + published=None, + pyxis_api_instance=pyxis_api_instance, ) if parent_image: @@ -943,7 +982,7 @@ def find_parent_images_with_package( if images: if parent_image: - images[-1]['parent'] = parent_image + images[-1]["parent"] = parent_image else: # If we did not find the parent image with the package, # we still want to set the parent of the last image with @@ -958,11 +997,12 @@ def find_parent_images_with_package( parent.resolve(pyxis_api_instance, images) else: err = "Couldn't find parent image %s. Pyxis data is probably incomplete" % ( - parent_brew_build) + parent_brew_build + ) log.error(err) - if not images[-1]['error']: - images[-1]['error'] = err - images[-1]['parent'] = parent + if not images[-1]["error"]: + images[-1]["error"] = err + images[-1]["parent"] = parent if not parent_image: return images @@ -972,9 +1012,14 @@ def find_parent_images_with_package( ) def find_images_with_packages_from_content_set( - self, rpm_nvrs, content_sets, filter_fnc=None, published=True, - release_categories=conf.container_release_categories, - leaf_container_images=None): + self, + rpm_nvrs, + content_sets, + filter_fnc=None, + published=True, + release_categories=conf.container_release_categories, + leaf_container_images=None, + ): """Query Pyxis and find containers which contain given package from one of content sets @@ -1011,7 +1056,8 @@ def find_images_with_packages_from_content_set( for i in range(0, len(repos), chunk_size): repos_chunk = {k: repos[k] for k in islice(repos_iterator, chunk_size)} images_chunk = self.find_images_with_included_rpms( - content_sets, rpm_nvrs, repos_chunk, published) + content_sets, rpm_nvrs, repos_chunk, published + ) if images_chunk: images.extend(images_chunk) else: @@ -1029,9 +1075,9 @@ def _name_version_key(item): return f"{nvr['name']}-{nvr['version']}" images = [ - next(grouped_images) for _, grouped_images in groupby( - sorted_by_nvr(images, reverse=True), - key=_name_version_key + next(grouped_images) + for _, grouped_images in groupby( + sorted_by_nvr(images, reverse=True), key=_name_version_key ) ] @@ -1043,8 +1089,7 @@ def _resolve_image(image): # We do not set "children" here in resolve_content_sets call, because # published images should have the content_set set. 
pyxis_api_instance = PyxisGQL( - url=self.server_url, - cert=(conf.pyxis_certificate, conf.pyxis_private_key) + url=self.server_url, cert=(conf.pyxis_certificate, conf.pyxis_private_key) ) image.resolve(pyxis_api_instance, None) @@ -1127,7 +1172,9 @@ def _deduplicate_images_to_rebuild(self, to_rebuild): # Sort the lists in image_group_to_nvrs dict. for image_group in image_group_to_nvrs.keys(): - image_group_to_nvrs[image_group] = sorted_by_nvr(image_group_to_nvrs[image_group], reverse=True) + image_group_to_nvrs[image_group] = sorted_by_nvr( + image_group_to_nvrs[image_group], reverse=True + ) # There might be container image NVRs which are not released yet, # but some released image is already built on top of them. @@ -1184,19 +1231,26 @@ def _deduplicate_images_to_rebuild(self, to_rebuild): # Go through the older images and in case the parent image differs, # update its parents according to latest image parents. - for nvr in nvrs[latest_released_nvr_index + 1:]: + for nvr in nvrs[latest_released_nvr_index + 1 :]: image = nvr_to_image[nvr] if not image.get("parent"): continue parent_nvr_dict = koji.parse_NVR(image["parent"].nvr) parent_name = parent_nvr_dict["name"] parent_version = parent_nvr_dict["version"] - if (parent_name, parent_version) != (latest_parent_name, latest_parent_version): + if (parent_name, parent_version) != ( + latest_parent_name, + latest_parent_version, + ): for image_id, parent_id in nvr_to_coordinates[nvr]: - latest_image_id, latest_parent_id = nvr_to_coordinates[latest_released_nvr][0] - to_rebuild[image_id][parent_id:] = to_rebuild[latest_image_id][latest_parent_id:] + latest_image_id, latest_parent_id = nvr_to_coordinates[ + latest_released_nvr + ][0] + to_rebuild[image_id][parent_id:] = to_rebuild[latest_image_id][ + latest_parent_id: + ] elif phase == "update_to_latest": - for nvr in nvrs[latest_released_nvr_index + 1:]: + for nvr in nvrs[latest_released_nvr_index + 1 :]: for image_id, parent_id in nvr_to_coordinates[nvr]: # At first replace the image in to_rebuild based # on the coordinates from temp dict. @@ -1206,7 +1260,9 @@ def _deduplicate_images_to_rebuild(self, to_rebuild): # the ["parent"] record for the child image to point to the image # with highest NVR. if parent_id != 0: - to_rebuild[image_id][parent_id - 1]["parent"] = nvr_to_image[latest_released_nvr] + to_rebuild[image_id][parent_id - 1]["parent"] = nvr_to_image[ + latest_released_nvr + ] return to_rebuild @@ -1271,9 +1327,15 @@ def _images_to_rebuild_to_batches(self, to_rebuild, directly_affected_nvrs): return batches def find_images_to_rebuild( - self, rpm_nvrs, content_sets, published=True, - release_categories=conf.container_release_categories, - filter_fnc=None, leaf_container_images=None, skip_nvrs=None): + self, + rpm_nvrs, + content_sets, + published=True, + release_categories=conf.container_release_categories, + filter_fnc=None, + leaf_container_images=None, + skip_nvrs=None, + ): """ Find images to rebuild through image build layers @@ -1303,8 +1365,13 @@ def find_images_to_rebuild( :param list skip_nvrs: List of NVRs of images to be skipped. 
""" images = self.find_images_with_packages_from_content_set( - rpm_nvrs, content_sets, filter_fnc, published, - release_categories, leaf_container_images=leaf_container_images) + rpm_nvrs, + content_sets, + filter_fnc, + published, + release_categories, + leaf_container_images=leaf_container_images, + ) # Remove any hotfix images from list of images for img in images[:]: @@ -1323,8 +1390,7 @@ def _get_images_to_rebuild(image): Find out parent images to rebuild, helper called from threadpool. """ pyxis_api_instance = PyxisGQL( - url=self.server_url, - cert=(conf.pyxis_certificate, conf.pyxis_private_key) + url=self.server_url, cert=(conf.pyxis_certificate, conf.pyxis_private_key) ) rebuild_list = {} # per binary rpm name rebuild list. @@ -1340,20 +1406,21 @@ def _get_images_to_rebuild(image): image, rpm_name, images=[], pyxis_api_instance=pyxis_api_instance ) if rebuild_list[rpm_name]: - image['parent'] = rebuild_list[rpm_name][0] + image["parent"] = rebuild_list[rpm_name][0] else: parent_brew_build = self.find_parent_brew_build_nvr_from_child( image, pyxis_api_instance ) if parent_brew_build: parent = self.get_images_by_nvrs( - [parent_brew_build], published=None, - pyxis_api_instance=pyxis_api_instance + [parent_brew_build], + published=None, + pyxis_api_instance=pyxis_api_instance, ) if parent: parent = parent[0] parent.resolve(pyxis_api_instance, images) - image['parent'] = parent + image["parent"] = parent rebuild_list[rpm_name].insert(0, image) return rebuild_list @@ -1378,9 +1445,7 @@ def _get_images_to_rebuild(image): to_rebuild = self._deduplicate_images_to_rebuild(to_rebuild) # Get all the directly affected images so that any parents that are not marked as # directly affected can be set in _images_to_rebuild_to_batches - directly_affected_nvrs = { - image.nvr for image in images if image.get("directly_affected") - } + directly_affected_nvrs = {image.nvr for image in images if image.get("directly_affected")} # Some images that aren't marked as directly affected may have already been fixed # in the latest published version of the image. Use those images instead. 
self._filter_out_already_fixed_published_images( @@ -1465,7 +1530,7 @@ def _filter_out_already_fixed_published_images( log.info( "The image %s will be replaced with the latest published image of %s", image.nvr, - fixed_published_image.nvr + fixed_published_image.nvr, ) # On the first iteration, this is the last directly affected image in image_group child_image = image_group[i - 1] @@ -1573,7 +1638,10 @@ def get_fixed_published_image(self, name, version, image_group, rpm_nvrs, conten for candidate_image in candidate_images[1:]: parsed_candidate_image_nvr = kobo.rpmlib.parse_nvr(candidate_image.nvr) if ( - kobo.rpmlib.compare_nvr(parsed_candidate_image_nvr, parsed_fixed_published_image_nvr) > 0 + kobo.rpmlib.compare_nvr( + parsed_candidate_image_nvr, parsed_fixed_published_image_nvr + ) + > 0 ): fixed_published_image = candidate_image @@ -1581,9 +1649,7 @@ def get_fixed_published_image(self, name, version, image_group, rpm_nvrs, conten # metadata required by Freshmaker images = self.pyxis.find_images_by_nvr(fixed_published_image.nvr) if not images: - log.error( - "The image with the NVR %s was not found in Pyxis", fixed_published_image.nvr - ) + log.error("The image with the NVR %s was not found in Pyxis", fixed_published_image.nvr) return images = self.postprocess_images(images, rpm_name_to_nvrs) diff --git a/freshmaker/image_verifier.py b/freshmaker/image_verifier.py index 593f11c0..3df3e98a 100644 --- a/freshmaker/image_verifier.py +++ b/freshmaker/image_verifier.py @@ -69,7 +69,8 @@ def _verify_repository_data(self, repo): else: raise ValueError( "Only published repositories or unpublished exceptions can be rebuilt, but " - "this repository is not published.") + "this repository is not published." + ) if "auto_rebuild_tags" not in repo or repo["auto_rebuild_tags"] is None: raise ValueError('The "auto_rebuild_tags" in COMET is not set.') @@ -88,8 +89,7 @@ def _verify_image_data(self, image): if not image["content_sets"]: raise ValueError( 'Found image "%s" in this repository, but it cannot be rebuilt, because ' - 'the "content_sets" are not set for this image.' - % image["brew"]["build"] + 'the "content_sets" are not set for this image.' % image["brew"]["build"] ) def _get_repository_from_name(self, repo_name: str): @@ -102,9 +102,7 @@ def _get_repository_from_name(self, repo_name: str): raise ValueError("Cannot get repository %s from Pyxis." % repo_name) if len(repos) != 1: - raise ValueError( - "Multiple records found in Pyxis for image repository %s." % repo_name - ) + raise ValueError("Multiple records found in Pyxis for image repository %s." % repo_name) return repos[0] @@ -113,13 +111,21 @@ def _get_repository_from_image(self, image): Returns the ContainerRepository object based on the image defined by the image NVR. """ if "repositories" not in image or not image["repositories"]: - raise ValueError("Cannot get repository for image %s from Pyxis." % image["brew"]["build"]) + raise ValueError( + "Cannot get repository for image %s from Pyxis." 
% image["brew"]["build"] + ) - repos = [repo for repo in image["repositories"] if repo["registry"] != "conf.image_build_repository_registries"] + repos = [ + repo + for repo in image["repositories"] + if repo["registry"] != "conf.image_build_repository_registries" + ] image_repo = repos[0] # returns a single repository - return self.pyxis.get_repository_by_registry_path(image_repo["registry"], image_repo["repository"]) + return self.pyxis.get_repository_by_registry_path( + image_repo["registry"], image_repo["repository"] + ) def verify_image(self, image_nvr: str) -> dict[str, list[str]]: """ @@ -162,7 +168,7 @@ def verify_repository(self, repo_name: str) -> DataElements: data: DataElements = { "repository": {"auto_rebuild_tags": repo["auto_rebuild_tags"]}, - "images": {} + "images": {}, } images = self.pyxis.find_images_by_repository(repo["repository"], repo["auto_rebuild_tags"]) diff --git a/freshmaker/kojiservice.py b/freshmaker/kojiservice.py index ffebeb8c..934ad7ed 100644 --- a/freshmaker/kojiservice.py +++ b/freshmaker/kojiservice.py @@ -27,7 +27,7 @@ # in freshmaker.handlers __init__.py. We cannot "import koji" there, because # it would import freshmaker.handlers.koji, so instead, we import it here # and in freshmaker.handler do "from freshmaker.kojiservice import parse_NVR". -from koji import parse_NVR # noqa +from koji import parse_NVR # noqa from kobo import rpmlib from io import BytesIO from zipfile import ZipFile @@ -39,7 +39,8 @@ import yaml import gi -gi.require_version('Modulemd', '2.0') + +gi.require_version("Modulemd", "2.0") from gi.repository import Modulemd # noqa E402 import freshmaker.utils # noqa E402 @@ -50,7 +51,8 @@ class KojiLookupError(ValueError): - """ Koji lookup error """ + """Koji lookup error""" + pass @@ -63,21 +65,21 @@ class KojiService(object): As a wrapper of Koji API, new APIs could be added as well. """ + region = dogpile.cache.make_region().configure(conf.dogpile_cache_backend) # Used to generate incremental task id in dry run mode. _FAKE_TASK_ID = 0 def __init__(self, profile=None, dry_run=False): - self._config = koji.read_config(profile or 'koji') + self._config = koji.read_config(profile or "koji") self.dry_run = dry_run # In case we run in DRY_RUN mode, we need to initialize # _FAKE_TASK_ID to the id of last ODCS builds to have the IDs # increasing and unique even between Freshmaker restarts. 
         if self.dry_run:
-            KojiService._FAKE_TASK_ID = \
-                ArtifactBuild.get_lowest_build_id(db.session) - 1
+            KojiService._FAKE_TASK_ID = ArtifactBuild.get_lowest_build_id(db.session) - 1
             if KojiService._FAKE_TASK_ID >= 0:
                 KojiService._FAKE_TASK_ID = -1
 
@@ -87,21 +89,20 @@ def config(self):
 
     @property
     def weburl(self):
-        return self.config['weburl']
+        return self.config["weburl"]
 
     @property
     def topurl(self):
-        return self.config['topurl']
+        return self.config["topurl"]
 
     @property
     def server(self):
-        return self.config['server']
+        return self.config["server"]
 
     @property
     def session(self):
-        if not hasattr(self, '_session'):
-            self._session = koji.ClientSession(self.config['server'],
-                                               self.config)
+        if not hasattr(self, "_session"):
+            self._session = koji.ClientSession(self.config["server"], self.config)
         return self._session
 
     @freshmaker.utils.retry(wait_on=koji.AuthError, logger=log)
@@ -111,7 +112,7 @@ def krb_login(self):
             self.session.gssapi_login(
                 principal=conf.krb_auth_principal,
                 keytab=conf.krb_auth_client_keytab,
-                ccache=conf.krb_auth_ccache_file
+                ccache=conf.krb_auth_ccache_file,
             )
         else:
             log.info("DRY RUN: Skipping login in dry run mode.")
@@ -133,8 +134,10 @@ def _fake_build_container(self, source_url, build_target, build_opts):
         :rtype: number
         :return: Fake task_id.
         """
-        log.info("DRY RUN: Calling fake buildContainer with args: %r",
-                 (source_url, build_target, build_opts))
+        log.info(
+            "DRY RUN: Calling fake buildContainer with args: %r",
+            (source_url, build_target, build_opts),
+        )
 
         # Get the task_id
         KojiService._FAKE_TASK_ID -= 1
@@ -143,10 +146,16 @@ def _fake_build_container(self, source_url, build_target, build_opts):
         # Parse the source_url to get the name of container and generate
         # fake event.
         m = re.match(r".*/(?P<container>[^#]*)", source_url)
-        container = m.group('container')
+        container = m.group("container")
         event = BrewContainerTaskStateChangeEvent(
-            "fake_koji_msg_%d" % task_id, container, build_opts["git_branch"],
-            build_target, task_id, "BUILDING", "CLOSED")
+            "fake_koji_msg_%d" % task_id,
+            container,
+            build_opts["git_branch"],
+            build_target,
+            task_id,
+            "BUILDING",
+            "CLOSED",
+        )
         event.dry_run = self.dry_run
 
         # Inject the fake event.
@@ -155,11 +164,21 @@ def _fake_build_container(self, source_url, build_target, build_opts):
 
         return task_id
 
-    def build_container(self, source_url, branch, target,
-                        scratch=None, repo_urls=None, flatpak=False, isolated=False,
-                        release=None, koji_parent_build=None,
-                        arch_override=None, compose_ids=None,
-                        operator_csv_modifications_url=None):
+    def build_container(
+        self,
+        source_url,
+        branch,
+        target,
+        scratch=None,
+        repo_urls=None,
+        flatpak=False,
+        isolated=False,
+        release=None,
+        koji_parent_build=None,
+        arch_override=None,
+        compose_ids=None,
+        operator_csv_modifications_url=None,
+    ):
         """Build container by buildContainer
 
         :param str source_url: the container repository URL.
@@ -184,41 +203,38 @@ def build_container(self, source_url, branch, target, build_target = target build_opts = { - 'scratch': False if scratch is None else scratch, - 'git_branch': branch, + "scratch": False if scratch is None else scratch, + "git_branch": branch, } if repo_urls: - build_opts['yum_repourls'] = repo_urls + build_opts["yum_repourls"] = repo_urls if compose_ids: - build_opts['compose_ids'] = compose_ids + build_opts["compose_ids"] = compose_ids if flatpak: - build_opts['flatpak'] = True + build_opts["flatpak"] = True if isolated: - build_opts['isolated'] = True + build_opts["isolated"] = True if koji_parent_build: - build_opts['koji_parent_build'] = koji_parent_build + build_opts["koji_parent_build"] = koji_parent_build # arch-override is only allowed for isolated or scratch builds if arch_override and (isolated or scratch): - build_opts['arch_override'] = arch_override + build_opts["arch_override"] = arch_override if release: - build_opts['release'] = release + build_opts["release"] = release if operator_csv_modifications_url: - build_opts['operator_csv_modifications_url'] = operator_csv_modifications_url + build_opts["operator_csv_modifications_url"] = operator_csv_modifications_url - log.debug('Build from target: %s', build_target) - log.debug('Build options: %s', build_opts) + log.debug("Build from target: %s", build_target) + log.debug("Build options: %s", build_opts) if not self.dry_run: - task_id = self.session.buildContainer(source_url, build_target, - build_opts) + task_id = self.session.buildContainer(source_url, build_target, build_opts) else: - task_id = self._fake_build_container(source_url, build_target, - build_opts) + task_id = self._fake_build_container(source_url, build_target, build_opts) - log.info('Task %s is created to build docker image for %s', - task_id, source_url) - log.info('Task info: %s/taskinfo?taskID=%s', self.weburl, task_id) + log.info("Task %s is created to build docker image for %s", task_id, source_url) + log.info("Task info: %s/taskinfo?taskID=%s", self.weburl, task_id) return task_id @@ -228,8 +244,7 @@ def cancel_build(self, build_id): @region.cache_on_arguments() def get_build_rpms(self, build_nvr, arches=None): build_info = self.session.getBuild(build_nvr) - return self.session.listRPMs(buildID=build_info['id'], - arches=arches) + return self.session.listRPMs(buildID=build_info["id"], arches=arches) @region.cache_on_arguments() def get_build(self, buildinfo): @@ -270,14 +285,14 @@ def get_container_build_id_from_task(self, task_id): subtasks = self.session.getTaskChildren(task_id) if subtasks: for task in subtasks: - task_result = self.session.getTaskResult(task['id']) - builds = task_result.get('koji_builds', None) + task_result = self.session.getTaskResult(task["id"]) + builds = task_result.get("koji_builds", None) if builds: build_id = int(builds.pop()) break else: task_result = self.session.getTaskResult(task_id) - builds = task_result.get('koji_builds', None) + builds = task_result.get("koji_builds", None) if builds: build_id = int(builds.pop()) return build_id @@ -292,7 +307,7 @@ def get_cg_metadata_url(self, buildinfo): Note: it doesn't check whether the metadata.json exists or not. 
""" build_info = self.get_build(buildinfo) - return koji.PathInfo(topdir=self.topurl).build(build_info) + '/metadata.json' + return koji.PathInfo(topdir=self.topurl).build(build_info) + "/metadata.json" @freshmaker.utils.retry(wait_on=(requests.Timeout, requests.ConnectionError), logger=log) def load_cg_metadata(self, buildinfo): @@ -314,8 +329,12 @@ def load_cg_metadata(self, buildinfo): raise except Exception as e: if cg_metadata_url: - log.error("Unable to load CG metadata for build (%r) from url (%s): %s", - buildinfo, cg_metadata_url, str(e)) + log.error( + "Unable to load CG metadata for build (%r) from url (%s): %s", + buildinfo, + cg_metadata_url, + str(e), + ) else: log.error("Unable to load CG metadata for build (%r): %s", str(e)) raise @@ -332,11 +351,11 @@ def get_rpms_in_container(self, buildinfo): """ rpms = set() cg_metadata = self.load_cg_metadata(buildinfo) - outputs = cg_metadata['output'] + outputs = cg_metadata["output"] for out in outputs: - if out['type'] == 'docker-image': - components = out['components'] - rpms = set([rpmlib.make_nvr(rpm) for rpm in components if rpm['type'] == 'rpm']) + if out["type"] == "docker-image": + components = out["components"] + rpms = set([rpmlib.make_nvr(rpm) for rpm in components if rpm["type"] == "rpm"]) return rpms @region.cache_on_arguments() @@ -378,7 +397,7 @@ def get_bundle_csv(self, build_nvr): """ try: build_info = self.get_build(build_nvr) - manifest_name = build_info.get('extra', {}).get('operator_manifests_archive') + manifest_name = build_info.get("extra", {}).get("operator_manifests_archive") if not manifest_name: log.error("Operator manifests archive is unavaiable for build %s", build_nvr) return None @@ -417,10 +436,10 @@ def get_bundle_related_images(self, build_nvr): try: buildinfo = self.get_build(build_nvr) related_images = ( - buildinfo.get('extra', {}) - .get('image', {}) - .get('operator_manifests', {}) - .get('related_images', {}) + buildinfo.get("extra", {}) + .get("image", {}) + .get("operator_manifests", {}) + .get("related_images", {}) ) except Exception as e: log.error("Unable to get related images in build %s: %s", build_nvr, str(e)) @@ -450,7 +469,8 @@ def get_modulemd(self, build_nvr): # If no overrides installed, it returns _ResultTuple. Reference from: # https://github.com/fedora-modularity/libmodulemd/blob/main/modulemd/tests/ModulemdTests/common.py#L116-L129 if not ( - "_overrides_module" in dir(Modulemd) and hasattr(gi.overrides.Modulemd, "read_packager_string") + "_overrides_module" in dir(Modulemd) + and hasattr(gi.overrides.Modulemd, "read_packager_string") ): _, mmd = Modulemd.read_packager_string(modulemd_str, None, None) else: @@ -464,8 +484,7 @@ def get_modulemd(self, build_nvr): def get_build_arches(self, build_id): archives = self.list_archives(build_id=build_id) arches = [ - archive["extra"]["image"]["arch"] - for archive in archives if archive["btype"] == "image" + archive["extra"]["image"]["arch"] for archive in archives if archive["btype"] == "image" ] return " ".join(sorted(arches)) @@ -493,14 +512,13 @@ def koji_service(profile=None, logger=None, login=True, dry_run=False): if not conf.krb_auth_principal: log.error("Cannot login to Koji, krb_auth_principal not set") else: - log.debug('Logging into %s with Kerberos authentication.', - service.server) + log.debug("Logging into %s with Kerberos authentication.", service.server) service.krb_login() # We are not logged in in dry run mode... 
if not dry_run and not service.logged_in: - log.error('Could not login server %s', service.server) + log.error("Could not login server %s", service.server) yield None try: @@ -508,5 +526,5 @@ def koji_service(profile=None, logger=None, login=True, dry_run=False): finally: if service.logged_in: if logger: - logger.debug('Logout Koji session') + logger.debug("Logout Koji session") service.logout() diff --git a/freshmaker/manage.py b/freshmaker/manage.py index 5d8cdf92..97d45345 100644 --- a/freshmaker/manage.py +++ b/freshmaker/manage.py @@ -30,8 +30,7 @@ from freshmaker import app, conf, db from freshmaker import models -migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), - 'migrations') +migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations") migrate = flask_migrate.Migrate(app, db, directory=migrations_dir) @@ -40,7 +39,7 @@ def cli(): """Manage freshmaker application""" -cli.command('db', flask_migrate.Migrate) +cli.command("db", flask_migrate.Migrate) def _establish_ssl_context(): @@ -48,9 +47,9 @@ def _establish_ssl_context(): return None # First, do some validation of the configuration attributes = ( - 'ssl_certificate_file', - 'ssl_certificate_key_file', - 'ssl_ca_certificate_file', + "ssl_certificate_file", + "ssl_certificate_key_file", + "ssl_ca_certificate_file", ) for attribute in attributes: @@ -62,80 +61,71 @@ def _establish_ssl_context(): # Then, establish the ssl context and return it ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) - ssl_ctx.load_cert_chain(conf.ssl_certificate_file, - conf.ssl_certificate_key_file) + ssl_ctx.load_cert_chain(conf.ssl_certificate_file, conf.ssl_certificate_key_file) ssl_ctx.verify_mode = ssl.CERT_OPTIONAL ssl_ctx.load_verify_locations(cafile=conf.ssl_ca_certificate_file) return ssl_ctx -@cli.command('upgradedb') +@cli.command("upgradedb") def upgradedb(): - """ Upgrades the database schema to the latest revision - """ - app.config["SERVER_NAME"] = 'localhost' - migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), - 'migrations') + """Upgrades the database schema to the latest revision""" + app.config["SERVER_NAME"] = "localhost" + migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations") with app.app_context(): flask_migrate.upgrade(directory=migrations_dir) -@cli.command('cleardb') +@cli.command("cleardb") def cleardb(): - """ Clears the database - """ + """Clears the database""" models.Event.query.delete() models.ArtifactBuild.query.delete() db.session.commit() -@cli.command('gencert') +@cli.command("gencert") def generatelocalhostcert(): - """ Creates a public/private key pair for message signing and the frontend - """ + """Creates a public/private key pair for message signing and the frontend""" from OpenSSL import crypto + cert_key = crypto.PKey() cert_key.generate_key(crypto.TYPE_RSA, 2048) - with open(conf.ssl_certificate_key_file, 'w') as cert_key_file: + with open(conf.ssl_certificate_key_file, "w") as cert_key_file: os.chmod(conf.ssl_certificate_key_file, 0o600) - cert_key_file.write( - crypto.dump_privatekey(crypto.FILETYPE_PEM, cert_key)) + cert_key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, cert_key)) cert = crypto.X509() msg_cert_subject = cert.get_subject() - msg_cert_subject.C = 'US' - msg_cert_subject.ST = 'MA' - msg_cert_subject.L = 'Boston' - msg_cert_subject.O = 'Development' # noqa - msg_cert_subject.CN = 'localhost' + msg_cert_subject.C = "US" + msg_cert_subject.ST = "MA" + msg_cert_subject.L = "Boston" + 
msg_cert_subject.O = "Development" # noqa + msg_cert_subject.CN = "localhost" cert.set_serial_number(2) cert.gmtime_adj_notBefore(0) cert.gmtime_adj_notAfter(315360000) # 10 years cert.set_issuer(cert.get_subject()) cert.set_pubkey(cert_key) cert_extensions = [ - crypto.X509Extension( - 'keyUsage', True, - 'digitalSignature, keyEncipherment, nonRepudiation'), - crypto.X509Extension('extendedKeyUsage', True, 'serverAuth'), + crypto.X509Extension("keyUsage", True, "digitalSignature, keyEncipherment, nonRepudiation"), + crypto.X509Extension("extendedKeyUsage", True, "serverAuth"), ] cert.add_extensions(cert_extensions) - cert.sign(cert_key, 'sha256') + cert.sign(cert_key, "sha256") - with open(conf.ssl_certificate_file, 'w') as cert_file: - cert_file.write( - crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) + with open(conf.ssl_certificate_file, "w") as cert_file: + cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) -@cli.command('runssl') -@click.option('-h', '--host', default=conf.host, help='Bind to this address') -@click.option('-p', '--port', type=int, default=conf.port, help='Listen on this port') -@click.option('-d', '--debug', is_flag=True, default=conf.debug, help='Debug mode') +@cli.command("runssl") +@click.option("-h", "--host", default=conf.host, help="Bind to this address") +@click.option("-p", "--port", type=int, default=conf.port, help="Listen on this port") +@click.option("-d", "--debug", is_flag=True, default=conf.debug, help="Debug mode") def runssl(host, port, debug): - """ Runs the Flask app with the HTTPS settings configured in config.py - """ - logging.info('Starting Freshmaker frontend') + """Runs the Flask app with the HTTPS settings configured in config.py""" + logging.info("Starting Freshmaker frontend") ssl_ctx = _establish_ssl_context() diff --git a/freshmaker/messaging.py b/freshmaker/messaging.py index 5fe8ad8e..e94f3048 100644 --- a/freshmaker/messaging.py +++ b/freshmaker/messaging.py @@ -41,13 +41,15 @@ def publish(topic, msg): :return: the value returned from underlying backend "send" method. 
""" from freshmaker.monitor import ( - messaging_tx_to_send_counter, messaging_tx_sent_ok_counter, - messaging_tx_failed_counter) + messaging_tx_to_send_counter, + messaging_tx_sent_ok_counter, + messaging_tx_failed_counter, + ) messaging_tx_to_send_counter.inc() try: - handler = _messaging_backends[conf.messaging_sender]['publish'] + handler = _messaging_backends[conf.messaging_sender]["publish"] except KeyError: messaging_tx_failed_counter.inc() raise KeyError("No messaging backend found for %r" % conf.messaging) @@ -64,8 +66,9 @@ def publish(topic, msg): def _fedmsg_publish(topic, msg): # fedmsg doesn't really need access to conf, however other backends do import fedmsg - config = conf.messaging_backends['fedmsg'] - return fedmsg.publish(topic, msg=msg, modname=config['SERVICE']) + + config = conf.messaging_backends["fedmsg"] + return fedmsg.publish(topic, msg=msg, modname=config["SERVICE"]) @retry(wait_on=(RuntimeError,), logger=log) @@ -79,15 +82,15 @@ def _rhmsg_publish(topic, msg): import proton from rhmsg.activemq.producer import AMQProducer - config = conf.messaging_backends['rhmsg'] + config = conf.messaging_backends["rhmsg"] producer_config = { - 'urls': config['BROKER_URLS'], - 'certificate': config['CERT_FILE'], - 'private_key': config['KEY_FILE'], - 'trusted_certificates': config['CA_CERT'], + "urls": config["BROKER_URLS"], + "certificate": config["CERT_FILE"], + "private_key": config["KEY_FILE"], + "trusted_certificates": config["CA_CERT"], } with AMQProducer(**producer_config) as producer: - topic = '{0}.{1}'.format(config['TOPIC_PREFIX'], topic) + topic = "{0}.{1}".format(config["TOPIC_PREFIX"], topic) producer.through_topic(topic) outgoing_msg = proton.Message() @@ -101,23 +104,24 @@ def _rhmsg_publish(topic, msg): def _in_memory_publish(topic, msg): - """ Puts the message into the in memory work queue. """ + """Puts the message into the in memory work queue.""" # Increment the message ID. global _in_memory_msg_id _in_memory_msg_id += 1 - config = conf.messaging_backends['in_memory'] + config = conf.messaging_backends["in_memory"] # Create fake fedmsg from the message so we can reuse # the BaseEvent.from_fedmsg code to get the particular BaseEvent # class instance. wrapped_msg = BaseEvent.from_fedmsg( - config['SERVICE'] + "." + topic, + config["SERVICE"] + "." + topic, {"msg_id": str(_in_memory_msg_id), "msg": msg}, ) # Put the message to queue. 
from freshmaker.consumer import work_queue_put + try: work_queue_put(wrapped_msg) except ValueError as e: @@ -131,13 +135,7 @@ def _in_memory_publish(topic, msg): _messaging_backends = { - 'fedmsg': { - 'publish': _fedmsg_publish - }, - 'in_memory': { - 'publish': _in_memory_publish - }, - 'rhmsg': { - 'publish': _rhmsg_publish - } + "fedmsg": {"publish": _fedmsg_publish}, + "in_memory": {"publish": _in_memory_publish}, + "rhmsg": {"publish": _rhmsg_publish}, } diff --git a/freshmaker/models.py b/freshmaker/models.py index aaeae26f..57080215 100644 --- a/freshmaker/models.py +++ b/freshmaker/models.py @@ -28,7 +28,7 @@ from collections import defaultdict from datetime import datetime -from sqlalchemy.orm import (validates, relationship) +from sqlalchemy.orm import validates, relationship from sqlalchemy.schema import Index from sqlalchemy.sql.expression import false @@ -37,16 +37,24 @@ from freshmaker import db, log from freshmaker import messaging from freshmaker.utils import get_url_for -from freshmaker.types import (ArtifactType, ArtifactBuildState, EventState, - RebuildReason) +from freshmaker.types import ArtifactType, ArtifactBuildState, EventState, RebuildReason from freshmaker.events import ( - MBSModuleStateChangeEvent, GitModuleMetadataChangeEvent, - GitRPMSpecChangeEvent, TestingEvent, GitDockerfileChangeEvent, - BodhiUpdateCompleteStableEvent, KojiTaskStateChangeEvent, BrewSignRPMEvent, - ErrataRPMAdvisoryShippedEvent, BrewContainerTaskStateChangeEvent, - ErrataAdvisoryStateChangedEvent, FreshmakerManualRebuildEvent, - ODCSComposeStateChangeEvent, ManualRebuildWithAdvisoryEvent, - FreshmakerAsyncManualBuildEvent, BotasErrataShippedEvent, + MBSModuleStateChangeEvent, + GitModuleMetadataChangeEvent, + GitRPMSpecChangeEvent, + TestingEvent, + GitDockerfileChangeEvent, + BodhiUpdateCompleteStableEvent, + KojiTaskStateChangeEvent, + BrewSignRPMEvent, + ErrataRPMAdvisoryShippedEvent, + BrewContainerTaskStateChangeEvent, + ErrataAdvisoryStateChangedEvent, + FreshmakerManualRebuildEvent, + ODCSComposeStateChangeEvent, + ManualRebuildWithAdvisoryEvent, + FreshmakerAsyncManualBuildEvent, + BotasErrataShippedEvent, ManualBundleRebuildEvent, FlatpakModuleAdvisoryReadyEvent, FlatpakApplicationManualBuildEvent, @@ -95,6 +103,7 @@ def commit_on_success(func): Ensures db session is committed after a successful call to decorated function, otherwise rollback. """ + def _decorator(*args, **kwargs): try: return func(*args, **kwargs) @@ -103,6 +112,7 @@ def _decorator(*args, **kwargs): raise finally: db.session.commit() + return _decorator @@ -112,7 +122,8 @@ class FreshmakerBase(db.Model): class User(FreshmakerBase, UserMixin): """User information table""" - __tablename__ = 'users' + + __tablename__ = "users" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(200), nullable=False, unique=True) @@ -158,9 +169,13 @@ class Event(FreshmakerBase): time_created = db.Column(db.DateTime, nullable=True) time_done = db.Column(db.DateTime, nullable=True) # AppenderQuery for getting builds associated with this Event. - builds = relationship("ArtifactBuild", back_populates="event", - lazy="dynamic", cascade="all, delete-orphan", - passive_deletes=True) + builds = relationship( + "ArtifactBuild", + back_populates="event", + lazy="dynamic", + cascade="all, delete-orphan", + passive_deletes=True, + ) # True if the even should be handled in dry run mode. dry_run = db.Column(db.Boolean, default=False) # For manual rebuilds, set to user requesting the rebuild. Otherwise null. 
@@ -174,14 +189,24 @@ class Event(FreshmakerBase): requester_metadata = db.Column(db.String, nullable=True) manual_triggered = db.Column( - db.Boolean, - default=False, - doc='Whether this event is triggered manually') + db.Boolean, default=False, doc="Whether this event is triggered manually" + ) @classmethod - def create(cls, session, message_id, search_key, event_type, released=True, - state=None, manual=False, dry_run=False, requester=None, - requested_rebuilds=None, requester_metadata=None): + def create( + cls, + session, + message_id, + search_key, + event_type, + released=True, + state=None, + manual=False, + dry_run=False, + requester=None, + requested_rebuilds=None, + requester_metadata=None, + ): if event_type in EVENT_TYPES: event_type = EVENT_TYPES[event_type] now = datetime.utcnow() @@ -201,7 +226,7 @@ def create(cls, session, message_id, search_key, event_type, released=True, session.add(event) return event - @validates('state') + @validates("state") def validate_state(self, key, field): if field in [s.value for s in list(EventState)]: return field @@ -216,18 +241,34 @@ def get(cls, session, message_id): return session.query(cls).filter_by(message_id=message_id).first() @classmethod - def get_or_create(cls, session, message_id, search_key, event_type, - released=True, manual=False, dry_run=False, - requester=None, requested_rebuilds=None, - requester_metadata=None): + def get_or_create( + cls, + session, + message_id, + search_key, + event_type, + released=True, + manual=False, + dry_run=False, + requester=None, + requested_rebuilds=None, + requester_metadata=None, + ): instance = cls.get(session, message_id) if instance: return instance instance = cls.create( - session, message_id, search_key, event_type, - released=released, manual=manual, dry_run=dry_run, - requester=requester, requested_rebuilds=requested_rebuilds, - requester_metadata=requester_metadata) + session, + message_id, + search_key, + event_type, + released=released, + manual=manual, + dry_run=dry_run, + requester=requester, + requested_rebuilds=requested_rebuilds, + requester_metadata=requester_metadata, + ) session.commit() return instance @@ -239,8 +280,7 @@ def get_or_create_from_event(cls, session, event, released=True): requested_rebuilds_list = getattr(event, "container_images", None) requested_rebuilds = None # make sure 'container_images' field is a list and convert it to str - if requested_rebuilds_list is not None and \ - isinstance(requested_rebuilds_list, list): + if requested_rebuilds_list is not None and isinstance(requested_rebuilds_list, list): requested_rebuilds = " ".join(requested_rebuilds_list) requester_metadata = getattr(event, "requester_metadata_json", None) if requester_metadata is not None: @@ -248,16 +288,23 @@ def get_or_create_from_event(cls, session, event, released=True): try: requester_metadata = json.dumps(requester_metadata) except TypeError: - log.warning("requester_metadata_json field is ill-formatted: %s", - requester_metadata) + log.warning( + "requester_metadata_json field is ill-formatted: %s", requester_metadata + ) requester_metadata = None - return cls.get_or_create(session, event.msg_id, - event.search_key, event.__class__, - released=released, manual=event.manual, - dry_run=event.dry_run, requester=requester, - requested_rebuilds=requested_rebuilds, - requester_metadata=requester_metadata) + return cls.get_or_create( + session, + event.msg_id, + event.search_key, + event.__class__, + released=released, + manual=event.manual, + dry_run=event.dry_run, + 
requester=requester, + requested_rebuilds=requested_rebuilds, + requester_metadata=requester_metadata, + ) @classmethod def get_unreleased(cls, session, states=None): @@ -272,27 +319,29 @@ def get_unreleased(cls, session, states=None): :return: List of unreleased events of `states` state. """ if not states: - states = [EventState.INITIALIZED.value, - EventState.BUILDING.value, - EventState.COMPLETE.value] - else: states = [ - state.value if isinstance(state, EventState) else state for - state in states + EventState.INITIALIZED.value, + EventState.BUILDING.value, + EventState.COMPLETE.value, ] - return session.query(cls).filter(cls.released == false(), - cls.state.in_(states)).all() + else: + states = [state.value if isinstance(state, EventState) else state for state in states] + return session.query(cls).filter(cls.released == false(), cls.state.in_(states)).all() @classmethod def get_by_event_id(cls, session, event_id): return session.query(cls).filter_by(id=event_id).first() def get_image_builds_in_first_batch(self, session): - return session.query(ArtifactBuild).filter_by( - dep_on=None, - type=ArtifactType.IMAGE.value, - event_id=self.id, - ).all() + return ( + session.query(ArtifactBuild) + .filter_by( + dep_on=None, + type=ArtifactType.IMAGE.value, + event_id=self.id, + ) + .all() + ) @property def event_type(self): @@ -308,11 +357,13 @@ def add_event_dependency(self, session, event): for committing changes to database. If `event` has been added already, nothing changed and `None` will be returned. """ - dep = session.query(EventDependency.id).filter_by( - event_id=self.id, event_dependency_id=event.id).first() + dep = ( + session.query(EventDependency.id) + .filter_by(event_id=self.id, event_dependency_id=event.id) + .first() + ) if dep is None: - dep = EventDependency(event_id=self.id, - event_dependency_id=event.id) + dep = EventDependency(event_id=self.id, event_dependency_id=event.id) session.add(dep) return dep else: @@ -326,8 +377,7 @@ def event_dependencies(self): events = [] deps = EventDependency.query.filter_by(event_id=self.id).all() for dep in deps: - events.append(Event.query.filter_by( - id=dep.event_dependency_id).first()) + events.append(Event.query.filter_by(id=dep.event_dependency_id).first()) return events @property @@ -338,16 +388,20 @@ def depending_events(self): depending_events = [] parents = EventDependency.query.filter_by(event_dependency_id=self.id).all() for p in parents: - depending_events.append(Event.query.filter_by( - id=p.event_id).first()) + depending_events.append(Event.query.filter_by(id=p.event_id).first()) return depending_events def has_all_builds_in_state(self, state): """ Returns True when all builds are in the given `state`. 
""" - return db.session.query(ArtifactBuild).filter_by( - event_id=self.id).filter(state != state).count() == 0 + return ( + db.session.query(ArtifactBuild) + .filter_by(event_id=self.id) + .filter(state != state) + .count() + == 0 + ) def builds_transition(self, state, reason, filters=None): """ @@ -361,11 +415,11 @@ def builds_transition(self, state, reason, filters=None): if not self.builds: return [] - builds_to_transition = self.builds.filter_by( - **filters).all() if isinstance(filters, dict) else self.builds + builds_to_transition = ( + self.builds.filter_by(**filters).all() if isinstance(filters, dict) else self.builds + ) - return [build.id - for build in builds_to_transition if build.transition(state, reason)] + return [build.id for build in builds_to_transition if build.transition(state, reason)] def transition(self, state, state_reason=None): """ @@ -387,8 +441,7 @@ def transition(self, state, state_reason=None): log_fnc = log.error else: log_fnc = log.info - log_fnc("Event %r moved to state %s, %r" % ( - self, EventState(state).name, state_reason)) + log_fnc("Event %r moved to state %s, %r" % (self, EventState(state).name, state_reason)) # In case Event is already in the state, return False. if self.state == state: @@ -397,16 +450,20 @@ def transition(self, state, state_reason=None): self.state = state # Log the time done - if state in [EventState.FAILED.value, EventState.COMPLETE.value, - EventState.SKIPPED.value, EventState.CANCELED.value]: + if state in [ + EventState.FAILED.value, + EventState.COMPLETE.value, + EventState.SKIPPED.value, + EventState.CANCELED.value, + ]: self.time_done = datetime.utcnow() if EventState(state).counter: EventState(state).counter.inc() db.session.commit() - messaging.publish('event.state.changed', self.json()) - messaging.publish('event.state.changed.min', self.json_min()) + messaging.publish("event.state.changed", self.json()) + messaging.publish("event.state.changed.min", self.json_min()) return True @@ -428,22 +485,22 @@ def requester_metadata_json(self): def json(self): data = self._common_json() - data['builds'] = [b.json() for b in self.builds] + data["builds"] = [b.json() for b in self.builds] return data def json_min(self): builds_summary = defaultdict(int) - builds_summary['total'] = len(self.builds.all()) + builds_summary["total"] = len(self.builds.all()) for build in self.builds: state_name = ArtifactBuildState(build.state).name builds_summary[state_name] += 1 data = self._common_json() - data['builds_summary'] = dict(builds_summary) + data["builds_summary"] = dict(builds_summary) return data def _common_json(self): - event_url = get_url_for('event', id=self.id) + event_url = get_url_for("event", id=self.id) db.session.add(self) return { "id": self.id, @@ -458,8 +515,9 @@ def _common_json(self): "url": event_url, "dry_run": self.dry_run, "requester": self.requester, - "requested_rebuilds": (self.requested_rebuilds.split(" ") - if self.requested_rebuilds else []), + "requested_rebuilds": ( + self.requested_rebuilds.split(" ") if self.requested_rebuilds else [] + ), "requester_metadata": self.requester_metadata_json, "depends_on_events": [event.id for event in self.event_dependencies], "depending_events": [event.id for event in self.depending_events], @@ -476,19 +534,25 @@ def find_dependent_events(self): """ builds_nvrs = [build.name for build in self.builds] - states = [EventState.INITIALIZED.value, - EventState.BUILDING.value, - EventState.COMPLETE.value] + states = [ + EventState.INITIALIZED.value, + EventState.BUILDING.value, + 
EventState.COMPLETE.value, + ] query = db.session.query(ArtifactBuild.event_id) - dep_event_ids = query.join(ArtifactBuild.event).filter( - ArtifactBuild.name.in_(builds_nvrs), - ArtifactBuild.event_id != self.id, - ArtifactBuild.type == ArtifactType.IMAGE.value, - Event.manual_triggered == false(), - Event.released == false(), - Event.state.in_(states), - ).distinct() + dep_event_ids = ( + query.join(ArtifactBuild.event) + .filter( + ArtifactBuild.name.in_(builds_nvrs), + ArtifactBuild.event_id != self.id, + ArtifactBuild.type == ArtifactType.IMAGE.value, + Event.manual_triggered == false(), + Event.released == false(), + Event.state.in_(states), + ) + .distinct() + ) dep_events = [] query = db.session.query(Event) @@ -507,29 +571,33 @@ def get_artifact_build_from_event_dependencies(self, nvr): If the build is not found, it returns None. """ for parent_event in self.event_dependencies: - parent_build = db.session.query( - ArtifactBuild).filter_by(event_id=parent_event.id, - original_nvr=nvr, - state=ArtifactBuildState.DONE.value).all() + parent_build = ( + db.session.query(ArtifactBuild) + .filter_by( + event_id=parent_event.id, original_nvr=nvr, state=ArtifactBuildState.DONE.value + ) + .all() + ) if parent_build: return parent_build -Index('idx_event_message_id', Event.message_id, unique=True) +Index("idx_event_message_id", Event.message_id, unique=True) class EventDependency(FreshmakerBase): __tablename__ = "event_dependencies" id = db.Column(db.Integer, primary_key=True) - event_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False) - event_dependency_id = db.Column(db.Integer, db.ForeignKey('events.id'), nullable=False) + event_id = db.Column(db.Integer, db.ForeignKey("events.id"), nullable=False) + event_dependency_id = db.Column(db.Integer, db.ForeignKey("events.id"), nullable=False) Index( - 'idx_event_dependency_rel', + "idx_event_dependency_rel", EventDependency.event_id, EventDependency.event_dependency_id, - unique=True) + unique=True, +) class ArtifactBuild(FreshmakerBase): @@ -546,11 +614,11 @@ class ArtifactBuild(FreshmakerBase): # Link to the Artifact on which this one depends and which triggered # the rebuild of this Artifact. - dep_on_id = db.Column(db.Integer, db.ForeignKey('artifact_builds.id')) - dep_on = relationship('ArtifactBuild', remote_side=[id]) + dep_on_id = db.Column(db.Integer, db.ForeignKey("artifact_builds.id")) + dep_on = relationship("ArtifactBuild", remote_side=[id]) # Event associated with this Build - event_id = db.Column(db.Integer, db.ForeignKey('events.id')) + event_id = db.Column(db.Integer, db.ForeignKey("events.id")) event = relationship("Event", back_populates="builds") # Id of corresponding real build in external build system. 
@@ -569,18 +637,24 @@ class ArtifactBuild(FreshmakerBase): rebuild_reason = db.Column(db.Integer, nullable=True) # pullspec overrides - _bundle_pullspec_overrides = db.Column( - "bundle_pullspec_overrides", db.Text, nullable=True - ) + _bundle_pullspec_overrides = db.Column("bundle_pullspec_overrides", db.Text, nullable=True) - composes = db.relationship('ArtifactBuildCompose', back_populates='build') + composes = db.relationship("ArtifactBuildCompose", back_populates="build") @classmethod - def create(cls, session, event, name, type, - build_id=None, dep_on=None, state=None, - original_nvr=None, rebuilt_nvr=None, - rebuild_reason=0): - + def create( + cls, + session, + event, + name, + type, + build_id=None, + dep_on=None, + state=None, + original_nvr=None, + rebuilt_nvr=None, + rebuild_reason=0, + ): now = datetime.utcnow() build = cls( name=name, @@ -592,12 +666,12 @@ def create(cls, session, event, name, type, build_id=build_id, time_submitted=now, dep_on=dep_on, - rebuild_reason=rebuild_reason + rebuild_reason=rebuild_reason, ) session.add(build) return build - @validates('state') + @validates("state") def validate_state(self, key, field): if field in [s.value for s in list(ArtifactBuildState)]: return field @@ -607,7 +681,7 @@ def validate_state(self, key, field): return field.value raise ValueError("%s: %s, not in %r" % (key, field, list(ArtifactBuildState))) - @validates('type') + @validates("type") def validate_type(self, key, field): if field in [t.value for t in list(ArtifactType)]: return field @@ -623,10 +697,12 @@ def get_lowest_build_id(cls, session): Returns the lowest build_id. If there is no build so far, returns 0. """ - build = (session.query(ArtifactBuild) - .filter(cls.build_id != None) # noqa - .order_by(ArtifactBuild.build_id.asc()) - .first()) + build = ( + session.query(ArtifactBuild) + .filter(cls.build_id != None) # noqa + .order_by(ArtifactBuild.build_id.asc()) + .first() + ) if not build: return 0 return build.build_id @@ -635,9 +711,7 @@ def get_lowest_build_id(cls, session): def bundle_pullspec_overrides(self): """Return the Python representation of the JSON bundle_pullspec_overrides.""" return ( - json.loads(self._bundle_pullspec_overrides) - if self._bundle_pullspec_overrides - else None + json.loads(self._bundle_pullspec_overrides) if self._bundle_pullspec_overrides else None ) @bundle_pullspec_overrides.setter @@ -675,8 +749,10 @@ def transition(self, state, state_reason): log_fnc = log.error else: log_fnc = log.info - log_fnc("Artifact build %r moved to state %s, %r" % ( - self, ArtifactBuildState(state).name, state_reason)) + log_fnc( + "Artifact build %r moved to state %s, %r" + % (self, ArtifactBuildState(state).name, state_reason) + ) if self.state == state: return False @@ -686,36 +762,40 @@ def transition(self, state, state_reason): ArtifactBuildState(state).counter.inc() self.state_reason = state_reason - if self.state in [ArtifactBuildState.DONE.value, - ArtifactBuildState.FAILED.value, - ArtifactBuildState.CANCELED.value]: + if self.state in [ + ArtifactBuildState.DONE.value, + ArtifactBuildState.FAILED.value, + ArtifactBuildState.CANCELED.value, + ]: self.time_completed = datetime.utcnow() # For FAILED/CANCELED states, move also all the artifacts depending # on this one to FAILED/CANCELED state, because there is no way we # can rebuild them. 
-        if self.state in [ArtifactBuildState.FAILED.value,
-                          ArtifactBuildState.CANCELED.value]:
+        if self.state in [ArtifactBuildState.FAILED.value, ArtifactBuildState.CANCELED.value]:
             for build in self.depending_artifact_builds():
                 build.transition(
-                    self.state, "Cannot build artifact, because its "
-                    "dependency cannot be built.")
+                    self.state, "Cannot build artifact, because its " "dependency cannot be built."
+                )
 
-        messaging.publish('build.state.changed', self.json())
+        messaging.publish("build.state.changed", self.json())
 
         return True
 
     def __repr__(self):
         return "<ArtifactBuild %s, type %s, state %s, event %s>" % (
-            self.name, ArtifactType(self.type).name,
-            ArtifactBuildState(self.state).name, self.event.message_id)
+            self.name,
+            ArtifactType(self.type).name,
+            ArtifactBuildState(self.state).name,
+            self.event.message_id,
+        )
 
     def json(self):
         build_args = {}
         if self.build_args:
             build_args = json.loads(self.build_args)
 
-        build_url = get_url_for('build', id=self.id)
+        build_url = get_url_for("build", id=self.id)
         db.session.add(self)
         return {
             "id": self.id,
@@ -736,7 +816,7 @@ def json(self):
             "url": build_url,
             "build_args": build_args,
             "odcs_composes": [rel.compose.odcs_compose_id for rel in self.composes],
-            "rebuild_reason": RebuildReason(self.rebuild_reason or 0).name.lower()
+            "rebuild_reason": RebuildReason(self.rebuild_reason or 0).name.lower(),
         }
 
     def get_root_dep_on(self):
@@ -752,8 +832,7 @@ def get_root_dep_on(self):
     def add_composes(self, session, composes):
         """Add an ODCS compose to this build"""
         for compose in composes:
-            session.add(ArtifactBuildCompose(
-                build_id=self.id, compose_id=compose.id))
+            session.add(ArtifactBuildCompose(build_id=self.id, compose_id=compose.id))
 
     @property
     def composes_ready(self):
@@ -775,18 +854,18 @@ def get_rebuilt_original_nvrs_by_search_key(cls, session, search_key, directly_a
 
 
 class Compose(FreshmakerBase):
-    __tablename__ = 'composes'
+    __tablename__ = "composes"
 
     id = db.Column(db.Integer, primary_key=True)
     odcs_compose_id = db.Column(db.Integer, nullable=False)
 
-    builds = db.relationship('ArtifactBuildCompose', back_populates='compose')
+    builds = db.relationship("ArtifactBuildCompose", back_populates="compose")
 
     @property
     def finished(self):
         from freshmaker.odcsclient import create_odcs_client
-        return 'done' == create_odcs_client().get_compose(
-            self.odcs_compose_id)['state_name']
+
+        return "done" == create_odcs_client().get_compose(self.odcs_compose_id)["state_name"]
 
     @classmethod
     def get_lowest_compose_id(cls, session):
@@ -794,28 +873,21 @@ def get_lowest_compose_id(cls, session):
         Returns the lowest odcs_compose_id. If there is no compose,
         returns 0.
""" - compose = session.query(Compose).order_by( - Compose.odcs_compose_id.asc()).first() + compose = session.query(Compose).order_by(Compose.odcs_compose_id.asc()).first() if not compose: return 0 return compose.odcs_compose_id -Index('idx_odcs_compose_id', Compose.odcs_compose_id, unique=True) +Index("idx_odcs_compose_id", Compose.odcs_compose_id, unique=True) class ArtifactBuildCompose(FreshmakerBase): - __tablename__ = 'artifact_build_composes' + __tablename__ = "artifact_build_composes" - build_id = db.Column( - db.Integer, - db.ForeignKey('artifact_builds.id'), - primary_key=True) + build_id = db.Column(db.Integer, db.ForeignKey("artifact_builds.id"), primary_key=True) - compose_id = db.Column( - db.Integer, - db.ForeignKey('composes.id'), - primary_key=True) + compose_id = db.Column(db.Integer, db.ForeignKey("composes.id"), primary_key=True) - build = db.relationship('ArtifactBuild', back_populates='composes') - compose = db.relationship('Compose', back_populates='builds') + build = db.relationship("ArtifactBuild", back_populates="composes") + compose = db.relationship("Compose", back_populates="builds") diff --git a/freshmaker/monitor.py b/freshmaker/monitor.py index 5ffbaaac..eeec73bc 100644 --- a/freshmaker/monitor.py +++ b/freshmaker/monitor.py @@ -27,104 +27,100 @@ from flask import Blueprint, Response from prometheus_client import ( # noqa: F401 - ProcessCollector, CollectorRegistry, Counter, multiprocess, - Histogram, generate_latest, start_http_server, CONTENT_TYPE_LATEST) + ProcessCollector, + CollectorRegistry, + Counter, + multiprocess, + Histogram, + generate_latest, + start_http_server, + CONTENT_TYPE_LATEST, +) from sqlalchemy import event # Service-specific imports -if not os.environ.get('prometheus_multiproc_dir'): - os.environ.setdefault('prometheus_multiproc_dir', tempfile.mkdtemp()) +if not os.environ.get("prometheus_multiproc_dir"): + os.environ.setdefault("prometheus_multiproc_dir", tempfile.mkdtemp()) registry = CollectorRegistry() ProcessCollector(registry=registry) multiprocess.MultiProcessCollector(registry) -if os.getenv('MONITOR_STANDALONE_METRICS_SERVER_ENABLE', 'false') == 'true': - port = os.getenv('MONITOR_STANDALONE_METRICS_SERVER_PORT', '10040') +if os.getenv("MONITOR_STANDALONE_METRICS_SERVER_ENABLE", "false") == "true": + port = os.getenv("MONITOR_STANDALONE_METRICS_SERVER_PORT", "10040") start_http_server(int(port), registry=registry) # Generic metrics messaging_rx_counter = Counter( - 'messaging_rx', - 'Total number of messages received', - registry=registry) + "messaging_rx", "Total number of messages received", registry=registry +) messaging_rx_ignored_counter = Counter( - 'messaging_rx_ignored', - 'Number of received messages, which were ignored', - registry=registry) + "messaging_rx_ignored", "Number of received messages, which were ignored", registry=registry +) messaging_rx_processed_ok_counter = Counter( - 'messaging_rx_processed_ok', - 'Number of received messages, which were processed successfully', - registry=registry) + "messaging_rx_processed_ok", + "Number of received messages, which were processed successfully", + registry=registry, +) messaging_rx_failed_counter = Counter( - 'messaging_rx_failed', - 'Number of received messages, which failed during processing', - registry=registry) + "messaging_rx_failed", + "Number of received messages, which failed during processing", + registry=registry, +) messaging_tx_to_send_counter = Counter( - 'messaging_tx_to_send', - 'Total number of messages to send', - registry=registry) + 
"messaging_tx_to_send", "Total number of messages to send", registry=registry +) messaging_tx_sent_ok_counter = Counter( - 'messaging_tx_sent_ok', - 'Number of messages, which were sent successfully', - registry=registry) + "messaging_tx_sent_ok", "Number of messages, which were sent successfully", registry=registry +) messaging_tx_failed_counter = Counter( - 'messaging_tx_failed', - 'Number of messages, for which the sender failed', - registry=registry) + "messaging_tx_failed", "Number of messages, for which the sender failed", registry=registry +) db_engine_connect_counter = Counter( - 'db_engine_connect', - 'Number of \'engine_connect\' events', - registry=registry) + "db_engine_connect", "Number of 'engine_connect' events", registry=registry +) db_handle_error_counter = Counter( - 'db_handle_error', - 'Number of exceptions during connection', - registry=registry) + "db_handle_error", "Number of exceptions during connection", registry=registry +) db_transaction_rollback_counter = Counter( - 'db_transaction_rollback', - 'Number of transactions, which were rolled back', - registry=registry) + "db_transaction_rollback", "Number of transactions, which were rolled back", registry=registry +) # Service-specific metrics freshmaker_artifact_build_done_counter = Counter( - 'freshmaker_artifact_build_done', - 'Number of successful artifact builds', - registry=registry) + "freshmaker_artifact_build_done", "Number of successful artifact builds", registry=registry +) freshmaker_artifact_build_failed_counter = Counter( - 'freshmaker_artifact_build_failed', - 'Number of artifact builds, which failed due to error(s)', - registry=registry) + "freshmaker_artifact_build_failed", + "Number of artifact builds, which failed due to error(s)", + registry=registry, +) freshmaker_artifact_build_canceled_counter = Counter( - 'freshmaker_artifact_build_canceled', - 'Number of artifact builds, which were canceled', - registry=registry) + "freshmaker_artifact_build_canceled", + "Number of artifact builds, which were canceled", + registry=registry, +) freshmaker_event_complete_counter = Counter( - 'freshmaker_event_complete', - 'Number of successfully handled events', - registry=registry) + "freshmaker_event_complete", "Number of successfully handled events", registry=registry +) freshmaker_event_failed_counter = Counter( - 'freshmaker_event_failed', - 'Number of events, which failed due to error(s)', - registry=registry) + "freshmaker_event_failed", "Number of events, which failed due to error(s)", registry=registry +) freshmaker_event_skipped_counter = Counter( - 'freshmaker_event_skipped', - 'Number of events, for which no action was taken', - registry=registry) + "freshmaker_event_skipped", "Number of events, for which no action was taken", registry=registry +) freshmaker_event_canceled_counter = Counter( - 'freshmaker_event_canceled', - 'Number of events canceled during their handling', - registry=registry) + "freshmaker_event_canceled", + "Number of events canceled during their handling", + registry=registry, +) -freshmaker_build_api_latency = Histogram( - 'build_api_latency', - 'BuildAPI latency', registry=registry) -freshmaker_event_api_latency = Histogram( - 'event_api_latency', - 'EventAPI latency', registry=registry) +freshmaker_build_api_latency = Histogram("build_api_latency", "BuildAPI latency", registry=registry) +freshmaker_event_api_latency = Histogram("event_api_latency", "EventAPI latency", registry=registry) def db_hook_event_listeners(target=None): @@ -134,25 +130,22 @@ def 
db_hook_event_listeners(target=None): if not target: target = db.engine - @event.listens_for(target, 'engine_connect') + @event.listens_for(target, "engine_connect") def receive_engine_connect(conn, branch): db_engine_connect_counter.inc() - @event.listens_for(target, 'handle_error') + @event.listens_for(target, "handle_error") def receive_handle_error(exception_context): db_handle_error_counter.inc() - @event.listens_for(target, 'rollback') + @event.listens_for(target, "rollback") def receive_rollback(conn): db_transaction_rollback_counter.inc() -monitor_api = Blueprint( - 'monitor', __name__, - url_prefix='/api/1/monitor') +monitor_api = Blueprint("monitor", __name__, url_prefix="/api/1/monitor") -@monitor_api.route('/metrics') +@monitor_api.route("/metrics") def metrics(): - return Response(generate_latest(registry), - content_type=CONTENT_TYPE_LATEST) + return Response(generate_latest(registry), content_type=CONTENT_TYPE_LATEST) diff --git a/freshmaker/odcsclient.py b/freshmaker/odcsclient.py index 3c16ec1a..f89826ae 100644 --- a/freshmaker/odcsclient.py +++ b/freshmaker/odcsclient.py @@ -48,7 +48,6 @@ class RetryingODCS(ODCS): - def _make_request(self, *args, **kwargs): try: return super(RetryingODCS, self)._make_request(*args, **kwargs) @@ -65,21 +64,23 @@ def create_odcs_client(): """ Create instance of ODCS according to configured authentication mechasnim """ - if conf.odcs_auth_mech == 'kerberos': - return RetryingODCS(conf.odcs_server_url, - auth_mech=AuthMech.Kerberos, - verify_ssl=conf.odcs_verify_ssl) - elif conf.odcs_auth_mech == 'openidc': + if conf.odcs_auth_mech == "kerberos": + return RetryingODCS( + conf.odcs_server_url, auth_mech=AuthMech.Kerberos, verify_ssl=conf.odcs_verify_ssl + ) + elif conf.odcs_auth_mech == "openidc": if not conf.odcs_openidc_token: - raise ValueError('Missing OpenIDC token in configuration.') - return RetryingODCS(conf.odcs_server_url, - auth_mech=AuthMech.OpenIDC, - openidc_token=conf.odcs_openidc_token, - verify_ssl=conf.odcs_verify_ssl) + raise ValueError("Missing OpenIDC token in configuration.") + return RetryingODCS( + conf.odcs_server_url, + auth_mech=AuthMech.OpenIDC, + openidc_token=conf.odcs_openidc_token, + verify_ssl=conf.odcs_verify_ssl, + ) else: raise ValueError( - 'Authentication mechanism {0} is not supported yet.'.format( - conf.odcs_auth_mech)) + "Authentication mechanism {0} is not supported yet.".format(conf.odcs_auth_mech) + ) class FreshmakerODCSClient(object): @@ -98,8 +99,8 @@ def __init__(self, handler): self.handler = handler def _fake_odcs_new_compose( - self, compose_source, tag, packages=None, results=None, - builds=None, arches=None): + self, compose_source, tag, packages=None, results=None, builds=None, arches=None + ): """ Fake odcs.new_compose(...) method used in the dry run mode. 
@@ -110,7 +111,8 @@ def _fake_odcs_new_compose( """ self.handler.log_info( "DRY RUN: Calling fake odcs.new_compose with args: %r", - (compose_source, tag, packages, results, arches)) + (compose_source, tag, packages, results, arches), + ) # In case we run in DRY_RUN mode, we need to initialize # FAKE_COMPOSE_ID to the id of last ODCS compose to give the IDs @@ -120,19 +122,18 @@ def _fake_odcs_new_compose( fake_compose_id = -1 new_compose = { - 'id': fake_compose_id, - 'result_repofile': "http://localhost/{}.repo".format(fake_compose_id), - 'state': COMPOSE_STATES['done'], - 'results': results or ['boot.iso'] + "id": fake_compose_id, + "result_repofile": "http://localhost/{}.repo".format(fake_compose_id), + "state": COMPOSE_STATES["done"], + "results": results or ["boot.iso"], } if builds: - new_compose['builds'] = builds + new_compose["builds"] = builds if arches: - new_compose['arches'] = arches + new_compose["arches"] = arches # Generate and inject the ODCSComposeStateChangeEvent event. - event = ODCSComposeStateChangeEvent( - "fake_compose_msg", new_compose) + event = ODCSComposeStateChangeEvent("fake_compose_msg", new_compose) event.dry_run = True self.handler.log_info("Injecting fake event: %r", event) work_queue_put(event) @@ -146,10 +147,9 @@ def _get_packages_for_compose(self, nvr): :return: list of RPM names built from given build. :rtype: list """ - with koji_service( - conf.koji_profile, log, dry_run=self.handler.dry_run) as session: + with koji_service(conf.koji_profile, log, dry_run=self.handler.dry_run) as session: rpms = session.get_build_rpms(nvr) - return list(set([rpm['name'] for rpm in rpms])) + return list(set([rpm["name"] for rpm in rpms])) def _get_compose_source(self, nvr): """Get tag from which to collect packages to compose @@ -158,47 +158,46 @@ def _get_compose_source(self, nvr): of found tag. :rtype: str """ - with koji_service( - conf.koji_profile, log, dry_run=self.handler.dry_run) as service: + with koji_service(conf.koji_profile, log, dry_run=self.handler.dry_run) as service: # Get the list of *-candidate tags, because packages added into # Errata should be tagged into -candidate tag. tags = service.session.listTags(nvr) - candidate_tags = [tag['name'] for tag in tags - if tag['name'].endswith('-candidate')] + candidate_tags = [tag["name"] for tag in tags if tag["name"].endswith("-candidate")] # Candidate tags may include unsigned packages and ODCS won't # allow generating compose from them, so try to find out final # version of candidate tag (without the "-candidate" suffix). final_tags = [] for candidate_tag in candidate_tags: - final = candidate_tag[:-len("-candidate")] - final_tags += [tag['name'] for tag in tags - if tag['name'] == final] + final = candidate_tag[: -len("-candidate")] + final_tags += [tag["name"] for tag in tags if tag["name"] == final] # Prefer final tags over candidate tags. 
tags_to_try = final_tags + candidate_tags for tag in tags_to_try: latest_build = service.session.listTagged( - tag, - latest=True, - package=koji.parse_NVR(nvr)['name']) - if latest_build and latest_build[0]['nvr'] == nvr: + tag, latest=True, package=koji.parse_NVR(nvr)["name"] + ) + if latest_build and latest_build[0]["nvr"] == nvr: self.handler.log_info( - "Package %r is latest version in tag %r, " - "will use this tag", nvr, tag) + "Package %r is latest version in tag %r, " "will use this tag", nvr, tag + ) return tag elif not latest_build: self.handler.log_info( - "Could not find package %r in tag %r, " - "skipping this tag", nvr, tag) + "Could not find package %r in tag %r, " "skipping this tag", nvr, tag + ) else: self.handler.log_info( "Package %r is not he latest in the tag %r (" "latest is %r), skipping this tag", - nvr, tag, latest_build[0]['nvr']) + nvr, + tag, + latest_build[0]["nvr"], + ) def get_compose(self, compose_id): - """ Get compose info from ODCS + """Get compose info from ODCS :param int compose_id: id of compose :return: a dict of compose info @@ -211,15 +210,15 @@ def prepare_yum_repos_for_rebuilds(self, db_event): db_composes = [] compose = self.prepare_yum_repo(db_event) - db_composes.append(Compose(odcs_compose_id=compose['id'])) + db_composes.append(Compose(odcs_compose_id=compose["id"])) db.session.add(db_composes[-1]) - repo_urls.append(compose['result_repofile']) + repo_urls.append(compose["result_repofile"]) for dep_event in db_event.find_dependent_events(): compose = self.prepare_yum_repo(dep_event) - db_composes.append(Compose(odcs_compose_id=compose['id'])) + db_composes.append(Compose(odcs_compose_id=compose["id"])) db.session.add(db_composes[-1]) - repo_urls.append(compose['result_repofile']) + repo_urls.append(compose["result_repofile"]) # commit all new composes db.session.commit() @@ -256,32 +255,39 @@ def prepare_yum_repo(self, db_event): if compose_source and compose_source != source: # TODO: Handle this by generating two ODCS composes db_event.builds_transition( - ArtifactBuildState.FAILED.value, "Packages for errata " - "advisory %d found in multiple different tags." - % (errata_id)) + ArtifactBuildState.FAILED.value, + "Packages for errata " + "advisory %d found in multiple different tags." % (errata_id), + ) return else: compose_source = source if compose_source is None: db_event.builds_transition( - ArtifactBuildState.FAILED.value, 'None of builds %s of ' - 'advisory %d is the latest build in its candidate tag.' - % (builds, errata_id)) + ArtifactBuildState.FAILED.value, + "None of builds %s of " + "advisory %d is the latest build in its candidate tag." % (builds, errata_id), + ) return self.handler.log_info( - 'Generating new compose for rebuild: ' - 'source: %s, source type: %s, packages: %s', - compose_source, 'tag', packages) + "Generating new compose for rebuild: " "source: %s, source type: %s, packages: %s", + compose_source, + "tag", + packages, + ) if not self.handler.dry_run: new_compose = create_odcs_client().new_compose( - compose_source, 'tag', packages=packages, - sigkeys=conf.odcs_sigkeys, flags=["no_deps"]) + compose_source, + "tag", + packages=packages, + sigkeys=conf.odcs_sigkeys, + flags=["no_deps"], + ) else: - new_compose = self._fake_odcs_new_compose( - compose_source, 'tag', packages=packages) + new_compose = self._fake_odcs_new_compose(compose_source, "tag", packages=packages) return new_compose @@ -297,17 +303,13 @@ def prepare_pulp_repo(self, build, content_sets): :rtype: dict :return: ODCS compose dictionary. 
""" - self.handler.log_info( - 'Generating new PULP type compose for content_sets: %r', - content_sets) + self.handler.log_info("Generating new PULP type compose for content_sets: %r", content_sets) odcs = create_odcs_client() if not self.handler.dry_run: - new_compose = odcs.new_compose( - ' '.join(content_sets), 'pulp') + new_compose = odcs.new_compose(" ".join(content_sets), "pulp") else: - new_compose = self._fake_odcs_new_compose( - content_sets, 'pulp') + new_compose = self._fake_odcs_new_compose(content_sets, "pulp") return new_compose @@ -325,27 +327,30 @@ def prepare_odcs_compose_with_image_rpms(self, image): :rtype: dict """ - if not image.get('multi_arch_rpm_manifest'): + if not image.get("multi_arch_rpm_manifest"): self.handler.log_warn('"multi_arch_rpm_manifest" not set in image.') return builds = set() packages = set() - for rpm_manifest in image['multi_arch_rpm_manifest'].values(): + for rpm_manifest in image["multi_arch_rpm_manifest"].values(): # For some reason, the rpm manifest itself is always a wrapped in a list. if not rpm_manifest: continue for rpm in rpm_manifest[0].get("rpms", []): parsed_nvr = kobo.rpmlib.parse_nvra(rpm["srpm_nevra"]) - srpm_nvr = "%s-%s-%s" % (parsed_nvr["name"], parsed_nvr["version"], - parsed_nvr["release"]) + srpm_nvr = "%s-%s-%s" % ( + parsed_nvr["name"], + parsed_nvr["version"], + parsed_nvr["release"], + ) builds.add(srpm_nvr) parsed_nvr = kobo.rpmlib.parse_nvra(rpm["nvra"]) packages.add(parsed_nvr["name"]) if not builds or not packages: - self.handler.log_warn('No builds or packages identified in image') + self.handler.log_warn("No builds or packages identified in image") return # ODCS client expects list and not set for packages/builds/arches, so convert @@ -353,20 +358,25 @@ def prepare_odcs_compose_with_image_rpms(self, image): # in logs, and easy to test. builds = sorted(builds) packages = sorted(packages) - arches = sorted(image['arches'].split()) + arches = sorted(image["arches"].split()) if not self.handler.dry_run: new_compose = create_odcs_client().new_compose( - "", 'build', packages=packages, builds=builds, - arches=arches, sigkeys=conf.odcs_sigkeys, - flags=["no_deps"]) + "", + "build", + packages=packages, + builds=builds, + arches=arches, + sigkeys=conf.odcs_sigkeys, + flags=["no_deps"], + ) else: new_compose = self._fake_odcs_new_compose( - "", 'build', packages=packages, - builds=builds, arches=arches) + "", "build", packages=packages, builds=builds, arches=arches + ) self.handler.log_info( - "Started generating ODCS 'build' type compose %d." % ( - new_compose["id"])) + "Started generating ODCS 'build' type compose %d." % (new_compose["id"]) + ) return new_compose diff --git a/freshmaker/parsers/__init__.py b/freshmaker/parsers/__init__.py index 99adfae8..f7ee0ae8 100644 --- a/freshmaker/parsers/__init__.py +++ b/freshmaker/parsers/__init__.py @@ -28,6 +28,7 @@ class BaseParser(object): """ Abstract parser class parsing fedmsg messages and generating events. 
""" + __metaclass__ = abc.ABCMeta name = "abstract_parser" topic_suffixes = [] # type: list[str] diff --git a/freshmaker/parsers/brew/task_state_change.py b/freshmaker/parsers/brew/task_state_change.py index 05f63da4..25e8c27a 100644 --- a/freshmaker/parsers/brew/task_state_change.py +++ b/freshmaker/parsers/brew/task_state_change.py @@ -34,24 +34,26 @@ class BrewTaskStateChangeParser(BaseParser): """ name = "BrewTaskStateChangeParser" - topic_suffixes = ["brew.task.closed", 'brew.task.failed'] + topic_suffixes = ["brew.task.closed", "brew.task.failed"] def can_parse(self, topic, msg): return any([topic.endswith(s) for s in self.topic_suffixes]) def parse(self, topic, msg): - msg_id = msg.get('msg_id') - inner_msg = msg.get('msg') - old_state = inner_msg.get('old') - new_state = inner_msg.get('new') - task_info = inner_msg.get('info', {}) - task_id = task_info.get('id') - task_method = task_info.get('method') - - if task_method == 'buildContainer': - request = task_info.get('request') + msg_id = msg.get("msg_id") + inner_msg = msg.get("msg") + old_state = inner_msg.get("old") + new_state = inner_msg.get("new") + task_info = inner_msg.get("info", {}) + task_id = task_info.get("id") + task_method = task_info.get("method") + + if task_method == "buildContainer": + request = task_info.get("request") (git_url, target, opts) = request - branch = opts.get('git_branch', None) + branch = opts.get("git_branch", None) m = re.match(r".*/(?P[^#]*)", git_url) - container = m.group('container') - return BrewContainerTaskStateChangeEvent(msg_id, container, branch, target, task_id, old_state, new_state) + container = m.group("container") + return BrewContainerTaskStateChangeEvent( + msg_id, container, branch, target, task_id, old_state, new_state + ) diff --git a/freshmaker/parsers/internal/freshmaker_manage_request.py b/freshmaker/parsers/internal/freshmaker_manage_request.py index 49d8bf72..e82b3269 100644 --- a/freshmaker/parsers/internal/freshmaker_manage_request.py +++ b/freshmaker/parsers/internal/freshmaker_manage_request.py @@ -39,18 +39,20 @@ def parse(self, topic, msg): Parse message and call specific method according to the action defined within the message. """ - action_from_topic = topic.split('.')[-1] - inner_msg = msg.get('msg') + action_from_topic = topic.split(".")[-1] + inner_msg = msg.get("msg") - if 'action' not in inner_msg: + if "action" not in inner_msg: raise ValueError("Action is not defined within the message.") - if inner_msg['action'] != action_from_topic: - raise ValueError("Last part of 'Freshmaker manage' message topic" - " must match the action defined within the message.") + if inner_msg["action"] != action_from_topic: + raise ValueError( + "Last part of 'Freshmaker manage' message topic" + " must match the action defined within the message." 
+ ) - if 'try' not in inner_msg: - inner_msg['try'] = 0 + if "try" not in inner_msg: + inner_msg["try"] = 0 try: getattr(self, action_from_topic)(inner_msg) @@ -64,8 +66,8 @@ def eventcancel(self, inner_msg): Parse message for event cancelation request """ try: - inner_msg['event_id'] - inner_msg['builds_id'] + inner_msg["event_id"] + inner_msg["builds_id"] except KeyError: raise ValueError("Message doesn't contain all required information.") diff --git a/freshmaker/parsers/internal/manual_rebuild.py b/freshmaker/parsers/internal/manual_rebuild.py index 5ce5c1ca..a23cf0bf 100644 --- a/freshmaker/parsers/internal/manual_rebuild.py +++ b/freshmaker/parsers/internal/manual_rebuild.py @@ -47,16 +47,16 @@ def parse_post_data(self, data): :param dict data: Dict generated from JSON from HTTP POST or parsed from the UMB message sent from Frontend to Backend. """ - msg_id = data.get('msg_id', "manual_rebuild_%s" % (str(time.time()))) - dry_run = data.get('dry_run', False) + msg_id = data.get("msg_id", "manual_rebuild_%s" % (str(time.time()))) + dry_run = data.get("dry_run", False) - errata_id = data.get('errata_id') + errata_id = data.get("errata_id") errata = Errata() advisory = ErrataAdvisory.from_advisory_id(errata, errata_id) if advisory.is_flatpak_module_advisory_ready(): klass = FlatpakApplicationManualBuildEvent - elif advisory.state == "SHIPPED_LIVE" and advisory.reporter.startswith('botas'): + elif advisory.state == "SHIPPED_LIVE" and advisory.reporter.startswith("botas"): klass = ManualBundleRebuildEvent else: klass = ManualRebuildWithAdvisoryEvent @@ -66,11 +66,11 @@ def parse_post_data(self, data): advisory, data.get("container_images", []), requester_metadata_json=data.get("metadata"), - freshmaker_event_id=data.get('freshmaker_event_id'), - requester=data.get('requester'), - dry_run=dry_run + freshmaker_event_id=data.get("freshmaker_event_id"), + requester=data.get("requester"), + dry_run=dry_run, ) def parse(self, topic, msg): - inner_msg = msg.get('msg') + inner_msg = msg.get("msg") return self.parse_post_data(inner_msg) diff --git a/freshmaker/parsers/koji/async_manual_build.py b/freshmaker/parsers/koji/async_manual_build.py index ac6f7338..669f8327 100644 --- a/freshmaker/parsers/koji/async_manual_build.py +++ b/freshmaker/parsers/koji/async_manual_build.py @@ -27,8 +27,8 @@ class FreshmakerAsyncManualbuildParser(BaseParser): """Parser of event async.manual.build""" - name = 'FreshmakerAsyncManualbuildParser' - topic_suffixes = ['freshmaker.async.manual.build'] + name = "FreshmakerAsyncManualbuildParser" + topic_suffixes = ["freshmaker.async.manual.build"] def can_parse(self, topic, msg): return any([topic.endswith(s) for s in self.topic_suffixes]) @@ -42,16 +42,19 @@ def parse_post_data(self, data): :param dict data: Dict generated from JSON from HTTP POST or parsed from the UMB message sent from Frontend to Backend. 
""" - msg_id = data.get('msg_id', "async_build_%s" % (str(time.time()))) + msg_id = data.get("msg_id", "async_build_%s" % (str(time.time()))) return FreshmakerAsyncManualBuildEvent( - msg_id, data.get('dist_git_branch'), data.get('container_images', []), - freshmaker_event_id=data.get('freshmaker_event_id'), - brew_target=data.get('brew_target'), - dry_run=data.get('dry_run', False), - requester=data.get('requester', None), - requester_metadata_json=data.get("metadata", None)) + msg_id, + data.get("dist_git_branch"), + data.get("container_images", []), + freshmaker_event_id=data.get("freshmaker_event_id"), + brew_target=data.get("brew_target"), + dry_run=data.get("dry_run", False), + requester=data.get("requester", None), + requester_metadata_json=data.get("metadata", None), + ) def parse(self, topic, msg): - inner_msg = msg['msg'] + inner_msg = msg["msg"] return self.parse_post_data(inner_msg) diff --git a/freshmaker/parsers/koji/task_state_change.py b/freshmaker/parsers/koji/task_state_change.py index 1dc18890..26ecc72b 100644 --- a/freshmaker/parsers/koji/task_state_change.py +++ b/freshmaker/parsers/koji/task_state_change.py @@ -29,6 +29,7 @@ class KojiTaskStateChangeParser(BaseParser): Parser parsing task state change message from buildsys (koji), generating KojiTaskStateChanged event. """ + name = "KojiTaskStateChangeParser" topic_suffixes = ["buildsys.task.state.change"] @@ -39,15 +40,14 @@ def can_parse(self, topic, msg): return True def parse(self, topic, msg): - msg_id = msg.get('msg_id') - msg_inner_msg = msg.get('msg') + msg_id = msg.get("msg_id") + msg_inner_msg = msg.get("msg") # If there isn't a msg dict in msg then this message can be skipped if not msg_inner_msg: - log.debug(('Skipping message without any content with the ' - 'topic "{0}"').format(topic)) + log.debug( + ("Skipping message without any content with the " 'topic "{0}"').format(topic) + ) return None - return KojiTaskStateChangeEvent(msg_id, - msg_inner_msg.get('id'), - msg_inner_msg.get('new')) + return KojiTaskStateChangeEvent(msg_id, msg_inner_msg.get("id"), msg_inner_msg.get("new")) diff --git a/freshmaker/parsers/odcs/state_change.py b/freshmaker/parsers/odcs/state_change.py index f260d190..68bb5613 100644 --- a/freshmaker/parsers/odcs/state_change.py +++ b/freshmaker/parsers/odcs/state_change.py @@ -33,8 +33,8 @@ def can_parse(self, topic, msg): return any([topic.endswith(s) for s in self.topic_suffixes]) def parse(self, topic, msg): - msg_id = msg.get('msg_id') - inner_msg = msg.get('msg') - compose = inner_msg.get('compose') + msg_id = msg.get("msg_id") + inner_msg = msg.get("msg") + compose = inner_msg.get("compose") return ODCSComposeStateChangeEvent(msg_id, compose) diff --git a/freshmaker/producer.py b/freshmaker/producer.py index e7801614..905b2662 100644 --- a/freshmaker/producer.py +++ b/freshmaker/producer.py @@ -35,9 +35,11 @@ try: # SQLAlchemy 1.4 from sqlalchemy.exc import StatementError, PendingRollbackError + _sa_disconnect_exceptions = (StatementError, PendingRollbackError) except ImportError: from sqlalchemy.exc import StatementError + _sa_disconnect_exceptions = (StatementError,) # type: ignore @@ -51,17 +53,21 @@ def poll(self): db.session.rollback() log.error("Invalid request, session is rolled back: %s", ex.orig) except Exception: - msg = 'Error in poller execution:' + msg = "Error in poller execution:" log.exception(msg) - log.info('Poller will now sleep for "{}" seconds' - .format(conf.polling_interval)) + log.info('Poller will now sleep for "{}" 
seconds'.format(conf.polling_interval)) def check_unfinished_koji_tasks(self, session): stale_date = datetime.utcnow() - timedelta(days=7) - db_events = session.query(models.Event).filter( - models.Event.state == EventState.BUILDING.value, - models.Event.time_created >= stale_date).all() + db_events = ( + session.query(models.Event) + .filter( + models.Event.state == EventState.BUILDING.value, + models.Event.time_created >= stale_date, + ) + .all() + ) for db_event in db_events: for build in db_event.builds: @@ -69,14 +75,13 @@ def check_unfinished_koji_tasks(self, session): continue if build.build_id <= 0: continue - with koji_service( - conf.koji_profile, log, login=False) as koji_session: + with koji_service(conf.koji_profile, log, login=False) as koji_session: task = koji_session.get_task_info(build.build_id) task_states = {v: k for k, v in koji.TASK_STATES.items()} new_state = task_states[task["state"]] if new_state not in ["FAILED", "CLOSED"]: continue event = BrewContainerTaskStateChangeEvent( - "fake event", build.name, None, None, build.build_id, - "BUILD", new_state) + "fake event", build.name, None, None, build.build_id, "BUILD", new_state + ) work_queue_put(event) diff --git a/freshmaker/proxy.py b/freshmaker/proxy.py index 753f6a91..8b54efd9 100644 --- a/freshmaker/proxy.py +++ b/freshmaker/proxy.py @@ -32,29 +32,30 @@ class ReverseProxy(object): - '''Wrap the application in this middleware and configure the + """Wrap the application in this middleware and configure the front-end server to add these headers, to let you quietly bind this to a URL other than / and to an HTTP scheme that is different than what is used locally. :param app: the WSGI application - ''' + """ + def __init__(self, app): self.app = app def __call__(self, environ, start_response): - script_name = environ.get('HTTP_X_SCRIPT_NAME', '') + script_name = environ.get("HTTP_X_SCRIPT_NAME", "") if script_name: - environ['SCRIPT_NAME'] = script_name - path_info = environ['PATH_INFO'] + environ["SCRIPT_NAME"] = script_name + path_info = environ["PATH_INFO"] if path_info.startswith(script_name): - environ['PATH_INFO'] = path_info[len(script_name):] + environ["PATH_INFO"] = path_info[len(script_name) :] - server = environ.get('HTTP_X_FORWARDED_HOST', '') + server = environ.get("HTTP_X_FORWARDED_HOST", "") if server: - environ['HTTP_HOST'] = server + environ["HTTP_HOST"] = server - scheme = environ.get('HTTP_X_SCHEME', '') + scheme = environ.get("HTTP_X_SCHEME", "") if scheme: - environ['wsgi.url_scheme'] = scheme + environ["wsgi.url_scheme"] = scheme return self.app(environ, start_response) diff --git a/freshmaker/pyxis.py b/freshmaker/pyxis.py index 244ac205..31bd4cb7 100644 --- a/freshmaker/pyxis.py +++ b/freshmaker/pyxis.py @@ -41,7 +41,7 @@ def trace_id(self): class Pyxis(object): - """ Interface for querying Pyxis""" + """Interface for querying Pyxis""" region = dogpile.cache.make_region().configure(conf.dogpile_cache_backend) @@ -63,8 +63,9 @@ def _make_request(self, entity, params): entity_url = urllib.parse.urljoin(self._api_root, entity) auth_method = HTTPKerberosAuth(mutual_authentication=OPTIONAL) - response = requests.get(entity_url, params=params, auth=auth_method, - timeout=conf.net_timeout) + response = requests.get( + entity_url, params=params, auth=auth_method, timeout=conf.net_timeout + ) if response.ok: return response.json() @@ -115,20 +116,19 @@ def _pagination(self, entity, params): local_params["page"] = page response_data = self._make_request(entity, params=local_params) # When the page after 
the actual last page is reached, data will be an empty list - if not response_data.get('data'): + if not response_data.get("data"): break - ret.extend(response_data['data']) + ret.extend(response_data["data"]) page += 1 return ret def get_operator_indices(self): - """ Get all index images for organization(s)(configurable) from Pyxis """ + """Get all index images for organization(s)(configurable) from Pyxis""" request_params = {} organizations = conf.pyxis_index_image_organizations if organizations: - rsql = " or ".join( - [f"organization=={organization}" for organization in organizations]) + rsql = " or ".join([f"organization=={organization}" for organization in organizations]) request_params["filter"] = rsql indices = self._pagination("operators/indices", request_params) log.debug("Found the following index images: %s", ", ".join(i["path"] for i in indices)) @@ -140,12 +140,12 @@ def get_operator_indices(self): return indices def get_index_paths(self): - """ Get paths of index images """ + """Get paths of index images""" return [i["path"] for i in self.get_operator_indices() if i.get("path")] @region.cache_on_arguments() def ocp_is_released(self, ocp_version): - """ Check if ocp_version is released by comparing the GA date with current date + """Check if ocp_version is released by comparing the GA date with current date :param str ocp_version: the OpenShift Version :return: True if GA date in Product Pages is in the past, otherwise False @@ -162,7 +162,7 @@ def ocp_is_released(self, ocp_version): return datetime.now() > datetime.strptime(ga_date_str, "%Y-%m-%d") def get_bundles_by_related_image_digest(self, digest, index_paths=None, latest=True): - """ Get bundles which include a related image with the specified digest + """Get bundles which include a related image with the specified digest :param str digest: digest value of related image :param list index_paths: list of index image paths @@ -171,9 +171,15 @@ def get_bundles_by_related_image_digest(self, digest, index_paths=None, latest=T :rtype: list """ related_bundles = [] - include_fields = ['data.channel_name', 'data.version_original', 'data.related_images', - 'data.bundle_path_digest', 'data.bundle_path', 'data.csv_name'] - request_params = {'include': ','.join(include_fields)} + include_fields = [ + "data.channel_name", + "data.version_original", + "data.related_images", + "data.bundle_path_digest", + "data.bundle_path", + "data.csv_name", + ] + request_params = {"include": ",".join(include_fields)} filters = [f"related_images.digest=={digest}"] if latest: @@ -181,9 +187,9 @@ def get_bundles_by_related_image_digest(self, digest, index_paths=None, latest=T if index_paths: index_paths = ",".join(index_paths) filters.append(f"source_index_container_path=in=({index_paths})") - request_params['filter'] = " and ".join(filters) + request_params["filter"] = " and ".join(filters) - bundles = self._pagination('operators/bundles', request_params) + bundles = self._pagination("operators/bundles", request_params) for bundle in bundles: csv_name = bundle["csv_name"] version = bundle["version_original"] @@ -206,15 +212,15 @@ def get_manifest_list_digest_by_nvr(self, nvr, must_be_published=True): :return: digest of image or None if manifest_list_digest not exists :rtype: str or None """ - request_params = {'include': ','.join(['data.brew', 'data.repositories'])} + request_params = {"include": ",".join(["data.brew", "data.repositories"])} # get manifest_list_digest of ContainerImage from Pyxis - for image in self._pagination(f'images/nvr/{nvr}', 
request_params): - for repo in image['repositories']: - if must_be_published and not repo['published']: + for image in self._pagination(f"images/nvr/{nvr}", request_params): + for repo in image["repositories"]: + if must_be_published and not repo["published"]: continue - if 'manifest_list_digest' in repo: - return repo['manifest_list_digest'] + if "manifest_list_digest" in repo: + return repo["manifest_list_digest"] return None def get_manifest_schema2_digests_by_nvr(self, nvr, must_be_published=True): @@ -227,16 +233,16 @@ def get_manifest_schema2_digests_by_nvr(self, nvr, must_be_published=True): :return: a list of image manifest schema2 digests :rtype: list """ - request_params = {'include': ','.join(['data.brew', 'data.repositories'])} + request_params = {"include": ",".join(["data.brew", "data.repositories"])} digests = set() # Each arch has a manifest schema2 digest, they're different - for image in self._pagination(f'images/nvr/{nvr}', request_params): - for repo in image['repositories']: - if must_be_published and not repo['published']: + for image in self._pagination(f"images/nvr/{nvr}", request_params): + for repo in image["repositories"]: + if must_be_published and not repo["published"]: continue - if 'manifest_schema2_digest' in repo: - digests.add(repo['manifest_schema2_digest']) + if "manifest_schema2_digest" in repo: + digests.add(repo["manifest_schema2_digest"]) return list(digests) def get_bundles_by_digests(self, digests): @@ -249,11 +255,11 @@ def get_bundles_by_digests(self, digests): """ q_filter = " or ".join([f"bundle_path_digest=={digest}" for digest in digests]) params = { - 'include': ','.join(['data.version_original', 'data.csv_name']), - 'filter': q_filter + "include": ",".join(["data.version_original", "data.csv_name"]), + "filter": q_filter, } - return self._pagination('operators/bundles', params) + return self._pagination("operators/bundles", params) def get_bundles_by_nvr(self, nvr): """ @@ -282,13 +288,12 @@ def get_images_by_digest(self, digest): :rtype: list """ q_filter = ( - f"repositories.manifest_list_digest=={digest}" + - " or " + - f"repositories.manifest_schema2_digest=={digest}" + f"repositories.manifest_list_digest=={digest}" + + " or " + + f"repositories.manifest_schema2_digest=={digest}" ) - request_params = {'include': 'data.brew,data.repositories', - 'filter': q_filter} - return self._pagination('images', request_params) + request_params = {"include": "data.brew,data.repositories", "filter": q_filter} + return self._pagination("images", request_params) def get_images_by_nvr(self, nvr, include=None): """ @@ -301,8 +306,8 @@ def get_images_by_nvr(self, nvr, include=None): """ request_params = {"include": "data.architecture,data.brew,data.repositories"} if include: - request_params = {'include': ','.join(include)} - return self._pagination(f'images/nvr/{nvr}', request_params) + request_params = {"include": ",".join(include)} + return self._pagination(f"images/nvr/{nvr}", request_params) def get_auto_rebuild_tags(self, registry, repository): """ @@ -313,9 +318,9 @@ def get_auto_rebuild_tags(self, registry, repository): :rtype: list :return: list of auto rebuild tags """ - params = {'include': 'auto_rebuild_tags'} + params = {"include": "auto_rebuild_tags"} repo = self._get(f"repositories/registry/{registry}/repository/{repository}", params) - return repo.get('auto_rebuild_tags', []) + return repo.get("auto_rebuild_tags", []) def is_bundle(self, nvr): """ @@ -326,7 +331,7 @@ def is_bundle(self, nvr): :rtype: bool """ request_params = {"include": 
"data.parsed_data.labels"} - images = self._pagination(f'images/nvr/{nvr}', request_params) + images = self._pagination(f"images/nvr/{nvr}", request_params) if not images: return False @@ -336,18 +341,22 @@ def is_bundle(self, nvr): return False def image_is_tagged_auto_rebuild(self, nvr): - include = ["data.repositories.registry", "data.repositories.repository", - "data.repositories.tags.name", "data.repositories.published"] + include = [ + "data.repositories.registry", + "data.repositories.repository", + "data.repositories.tags.name", + "data.repositories.published", + ] images = self.get_images_by_nvr(nvr, include=include) if images: # Only use item 0 for getting necessary metadata as the difference between # different items are the arches info, the metadata we want are the same. image = images[0] - for repo in image['repositories']: - if not repo['published']: + for repo in image["repositories"]: + if not repo["published"]: continue - auto_rebuild_tags = self.get_auto_rebuild_tags(repo['registry'], repo['repository']) - if set(tag['name'] for tag in repo['tags']) & set(auto_rebuild_tags): + auto_rebuild_tags = self.get_auto_rebuild_tags(repo["registry"], repo["repository"]) + if set(tag["name"] for tag in repo["tags"]) & set(auto_rebuild_tags): return True return False @@ -363,4 +372,6 @@ def is_hotfix_image(self, image_nvr): if not image: raise Exception("Image %s was not found in Pyxis", image_nvr) # images for different arches contain the same label names, so just check the first image - return any(label["name"] == "com.redhat.hotfix" for label in image[0]["parsed_data"]["labels"]) + return any( + label["name"] == "com.redhat.hotfix" for label in image[0]["parsed_data"]["labels"] + ) diff --git a/freshmaker/types.py b/freshmaker/types.py index 836d8705..1acabb6e 100644 --- a/freshmaker/types.py +++ b/freshmaker/types.py @@ -24,8 +24,11 @@ freshmaker_artifact_build_done_counter, freshmaker_artifact_build_failed_counter, freshmaker_artifact_build_canceled_counter, - freshmaker_event_complete_counter, freshmaker_event_failed_counter, - freshmaker_event_skipped_counter, freshmaker_event_canceled_counter) + freshmaker_event_complete_counter, + freshmaker_event_failed_counter, + freshmaker_event_skipped_counter, + freshmaker_event_canceled_counter, +) class ArtifactType(Enum): @@ -36,7 +39,6 @@ class ArtifactType(Enum): class ArtifactBuildState(Enum): - def __init__(self, value): self._value_ = value @@ -45,7 +47,7 @@ def __init__(self, value): freshmaker_artifact_build_done_counter, freshmaker_artifact_build_failed_counter, freshmaker_artifact_build_canceled_counter, - None + None, ] if isinstance(value, int): @@ -61,7 +63,6 @@ def __init__(self, value): class EventState(Enum): - def __init__(self, value): self._value_ = value @@ -71,7 +72,7 @@ def __init__(self, value): freshmaker_event_complete_counter, freshmaker_event_failed_counter, freshmaker_event_skipped_counter, - freshmaker_event_canceled_counter + freshmaker_event_canceled_counter, ] if isinstance(value, int): diff --git a/freshmaker/utils.py b/freshmaker/utils.py index 4c2e859a..8b885b3d 100644 --- a/freshmaker/utils.py +++ b/freshmaker/utils.py @@ -53,11 +53,12 @@ def sorted_by_nvr(lst, get_nvr=None, reverse=False): :rtype: list :return: Sorted `lst`. 
""" + def _compare_items(item1, item2): if get_nvr: nvr1 = get_nvr(item1) nvr2 = get_nvr(item2) - elif hasattr(item1, 'nvr') and hasattr(item2, 'nvr'): + elif hasattr(item1, "nvr") and hasattr(item2, "nvr"): nvr1 = item1.nvr nvr2 = item2.nvr else: @@ -70,8 +71,7 @@ def _compare_items(item1, item2): return _cmp(nvr1_dict["name"], nvr2_dict["name"]) return kobo.rpmlib.compare_nvr(nvr1_dict, nvr2_dict) - return sorted( - lst, key=functools.cmp_to_key(_compare_items), reverse=reverse) + return sorted(lst, key=functools.cmp_to_key(_compare_items), reverse=reverse) def get_url_for(*args, **kwargs): @@ -83,11 +83,13 @@ def get_url_for(*args, **kwargs): # Localhost is right URL only when the scheduler runs on the same # system as the web views. - app.config['SERVER_NAME'] = 'localhost' + app.config["SERVER_NAME"] = "localhost" with app.app_context(): - log.warning("get_url_for() has been called without the Flask " - "app_context. That can lead to SQLAlchemy errors caused by " - "multiple session being used in the same time.") + log.warning( + "get_url_for() has been called without the Flask " + "app_context. That can lead to SQLAlchemy errors caused by " + "multiple session being used in the same time." + ) return url_for(*args, **kwargs) @@ -108,20 +110,19 @@ def get_rebuilt_nvr(artifact_type, nvr): parsed_nvr = koji.parse_NVR(nvr) r_version = parsed_nvr["release"].split(".")[0] release = f"{r_version}.{int(time.time())}{conf.rebuilt_nvr_release_suffix}" - rebuilt_nvr = "%s-%s-%s" % (parsed_nvr["name"], parsed_nvr["version"], - release) + rebuilt_nvr = "%s-%s-%s" % (parsed_nvr["name"], parsed_nvr["version"], release) return rebuilt_nvr def load_class(location): - """ Take a string of the form 'fedmsg.consumers.ircbot:IRCBotConsumer' + """Take a string of the form 'fedmsg.consumers.ircbot:IRCBotConsumer' and return the IRCBotConsumer class. """ try: - mod_name, cls_name = location.strip().split(':') + mod_name, cls_name = location.strip().split(":") except ValueError: - raise ImportError('Invalid import path.') + raise ImportError("Invalid import path.") __import__(mod_name) @@ -136,8 +137,11 @@ def load_classes(import_paths): return [load_class(import_path) for import_path in import_paths] -def retry(timeout=conf.net_timeout, interval=conf.net_retry_interval, wait_on=Exception, logger=None): +def retry( + timeout=conf.net_timeout, interval=conf.net_retry_interval, wait_on=Exception, logger=None +): """A decorator that allows to retry a section of code until success or timeout.""" + def wrapper(function): @functools.wraps(function) def inner(*args, **kwargs): @@ -155,15 +159,25 @@ def inner(*args, **kwargs): ) raise if logger is not None: - logger.warning("Exception %r raised from %r. Retry in %rs", - e, function, interval) + logger.warning( + "Exception %r raised from %r. Retry in %rs", e, function, interval + ) time.sleep(interval) + return inner + return wrapper -def _run_command(command, logger=None, rundir=None, output=subprocess.PIPE, error=subprocess.PIPE, env=None, - log_output=True): +def _run_command( + command, + logger=None, + rundir=None, + output=subprocess.PIPE, + error=subprocess.PIPE, + env=None, + log_output=True, +): """Run a command, return output. 
Error out if command exit with non-zero code.""" if rundir is None: @@ -172,8 +186,15 @@ def _run_command(command, logger=None, rundir=None, output=subprocess.PIPE, erro if logger: logger.info("Running %s", subprocess.list2cmdline(command)) - p1 = subprocess.Popen(command, cwd=rundir, stdout=output, stderr=error, universal_newlines=True, env=env, - close_fds=True) + p1 = subprocess.Popen( + command, + cwd=rundir, + stdout=output, + stderr=error, + universal_newlines=True, + env=env, + close_fds=True, + ) (out, err) = p1.communicate() if out and logger and log_output: @@ -189,12 +210,12 @@ def _run_command(command, logger=None, rundir=None, output=subprocess.PIPE, erro def is_pkg_modular(nvr): - """ Returns True if the package is modular, False otherwise. """ + """Returns True if the package is modular, False otherwise.""" return "module+" in nvr def get_ocp_release_date(ocp_version): - """ Get the OpenShift version release date via the Product Pages API + """Get the OpenShift version release date via the Product Pages API :param str ocp_version: the OpenShift version :return: None or date in format of "%Y-%m-%d", example: 2021-02-23. @@ -222,11 +243,11 @@ def get_ocp_release_date(ocp_version): if not resp.json(): return None - return resp.json()[0]['date_finish'] + return resp.json()[0]["date_finish"] def is_valid_ocp_versions_range(ocp_versions_range): - """ Check if an ocp_versions_range string is valid + """Check if an ocp_versions_range string is valid :param str ocp_versions_range: the OpenShift versions range value :return: True if the OpenShift versions range is valid, otherwise False @@ -236,17 +257,14 @@ def is_valid_ocp_versions_range(ocp_versions_range): # For historical reasons, there are two special values that are currently allowed # to contain commas, "v4.5,v4.6" and "v4.6,v4.5". 
valid_commas_ranges = ["v4.5,v4.6", "v4.6,v4.5"] - if ( - "," in ocp_versions_range and - ocp_versions_range.replace(" ", "") not in valid_commas_ranges - ): + if "," in ocp_versions_range and ocp_versions_range.replace(" ", "") not in valid_commas_ranges: return False return True def is_valid_semver(version_string): - """ Check if version string is a valid semantic version + """Check if version string is a valid semantic version :param str version_string: version string :return: True if version string is a valid semantic version, other False diff --git a/freshmaker/views.py b/freshmaker/views.py index 6bb125ad..bc060db3 100644 --- a/freshmaker/views.py +++ b/freshmaker/views.py @@ -43,132 +43,135 @@ from freshmaker.parsers.internal.manual_rebuild import FreshmakerManualRebuildParser from freshmaker.parsers.koji.async_manual_build import FreshmakerAsyncManualbuildParser from freshmaker.monitor import ( - monitor_api, freshmaker_build_api_latency, freshmaker_event_api_latency) + monitor_api, + freshmaker_build_api_latency, + freshmaker_event_api_latency, +) from freshmaker.image_verifier import ImageVerifier from freshmaker.types import ArtifactBuildState, EventState api_v1 = { - 'event_types': { - 'event_types_list': { - 'url': '/api/1/event-types/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "event_types": { + "event_types_list": { + "url": "/api/1/event-types/", + "options": { + "defaults": {"id": None}, + "methods": ["GET"], + }, }, - 'event_type': { - 'url': '/api/1/event-types/', - 'options': { - 'methods': ['GET'], - } + "event_type": { + "url": "/api/1/event-types/", + "options": { + "methods": ["GET"], + }, }, }, - 'build_types': { - 'build_types_list': { - 'url': '/api/1/build-types/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "build_types": { + "build_types_list": { + "url": "/api/1/build-types/", + "options": { + "defaults": {"id": None}, + "methods": ["GET"], + }, }, - 'build_type': { - 'url': '/api/1/build-types/', - 'options': { - 'methods': ['GET'], - } + "build_type": { + "url": "/api/1/build-types/", + "options": { + "methods": ["GET"], + }, }, }, - 'build_states': { - 'build_states_list': { - 'url': '/api/1/build-states/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "build_states": { + "build_states_list": { + "url": "/api/1/build-states/", + "options": { + "defaults": {"id": None}, + "methods": ["GET"], + }, }, - 'build_state': { - 'url': '/api/1/build-states/', - 'options': { - 'methods': ['GET'], - } + "build_state": { + "url": "/api/1/build-states/", + "options": { + "methods": ["GET"], + }, }, }, - 'events': { - 'events_list': { - 'url': '/api/1/events/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "events": { + "events_list": { + "url": "/api/1/events/", + "options": { + "defaults": {"id": None}, + "methods": ["GET"], + }, }, - 'event': { - 'url': '/api/1/events/', - 'options': { - 'methods': ['GET', 'PATCH'], - } + "event": { + "url": "/api/1/events/", + "options": { + "methods": ["GET", "PATCH"], + }, }, }, - 'builds': { - 'builds_list': { - 'url': '/api/1/builds/', - 'options': { - 'defaults': {'id': None}, - 'methods': ['GET'], - } + "builds": { + "builds_list": { + "url": "/api/1/builds/", + "options": { + "defaults": {"id": None}, + "methods": ["GET"], + }, }, - 'build': { - 'url': '/api/1/builds/', - 'options': { - 'methods': ['GET'], - } + "build": { + "url": "/api/1/builds/", + "options": { + "methods": ["GET"], + }, }, - 'manual_trigger': { - 
'url': '/api/1/builds/', - 'options': { - 'methods': ['POST'], - } + "manual_trigger": { + "url": "/api/1/builds/", + "options": { + "methods": ["POST"], + }, }, }, - 'async_builds': { - 'async_build': { - 'url': '/api/1/async-builds/', - 'options': { - 'methods': ['POST'], - } + "async_builds": { + "async_build": { + "url": "/api/1/async-builds/", + "options": { + "methods": ["POST"], + }, }, }, - 'about': { - 'about': { - 'url': '/api/1/about/', - 'options': { - 'methods': ['GET'], - } + "about": { + "about": { + "url": "/api/1/about/", + "options": { + "methods": ["GET"], + }, }, }, - 'verify_image': { - 'verify_image': { - 'url': '/api/1/verify-image/', - 'options': { - 'methods': ['GET'], - } + "verify_image": { + "verify_image": { + "url": "/api/1/verify-image/", + "options": { + "methods": ["GET"], + }, }, }, - 'verify_image_repository': { - 'verify_image_repository': { - 'url': '/api/1/verify-image-repository//', - 'options': { - 'methods': ['GET'], - } + "verify_image_repository": { + "verify_image_repository": { + "url": "/api/1/verify-image-repository//", + "options": { + "methods": ["GET"], + }, }, }, - 'pullspec_overrides': { - 'pullspec_overrides': { - 'url': '/api/1/pullspec_overrides/', - 'options': { - 'methods': ['GET'], - } + "pullspec_overrides": { + "pullspec_overrides": { + "url": "/api/1/pullspec_overrides/", + "options": { + "methods": ["GET"], + }, }, - } + }, } @@ -176,13 +179,13 @@ class EventTypeAPI(MethodView): def get(self, id): event_types = [] for cls, val in models.EVENT_TYPES.items(): - event_types.append({'name': cls.__name__, 'id': val}) + event_types.append({"name": cls.__name__, "id": val}) if id is None: - return jsonify({'items': event_types}), 200 + return jsonify({"items": event_types}), 200 else: - event_type = [x for x in event_types if x['id'] == id] + event_type = [x for x in event_types if x["id"] == id] if event_type: return jsonify(event_type.pop()), 200 @@ -194,13 +197,13 @@ class BuildTypeAPI(MethodView): def get(self, id): build_types = [] for x in list(types.ArtifactType): - build_types.append({'name': x.name, 'id': x.value}) + build_types.append({"name": x.name, "id": x.value}) if id is None: - return jsonify({'items': build_types}), 200 + return jsonify({"items": build_types}), 200 else: - build_type = [x for x in build_types if x['id'] == id] + build_type = [x for x in build_types if x["id"] == id] if build_type: return jsonify(build_type.pop()), 200 @@ -212,13 +215,13 @@ class BuildStateAPI(MethodView): def get(self, id): build_states = [] for x in list(types.ArtifactBuildState): - build_states.append({'name': x.name, 'id': x.value}) + build_states.append({"name": x.name, "id": x.value}) if id is None: - return jsonify({'items': build_states}), 200 + return jsonify({"items": build_states}), 200 else: - build_state = [x for x in build_states if x['id'] == id] + build_state = [x for x in build_states if x["id"] == id] if build_state: return jsonify(build_state.pop()), 200 @@ -227,12 +230,11 @@ def get(self, id): class EventAPI(MethodView): - - _freshmaker_manage_prefix = 'event' + _freshmaker_manage_prefix = "event" @freshmaker_event_api_latency.time() def get(self, id): - """ Returns Freshmaker Events. + """Returns Freshmaker Events. If ``id`` is set, only the Freshmaker Event defined by that ID is returned. 
@@ -263,24 +265,22 @@ def get(self, id): # be displayed in order to increase api speed # For API v1, this is true by default to not break the backward compatibility # For API v2, this is false by default - value = request.args.getlist('show_full_json') + value = request.args.getlist("show_full_json") show_full_json = request.base_url.find("/api/1/") != -1 - if len(value) == 1 and value[0] == 'False': + if len(value) == 1 and value[0] == "False": show_full_json = False - elif len(value) == 1 and value[0] == 'True': + elif len(value) == 1 and value[0] == "True": show_full_json = True if id is None: p_query = filter_events(request) - json_data = { - 'meta': pagination_metadata(p_query, request.args) - } + json_data = {"meta": pagination_metadata(p_query, request.args)} if not show_full_json: - json_data['items'] = [item.json_min() for item in p_query.items] + json_data["items"] = [item.json_min() for item in p_query.items] else: - json_data['items'] = [item.json() for item in p_query.items] + json_data["items"] = [item.json() for item in p_query.items] return jsonify(json_data), 200 @@ -294,7 +294,7 @@ def get(self, id): return json_error(404, "Not Found", "No such event found.") @login_required - @requires_roles(['admin', 'manual_rebuilder']) + @requires_roles(["admin", "manual_rebuilder"]) def patch(self, id): """ Manage Freshmaker event defined by ID. The request must be @@ -320,10 +320,12 @@ def patch(self, id): :statuscode 400: Action is missing or is unsupported. """ data = request.get_json(force=True) - if 'action' not in data: + if "action" not in data: return json_error( - 400, "Bad Request", "Missing action in request." - " Don't know what to do with the event.") + 400, + "Bad Request", + "Missing action in request." " Don't know what to do with the event.", + ) if data["action"] != "cancel": return json_error(400, "Bad Request", "Unsupported action requested.") @@ -335,7 +337,8 @@ def patch(self, id): username = g.user.username if conf.auth_backend != "noauth" else None if username and event.requester != g.user.username and not user_has_role("admin"): return json_error( - 403, "Forbidden", "You must be an admin to cancel someone else's event.") + 403, "Forbidden", "You must be an admin to cancel someone else's event." 
+ ) msg = "Event id %s requested for canceling by user %s" % (event.id, username) log.info(msg) @@ -344,10 +347,13 @@ def patch(self, id): event.builds_transition( ArtifactBuildState.CANCELED.value, "Build canceled before running on external build system.", - filters={'state': ArtifactBuildState.PLANNED.value}) + filters={"state": ArtifactBuildState.PLANNED.value}, + ) builds_id = event.builds_transition( - ArtifactBuildState.CANCELED.value, None, - filters={'state': ArtifactBuildState.BUILD.value}) + ArtifactBuildState.CANCELED.value, + None, + filters={"state": ArtifactBuildState.BUILD.value}, + ) db.session.commit() data["action"] = self._freshmaker_manage_prefix + data["action"] @@ -368,47 +374,51 @@ def _validate_rebuild_request(request): """ data = request.get_json(force=True) - for key in ('errata_id', 'freshmaker_event_id'): + for key in ("errata_id", "freshmaker_event_id"): if data.get(key) and not isinstance(data[key], int): - return json_error(400, 'Bad Request', f'"{key}" must be an integer.') + return json_error(400, "Bad Request", f'"{key}" must be an integer.') - if data.get('freshmaker_event_id'): - event = models.Event.get_by_event_id(db.session, data.get('freshmaker_event_id')) + if data.get("freshmaker_event_id"): + event = models.Event.get_by_event_id(db.session, data.get("freshmaker_event_id")) if not event: return json_error( - 400, 'Bad Request', 'The provided "freshmaker_event_id" is invalid.', + 400, + "Bad Request", + 'The provided "freshmaker_event_id" is invalid.', ) - for key in ('dist_git_branch', 'brew_target'): + for key in ("dist_git_branch", "brew_target"): if data.get(key) and not isinstance(data[key], str): - return json_error(400, 'Bad Request', f'"{key}" must be a string.') + return json_error(400, "Bad Request", f'"{key}" must be a string.') - container_images = data.get('container_images', []) - if ( - not isinstance(container_images, list) or - any(not isinstance(image, str) for image in container_images) + container_images = data.get("container_images", []) + if not isinstance(container_images, list) or any( + not isinstance(image, str) for image in container_images ): return json_error( - 400, 'Bad Request', '"container_images" must be an array of strings.', + 400, + "Bad Request", + '"container_images" must be an array of strings.', ) - if not isinstance(data.get('dry_run', False), bool): - return json_error(400, 'Bad Request', '"dry_run" must be a boolean.') + if not isinstance(data.get("dry_run", False), bool): + return json_error(400, "Bad Request", '"dry_run" must be a boolean.') - if not isinstance(data.get('force', False), bool): - return json_error(400, 'Bad Request', '"force" must be a boolean.') + if not isinstance(data.get("force", False), bool): + return json_error(400, "Bad Request", '"force" must be a boolean.') - if data.get('bundle_related_image_overrides', False) and not container_images: + if data.get("bundle_related_image_overrides", False) and not container_images: return json_error( 400, - 'Bad Request', - 'Manual image overriding allowed only when "container_images" is set explicitly' + "Bad Request", + 'Manual image overriding allowed only when "container_images" is set explicitly', ) - if (data.get('bundle_related_image_overrides', False) and - not isinstance(data.get('bundle_related_image_overrides'), dict)): + if data.get("bundle_related_image_overrides", False) and not isinstance( + data.get("bundle_related_image_overrides"), dict + ): return json_error( - 400, 'Bad Request', '"bundle_related_image_overrides" must be a 
dictionary' + 400, "Bad Request", '"bundle_related_image_overrides" must be a dictionary' ) return None @@ -433,17 +443,21 @@ def _create_rebuild_event_from_request(db_session, parser, request): if conf.auth_backend != "noauth": db_event.requester = g.user.username db_event.requested_rebuilds = " ".join(event.container_images) - if hasattr(event, 'requester_metadata_json') and event.requester_metadata_json: + if hasattr(event, "requester_metadata_json") and event.requester_metadata_json: db_event.requester_metadata = json.dumps(event.requester_metadata_json) - if data.get('freshmaker_event_id'): + if data.get("freshmaker_event_id"): dependent_event = models.Event.get_by_event_id( - db_session, data.get('freshmaker_event_id'), + db_session, + data.get("freshmaker_event_id"), ) if dependent_event: dependency = db_event.add_event_dependency(db_session, dependent_event) if not dependency: - log.warn('Dependency between {} and {} could not be added!'.format( - event.freshmaker_event_id, dependent_event.id)) + log.warn( + "Dependency between {} and {} could not be added!".format( + event.freshmaker_event_id, dependent_event.id + ) + ) db_session.commit() return db_event @@ -454,10 +468,8 @@ def get(self, id): if id is None: p_query = filter_artifact_builds(request) - json_data = { - 'meta': pagination_metadata(p_query, request.args) - } - json_data['items'] = [item.json() for item in p_query.items] + json_data = {"meta": pagination_metadata(p_query, request.args)} + json_data["items"] = [item.json() for item in p_query.items] return jsonify(json_data), 200 @@ -469,8 +481,8 @@ def get(self, id): return json_error(404, "Not Found", "No such build found.") @login_required - @require_scopes('submit-build') - @requires_roles(['admin', 'manual_rebuilder']) + @require_scopes("submit-build") + @requires_roles(["admin", "manual_rebuilder"]) def post(self): """ Trigger manual Freshmaker rebuild. The request must be @@ -512,24 +524,23 @@ def post(self): return error data = request.get_json(force=True) - if not data.get('errata_id') and not data.get('freshmaker_event_id'): + if not data.get("errata_id") and not data.get("freshmaker_event_id"): return json_error( 400, - 'Bad Request', - 'You must at least provide "errata_id" or "freshmaker_event_id" in the request.' 
+ "Bad Request", + 'You must at least provide "errata_id" or "freshmaker_event_id" in the request.', ) dependent_event = None - if data.get('freshmaker_event_id'): - dependent_event_id = data.get('freshmaker_event_id') - dependent_event = models.Event.get_by_event_id( - db.session, dependent_event_id) + if data.get("freshmaker_event_id"): + dependent_event_id = data.get("freshmaker_event_id") + dependent_event = models.Event.get_by_event_id(db.session, dependent_event_id) if dependent_event is None: return json_error( 400, - 'Bad Request', - f'There is no event with id {dependent_event_id}', + "Bad Request", + f"There is no event with id {dependent_event_id}", ) # requesting a CVE rebuild, the event can not be an async build event which @@ -537,32 +548,41 @@ def post(self): async_build_event_type = models.EVENT_TYPES[events.FreshmakerAsyncManualBuildEvent] if dependent_event.event_type_id == async_build_event_type: return json_error( - 400, 'Bad Request', f'The event (id={dependent_event_id}) is an async build' - ' event, can not be used for this build.') + 400, + "Bad Request", + f"The event (id={dependent_event_id}) is an async build" + " event, can not be used for this build.", + ) - if not data.get('errata_id'): - data['errata_id'] = int(dependent_event.search_key) - elif int(dependent_event.search_key) != data['errata_id']: + if not data.get("errata_id"): + data["errata_id"] = int(dependent_event.search_key) + elif int(dependent_event.search_key) != data["errata_id"]: return json_error( 400, - 'Bad Request', + "Bad Request", 'The provided "errata_id" doesn\'t match the Advisory ID associated with the ' 'input "freshmaker_event_id".', ) if data.get("errata_id") or data.get("freshmaker_event_id"): running_events = ( - models.Event.query.filter(models.Event.search_key == str(data['errata_id'])).filter( + models.Event.query.filter(models.Event.search_key == str(data["errata_id"])) + .filter( models.Event.state.in_( - [EventState.INITIALIZED.value, EventState.BUILDING.value])).all()) + [EventState.INITIALIZED.value, EventState.BUILDING.value] + ) + ) + .all() + ) - if running_events and not data.get('force', False): + if running_events and not data.get("force", False): event_ids = [e.id for e in running_events] return json_error( 400, - 'Bad Request', + "Bad Request", f'Events triggered by advisory {data["errata_id"]} are running: {event_ids}. ' - f'If you want to rebuild it anyway, use "force": true option.') + f'If you want to rebuild it anyway, use "force": true option.', + ) # Use the shared code to parse the POST data and generate right # event based on the data. @@ -585,8 +605,8 @@ def post(self): class AsyncBuildAPI(MethodView): @login_required - @require_scopes('submit-build') - @requires_roles(['admin', 'freshmaker_async_rebuilders']) + @require_scopes("submit-build") + @requires_roles(["admin", "freshmaker_async_rebuilders"]) def post(self): """ Trigger Freshmaker async rebuild (a.k.a non-CVE rebuild). 
The request @@ -635,31 +655,34 @@ def post(self): return error data = request.get_json(force=True) - if not all([data.get('dist_git_branch'), data.get('container_images')]): + if not all([data.get("dist_git_branch"), data.get("container_images")]): return json_error( 400, - 'Bad Request', + "Bad Request", '"dist_git_branch" and "container_images" are required in the request ' - 'for async builds', + "for async builds", ) dependent_event = None - if data.get('freshmaker_event_id'): + if data.get("freshmaker_event_id"): dependent_event = models.Event.get_by_event_id( - db.session, data.get('freshmaker_event_id'), + db.session, + data.get("freshmaker_event_id"), ) async_build_event_type = models.EVENT_TYPES[events.FreshmakerAsyncManualBuildEvent] if dependent_event.event_type_id != async_build_event_type: return json_error( - 400, 'Bad Request', 'The event (id={}) is not an async build ' - 'event.'.format(data.get('freshmaker_event_id')), + 400, + "Bad Request", + "The event (id={}) is not an async build " + "event.".format(data.get("freshmaker_event_id")), ) # The '-container' string is optional, the user might have omitted it. But we need it to be # there for our query. Let's check if it's there, and if it's not, let's add it. - for i, image in enumerate(data.get('container_images', [])): - if not image.endswith('-container'): - data.get('container_images')[i] = f"{image}-container" + for i, image in enumerate(data.get("container_images", [])): + if not image.endswith("-container"): + data.get("container_images")[i] = f"{image}-container" # parse the POST data and generate FreshmakerAsyncManualBuildEvent parser = FreshmakerAsyncManualbuildParser() @@ -681,14 +704,13 @@ def post(self): class AboutAPI(MethodView): def get(self): - json = {'version': version} - config_items = ['auth_backend'] + json = {"version": version} + config_items = ["auth_backend"] for item in config_items: config_item = getattr(conf, item) # All config items have a default, so if doesn't exist it is an error if not config_item: - raise ValueError( - 'An invalid config item of "{0}" was specified'.format(item)) + raise ValueError('An invalid config item of "{0}" was specified'.format(item)) json[item] = config_item return jsonify(json), 200 @@ -730,8 +752,8 @@ def get(self, image): images = verifier.verify_image(image) ret = { "msg": "Found %d images which are handled by Freshmaker for " - "defined content_sets." % len(images), - "images": images + "defined content_sets." % len(images), + "images": images, } return jsonify(ret), 200 @@ -785,9 +807,9 @@ def get(self, project, repo): data = verifier.verify_repository("%s/%s" % (project, repo)) ret = { "msg": "Found %d images which are handled by Freshmaker for " - "defined content_sets." % len(data["images"]), + "defined content_sets." 
% len(data["images"]), "images": data["images"], - "repository": data["repository"] + "repository": data["repository"], } return jsonify(ret), 200 @@ -804,43 +826,42 @@ def get(self, id): API_V1_MAPPING = { - 'events': EventAPI, - 'builds': BuildAPI, - 'async_builds': AsyncBuildAPI, - 'event_types': EventTypeAPI, - 'build_types': BuildTypeAPI, - 'build_states': BuildStateAPI, - 'about': AboutAPI, - 'verify_image': VerifyImageAPI, - 'verify_image_repository': VerifyImageRepositoryAPI, - 'pullspec_overrides': PullspecOverrideAPI, + "events": EventAPI, + "builds": BuildAPI, + "async_builds": AsyncBuildAPI, + "event_types": EventTypeAPI, + "build_types": BuildTypeAPI, + "build_states": BuildStateAPI, + "about": AboutAPI, + "verify_image": VerifyImageAPI, + "verify_image_repository": VerifyImageRepositoryAPI, + "pullspec_overrides": PullspecOverrideAPI, } def register_api_v1(): - """ Registers version 1 of Freshmaker API. """ + """Registers version 1 of Freshmaker API.""" for k, v in API_V1_MAPPING.items(): view = v.as_view(k) for key, val in api_v1.get(k, {}).items(): - app.add_url_rule(val['url'], - endpoint=key, - view_func=view, - **val['options']) + app.add_url_rule(val["url"], endpoint=key, view_func=view, **val["options"]) app.register_blueprint(monitor_api, name="monitor_api_v1") def register_api_v2(): - """ Registers version 2 of Freshmaker API. """ + """Registers version 2 of Freshmaker API.""" # The API v2 has the same URL schema as v1, only semantic is different. for k, v in API_V1_MAPPING.items(): view = v.as_view(k + "_v2") for key, val in api_v1.get(k, {}).items(): - app.add_url_rule(val['url'].replace("/api/1/", "/api/2/"), - endpoint=key + "_v2", - view_func=view, - **val['options']) + app.add_url_rule( + val["url"].replace("/api/1/", "/api/2/"), + endpoint=key + "_v2", + view_func=view, + **val["options"], + ) app.register_blueprint(monitor_api, name="monitor_api_v2") diff --git a/setup.py b/setup.py index e6a691ab..9429ec3a 100644 --- a/setup.py +++ b/setup.py @@ -8,11 +8,11 @@ def read_requirements(filename): specifiers = [] dep_links = [] - with open(filename, 'r') as f: + with open(filename, "r") as f: for line in f: - if line.startswith('-r') or line.strip() == '': + if line.startswith("-r") or line.strip() == "": continue - if line.startswith('git+'): + if line.startswith("git+"): dep_links.append(line.strip()) else: specifiers.append(line.strip()) @@ -21,47 +21,51 @@ def read_requirements(filename): setup_py_path = os.path.dirname(os.path.realpath(__file__)) -requirements_file = os.path.join(setup_py_path, 'requirements.txt') -test_requirements_file = os.path.join(setup_py_path, 'test-requirements.txt') +requirements_file = os.path.join(setup_py_path, "requirements.txt") +test_requirements_file = os.path.join(setup_py_path, "test-requirements.txt") install_requires, deps_links = read_requirements(requirements_file) tests_require, _ = read_requirements(test_requirements_file) if _: deps_links.extend(_) -setup(name='freshmaker', - description='Continuous Compose Service', - version='0.1.2', - classifiers=[ - "Programming Language :: Python", - "Topic :: Software Development :: Build Tools" - ], - keywords='freshmaker continuous compose service modularity fedora', - author='The Factory 2.0 Team', - # TODO: Not sure which name would be used for mail alias, - # but let's set this proactively to the new name. 
- author_email='freshmaker-owner@github.com', - url='https://github.com/redhat-exd-rebuilds/freshmaker', - license='MIT', - packages=find_packages(exclude=['tests', 'tests.*']), - include_package_data=True, - zip_safe=False, - install_requires=install_requires, - tests_require=tests_require, - dependency_links=deps_links, - entry_points={ - 'moksha.consumer': 'freshmakerconsumer = freshmaker.consumer:FreshmakerConsumer', - 'moksha.producer': 'freshmakerproducer = freshmaker.producer:FreshmakerProducer', - 'console_scripts': [ - 'freshmaker-frontend = freshmaker.manage:runssl', - 'freshmaker-gencert = freshmaker.manage:generatelocalhostcert', - 'freshmaker-manager = freshmaker.manage:cli', - 'freshmaker-upgradedb = freshmaker.manage:upgradedb', - ], - }, - data_files=[('/etc/freshmaker/', ['conf/config.py']), - ('/etc/fedmsg.d/', ['fedmsg.d/freshmaker-logging.py', - 'fedmsg.d/freshmaker-scheduler.py', - 'fedmsg.d/freshmaker.py']), - ], - ) +setup( + name="freshmaker", + description="Continuous Compose Service", + version="0.1.2", + classifiers=["Programming Language :: Python", "Topic :: Software Development :: Build Tools"], + keywords="freshmaker continuous compose service modularity fedora", + author="The Factory 2.0 Team", + # TODO: Not sure which name would be used for mail alias, + # but let's set this proactively to the new name. + author_email="freshmaker-owner@github.com", + url="https://github.com/redhat-exd-rebuilds/freshmaker", + license="MIT", + packages=find_packages(exclude=["tests", "tests.*"]), + include_package_data=True, + zip_safe=False, + install_requires=install_requires, + tests_require=tests_require, + dependency_links=deps_links, + entry_points={ + "moksha.consumer": "freshmakerconsumer = freshmaker.consumer:FreshmakerConsumer", + "moksha.producer": "freshmakerproducer = freshmaker.producer:FreshmakerProducer", + "console_scripts": [ + "freshmaker-frontend = freshmaker.manage:runssl", + "freshmaker-gencert = freshmaker.manage:generatelocalhostcert", + "freshmaker-manager = freshmaker.manage:cli", + "freshmaker-upgradedb = freshmaker.manage:upgradedb", + ], + }, + data_files=[ + ("/etc/freshmaker/", ["conf/config.py"]), + ( + "/etc/fedmsg.d/", + [ + "fedmsg.d/freshmaker-logging.py", + "fedmsg.d/freshmaker-scheduler.py", + "fedmsg.d/freshmaker.py", + ], + ), + ], +) diff --git a/tests/__init__.py b/tests/__init__.py index 611728e4..ed341181 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -30,8 +30,8 @@ def get_fedmsg(name): this_path = path.abspath(path.dirname(__file__)) fedmsg_path = path.join(this_path, "fedmsgs", name) - with open(fedmsg_path, 'r') as f: - return {'body': json.load(f)} + with open(fedmsg_path, "r") as f: + return {"body": json.load(f)} # There is no Flask app-context in the tests and we need some, diff --git a/tests/conftest.py b/tests/conftest.py index 660a0ba1..92d27096 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,7 +36,7 @@ def clear_flask_g(): Many of the tests end up modifying flask.g such as for testing or mocking authentication. If it isn't cleared, it would end up leaking into other tests which don't expect it. 
""" - for attr in ('group', 'user'): + for attr in ("group", "user"): if hasattr(flask.g, attr): delattr(flask.g, attr) @@ -53,8 +53,7 @@ def pyxis_graphql_schema(): schema = build_ast_schema(document) with mock.patch( - "freshmaker.pyxis_gql.PyxisGQL.dsl_schema", - new_callable=mock.PropertyMock + "freshmaker.pyxis_gql.PyxisGQL.dsl_schema", new_callable=mock.PropertyMock ) as dsl_schema: dsl_schema.return_value = DSLSchema(schema) yield dsl_schema diff --git a/tests/handlers/botas/test_botas_shipped_advisory.py b/tests/handlers/botas/test_botas_shipped_advisory.py index 9d910ab1..0b2e4dbb 100644 --- a/tests/handlers/botas/test_botas_shipped_advisory.py +++ b/tests/handlers/botas/test_botas_shipped_advisory.py @@ -40,43 +40,39 @@ class TestBotasShippedAdvisory(helpers.ModelsTestCase): - def setUp(self): super(TestBotasShippedAdvisory, self).setUp() # Each time when recording a build into database, freshmaker has to # request a pulp repo from ODCS. This is not necessary for running # tests. - self.patcher = helpers.Patcher( - 'freshmaker.handlers.botas.botas_shipped_advisory.') + self.patcher = helpers.Patcher("freshmaker.handlers.botas.botas_shipped_advisory.") self.pyxis = self.patcher.patch("Pyxis") - self.get_blocking_advisories = \ - self.patcher.patch("freshmaker.errata.Errata.get_blocking_advisories_builds", - return_value=set()) + self.get_blocking_advisories = self.patcher.patch( + "freshmaker.errata.Errata.get_blocking_advisories_builds", return_value=set() + ) # We do not want to send messages to message bus while running tests - self.mock_messaging_publish = self.patcher.patch( - 'freshmaker.messaging.publish') + self.mock_messaging_publish = self.patcher.patch("freshmaker.messaging.publish") self.handler = HandleBotasAdvisory() - self.botas_advisory = ErrataAdvisory( - 123, "RHBA-2020", "SHIPPED_LIVE", ['docker']) + self.botas_advisory = ErrataAdvisory(123, "RHBA-2020", "SHIPPED_LIVE", ["docker"]) self.botas_advisory._reporter = "botas/pnt-devops-jenkins@REDHAT.COM" def tearDown(self): super(TestBotasShippedAdvisory, self).tearDown() self.patcher.unpatch_all() - @patch.object(conf, 'pyxis_server_url', new='test_url') + @patch.object(conf, "pyxis_server_url", new="test_url") def test_init(self): handler1 = HandleBotasAdvisory(self.pyxis) self.assertEqual(handler1._pyxis, self.pyxis) HandleBotasAdvisory() - self.pyxis.assert_called_with('test_url') + self.pyxis.assert_called_with("test_url") - @patch.object(conf, 'pyxis_server_url', new='') + @patch.object(conf, "pyxis_server_url", new="") def test_init_no_pyxis_server(self): with self.assertRaises(ValueError, msg="'pyxis_server_url' parameter should be set"): HandleBotasAdvisory() @@ -92,8 +88,7 @@ def test_can_handle_manual_rebuilds(self): self.assertTrue(handler.can_handle(event)) def test_handle_set_dry_run(self): - event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory, - dry_run=True) + event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory, dry_run=True) self.handler.handle(event) self.assertTrue(self.handler._force_dry_run) @@ -103,52 +98,74 @@ def test_handle_isnt_allowed_by_internal_policy(self): event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) self.handler.handle(event) - db_event = Event.get(db.session, message_id='test_msg_id') + db_event = Event.get(db.session, message_id="test_msg_id") self.assertEqual(db_event.state, EventState.SKIPPED.value) - self.assertTrue(db_event.state_reason.startswith( - "This event is not allowed by internal policy")) + self.assertTrue( + 
db_event.state_reason.startswith("This event is not allowed by internal policy") + ) def test_handle_manual_isnt_allowed_by_internal_policy(self): event = ManualBundleRebuildEvent("test_msg_id1", self.botas_advisory, []) self.handler.handle(event) - db_event = Event.get(db.session, message_id='test_msg_id1') + db_event = Event.get(db.session, message_id="test_msg_id1") self.assertEqual(db_event.state, EventState.SKIPPED.value) - self.assertTrue(db_event.state_reason.startswith( - "This event is not allowed by internal policy")) + self.assertTrue( + db_event.state_reason.startswith("This event is not allowed by internal policy") + ) - @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.start_to_build_images") + @patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.start_to_build_images" + ) @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._prepare_builds") - @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_bundles_to_rebuild") + @patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_bundles_to_rebuild" + ) @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.allow_build") - def test_handle(self, allow_build, get_bundles_to_rebuild, prepare_builds, - start_to_build_images): + def test_handle( + self, allow_build, get_bundles_to_rebuild, prepare_builds, start_to_build_images + ): event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) db_event = Event.get_or_create_from_event(db.session, event) allow_build.return_value = True get_bundles_to_rebuild.return_value = ([{"bundle": 1}, {"bundle": 2}], None) prepare_builds.return_value = [ - ArtifactBuild.create(db.session, db_event, "ed0", "image", 1234, - original_nvr="some_name-2-12345", - rebuilt_nvr="some_name-2-12346"), - ArtifactBuild.create(db.session, db_event, "ed0", "image", 12345, - original_nvr="some_name_2-2-2", - rebuilt_nvr="some_name_2-2-210") + ArtifactBuild.create( + db.session, + db_event, + "ed0", + "image", + 1234, + original_nvr="some_name-2-12345", + rebuilt_nvr="some_name-2-12346", + ), + ArtifactBuild.create( + db.session, + db_event, + "ed0", + "image", + 12345, + original_nvr="some_name_2-2-2", + rebuilt_nvr="some_name_2-2-210", + ), ] self.handler.handle(event) self.handler._prepare_builds.assert_called_once() - self.assertEqual(self.handler._prepare_builds.call_args[0][0], - [{"bundle": 1}, {"bundle": 2}]) + self.assertEqual( + self.handler._prepare_builds.call_args[0][0], [{"bundle": 1}, {"bundle": 2}] + ) - @patch.object(conf, "handler_build_allowlist", new={ - "HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}} - }) + @patch.object( + conf, + "handler_build_allowlist", + new={"HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}}}, + ) def test_handle_bundle_rebuild_auto(self): - """ Test handling of AUTOMATICALLY triggered bundle rebuild""" + """Test handling of AUTOMATICALLY triggered bundle rebuild""" # operators mapping nvr_to_digest = { "original_1": "original_1_digest", @@ -170,7 +187,7 @@ def test_handle_bundle_rebuild_auto(self): "csv_name": "image.1.2.4", "version_original": "1.2.4", }, - ] + ], } image_by_digest = { "bundle_with_related_images_1_digest": {"brew": {"build": "bundle1_nvr-1-1"}}, @@ -184,7 +201,7 @@ def test_handle_bundle_rebuild_auto(self): } csv_data_by_nvr = { "bundle1_nvr-1-1": ("image.1.2.3", "1.2.3"), - "bundle2_nvr-1-1": ("image.1.2.4", "1.2.4") + "bundle2_nvr-1-1": ("image.1.2.4", 
"1.2.4"), } builds = { "bundle1_nvr-1-1": { @@ -194,15 +211,17 @@ def test_handle_bundle_rebuild_auto(self): "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator1@original_1_digest", - "original": "registry/repo/operator1:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator1@original_1_digest", + "original": "registry/repo/operator1:v2.2.0", + "pinned": True, + } + ], }, } } - } + }, }, "bundle2_nvr-1-1": { "task_id": 2, @@ -211,16 +230,18 @@ def test_handle_bundle_rebuild_auto(self): "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator2@original_2_digest", - "original": "registry/repo/operator2:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator2@original_2_digest", + "original": "registry/repo/operator2:v2.2.0", + "pinned": True, + } + ], }, } } - } - } + }, + }, } event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) @@ -245,7 +266,9 @@ def gmldbn(nvr, must_be_published=True): # that uses return value self.pyxis().get_latest_bundles.return_value = ["some", "bundles", "info"] # return bundles for original operator images - self.pyxis().get_bundles_by_related_image_digest.side_effect = lambda x, y: bundles_with_related_images[x] + self.pyxis().get_bundles_by_related_image_digest.side_effect = ( + lambda x, y: bundles_with_related_images[x] + ) self.pyxis().get_images_by_digest.side_effect = lambda x: [image_by_digest[x]] self.pyxis().get_images_by_nvr.side_effect = lambda x: [image_by_nvr[x]] self.handler.image_has_auto_rebuild_tag = MagicMock(return_value=True) @@ -301,7 +324,7 @@ def gmldbn(nvr, must_be_published=True): assert bundles_to_rebuild[1] in self.handler._prepare_builds.call_args.args[0] def test_handle_bundle_rebuild_without_hotfixes(self): - """ Test handling bundle rebuilds that filtered hotfix images""" + """Test handling bundle rebuilds that filtered hotfix images""" nvr_to_digest = { "original_1": "original_1_digest", "some_name-1-12345": "some_name-1-12345_digest", @@ -322,7 +345,7 @@ def test_handle_bundle_rebuild_without_hotfixes(self): "csv_name": "image.1.2.4", "version_original": "1.2.4", }, - ] + ], } image_by_digest = { "bundle_with_related_images_1_digest": {"brew": {"build": "bundle1_nvr-1-1"}}, @@ -336,7 +359,7 @@ def test_handle_bundle_rebuild_without_hotfixes(self): } csv_data_by_nvr = { "bundle1_nvr-1-1": ("image.1.2.3", "1.2.3"), - "bundle2_nvr-1-1": ("image.1.2.4", "1.2.4") + "bundle2_nvr-1-1": ("image.1.2.4", "1.2.4"), } builds = { "bundle1_nvr-1-1": { @@ -346,15 +369,17 @@ def test_handle_bundle_rebuild_without_hotfixes(self): "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator1@original_1_digest", - "original": "registry/repo/operator1:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator1@original_1_digest", + "original": "registry/repo/operator1:v2.2.0", + "pinned": True, + } + ], }, } } - } + }, }, "bundle2_nvr-1-1": { "task_id": 2, @@ -363,16 +388,18 @@ def test_handle_bundle_rebuild_without_hotfixes(self): "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator2@original_2_digest", - "original": "registry/repo/operator2:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator2@original_2_digest", + "original": "registry/repo/operator2:v2.2.0", + 
"pinned": True, + } + ], }, } } - } - } + }, + }, } event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) @@ -396,7 +423,9 @@ def gmldbn(nvr, must_be_published=True): # that uses return value self.pyxis().get_latest_bundles.return_value = ["some", "bundles", "info"] # return bundles for original operator images - self.pyxis().get_bundles_by_related_image_digest.side_effect = lambda x, y: bundles_with_related_images[x] + self.pyxis().get_bundles_by_related_image_digest.side_effect = ( + lambda x, y: bundles_with_related_images[x] + ) self.pyxis().get_images_by_digest.side_effect = lambda x: [image_by_digest[x]] self.pyxis().get_images_by_nvr.side_effect = lambda x: [image_by_nvr[x]] self.handler.image_has_auto_rebuild_tag = MagicMock(return_value=True) @@ -411,12 +440,14 @@ def gmldbn(nvr, must_be_published=True): get_build.assert_has_calls([]) assert self.handler._prepare_builds.call_args is None - @patch.object(conf, "handler_build_allowlist", new={ - "HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}} - }) - @patch('freshmaker.models.Event.get_artifact_build_from_event_dependencies') + @patch.object( + conf, + "handler_build_allowlist", + new={"HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}}}, + ) + @patch("freshmaker.models.Event.get_artifact_build_from_event_dependencies") def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): - """ Test handling of MANUALLY triggered bundle rebuild""" + """Test handling of MANUALLY triggered bundle rebuild""" # operators mapping nvr_to_digest = { "original_1": "original_1_digest", @@ -428,7 +459,7 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): # operator for bundle ignored because of 'container_images' "original_3": "original_3_digest", # operator for bundle ignored because it was built in dependent event - "original_4": "original_4_digest" + "original_4": "original_4_digest", } # related image digest -> bundle bundles_with_related_images = { @@ -461,7 +492,7 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): "csv_name": "image.1.2.6", "version_original": "1.2.6", }, - ] + ], } image_by_digest = { "bundle_with_related_images_1_digest": {"brew": {"build": "bundle1_nvr-1-1"}}, @@ -477,7 +508,7 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): } csv_data_by_nvr = { "bundle1_nvr-1-1": ("image.1.2.3", "1.2.3"), - "bundle2_nvr-1-1": ("image.1.2.4", "1.2.4") + "bundle2_nvr-1-1": ("image.1.2.4", "1.2.4"), } builds = { "bundle1_nvr-1-1": { @@ -487,15 +518,17 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator1@original_1_digest", - "original": "registry/repo/operator1:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator1@original_1_digest", + "original": "registry/repo/operator1:v2.2.0", + "pinned": True, + } + ], }, } } - } + }, }, "bundle2_nvr-1-1": { "task_id": 2, @@ -504,21 +537,24 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator2@original_2_digest", - "original": "registry/repo/operator2:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator2@original_2_digest", + "original": "registry/repo/operator2:v2.2.0", + "pinned": True, + } + ], }, } } - } - } + }, + }, } event = 
ManualBundleRebuildEvent( - "test_msg_id", self.botas_advisory, - container_images=["bundle1_nvr-1-1", "bundle2_nvr-1-1", "bundle4_nvr-1-1"] + "test_msg_id", + self.botas_advisory, + container_images=["bundle1_nvr-1-1", "bundle2_nvr-1-1", "bundle4_nvr-1-1"], ) db_event = Event.get_or_create_from_event(db.session, event) self.handler.event = event @@ -527,7 +563,7 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): "original_1": "some_name-1-12345", "original_2": "some_name_2-2-2", "original_3": "some_name_3-3-3", - "original_4": "some_name_4-4-4" + "original_4": "some_name_4-4-4", } ) self.handler._get_bundle_csv_name_and_version = MagicMock( @@ -537,19 +573,22 @@ def test_handle_bundle_rebuild_manual(self, get_dependent_event_build): def gmldbn(nvr, must_be_published=True): return nvr_to_digest[nvr] + self.pyxis().get_manifest_list_digest_by_nvr.side_effect = gmldbn self.pyxis().get_operator_indices.return_value = [] # Doens't matter what this method will return, because we override method # that uses return value self.pyxis().get_latest_bundles.return_value = ["some", "bundles", "info"] # return bundles for original operator images - self.pyxis().get_bundles_by_related_image_digest.side_effect = lambda x, y: bundles_with_related_images[x] + self.pyxis().get_bundles_by_related_image_digest.side_effect = ( + lambda x, y: bundles_with_related_images[x] + ) self.pyxis().get_images_by_digest.side_effect = lambda x: [image_by_digest[x]] self.pyxis().get_images_by_nvr.side_effect = lambda x: [image_by_nvr[x]] self.pyxis().is_bundle.return_value = True self.pyxis().is_hotfix_image.return_value = False # ignore bundle because it was already built in dependent event - get_dependent_event_build.side_effect = lambda x: True if x == 'bundle4_nvr-1-1' else False + get_dependent_event_build.side_effect = lambda x: True if x == "bundle4_nvr-1-1" else False self.handler.image_has_auto_rebuild_tag = MagicMock(return_value=True) get_build = self.patcher.patch("freshmaker.kojiservice.KojiService.get_build") get_build.side_effect = lambda x: builds[x] @@ -559,7 +598,9 @@ def gmldbn(nvr, must_be_published=True): self.handler.handle(event) self.assertNotEqual(db_event.state, EventState.SKIPPED.value) - get_build.assert_has_calls([call("bundle1_nvr-1-1"), call("bundle2_nvr-1-1")], any_order=True) + get_build.assert_has_calls( + [call("bundle1_nvr-1-1"), call("bundle2_nvr-1-1")], any_order=True + ) bundles_to_rebuild = [ { "pullspec_replacements": [ @@ -599,13 +640,14 @@ def gmldbn(nvr, must_be_published=True): assert bundles_to_rebuild[0] in self.handler._prepare_builds.call_args.args[0] assert bundles_to_rebuild[1] in self.handler._prepare_builds.call_args.args[0] - @patch.object(conf, "handler_build_allowlist", new={ - "HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}} - }) - @patch('freshmaker.models.Event.get_artifact_build_from_event_dependencies') + @patch.object( + conf, + "handler_build_allowlist", + new={"HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}}}, + ) + @patch("freshmaker.models.Event.get_artifact_build_from_event_dependencies") def test_handle_bundle_rebuild_manual_nvr_override(self, get_dependent_event_build): - - """ Test handling of MANUALLY triggered bundle rebuild with manual nvr override. + """Test handling of MANUALLY triggered bundle rebuild with manual nvr override. 
Bundle 1 images are left alone, bundle 2 images are manually overriden, and there is an override that is introduced manually in the request (#FIXME specify) @@ -667,15 +709,17 @@ def test_handle_bundle_rebuild_manual_nvr_override(self, get_dependent_event_bui "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator1@original_1_digest", - "original": "registry/repo/operator1:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator1@original_1_digest", + "original": "registry/repo/operator1:v2.2.0", + "pinned": True, + } + ], }, } } - } + }, }, "bundle2_nvr-1-1": { "task_id": 2, @@ -684,15 +728,17 @@ def test_handle_bundle_rebuild_manual_nvr_override(self, get_dependent_event_bui "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator2@original_2_digest", - "original": "registry/repo/operator2:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator2@original_2_digest", + "original": "registry/repo/operator2:v2.2.0", + "pinned": True, + } + ], }, } } - } + }, }, "bundle3_nvr-1-1": { "task_id": 3, @@ -701,26 +747,30 @@ def test_handle_bundle_rebuild_manual_nvr_override(self, get_dependent_event_bui "operator_manifests": { "related_images": { "created_by_osbs": True, - "pullspecs": [{ - "new": "registry/repo/operator3@original_3_digest", - "original": "registry/repo/operator3:v2.2.0", - "pinned": True, - }] + "pullspecs": [ + { + "new": "registry/repo/operator3@original_3_digest", + "original": "registry/repo/operator3:v2.2.0", + "pinned": True, + } + ], }, } } - } - } + }, + }, } event = ManualBundleRebuildEvent( - "test_msg_id", self.botas_advisory, + "test_msg_id", + self.botas_advisory, container_images=["bundle1_nvr-1-1", "bundle2_nvr-1-1"], requester_metadata_json={ "bundle_related_image_overrides": { - "original_2": "manual_name_2-2-2", "original_3": "manual_name_3-3-3" + "original_2": "manual_name_2-2-2", + "original_3": "manual_name_3-3-3", } - } + }, ) db_event = Event.get_or_create_from_event(db.session, event) self.handler.event = event @@ -737,13 +787,16 @@ def test_handle_bundle_rebuild_manual_nvr_override(self, get_dependent_event_bui def gmldbn(nvr, must_be_published=True): return nvr_to_digest[nvr] + self.pyxis().get_manifest_list_digest_by_nvr.side_effect = gmldbn self.pyxis().get_operator_indices.return_value = [] # Doens't matter what this method will return, because we override method # that uses return value self.pyxis().get_latest_bundles.return_value = ["some", "bundles", "info"] # return bundles for original operator images - self.pyxis().get_bundles_by_related_image_digest.side_effect = lambda x, y: bundles_with_related_images[x] + self.pyxis().get_bundles_by_related_image_digest.side_effect = ( + lambda x, y: bundles_with_related_images[x] + ) self.pyxis().get_images_by_digest.side_effect = lambda x: [image_by_digest[x]] self.pyxis().get_images_by_nvr.side_effect = lambda x: [image_by_nvr[x]] self.pyxis().is_bundle.return_value = True @@ -813,49 +866,64 @@ def gmldbn(nvr, must_be_published=True): }, "spec": {"version": "1.2.5+0.1608854400.p"}, }, - } + }, ] assert bundles_to_rebuild[0] in self.handler._prepare_builds.call_args.args[0] assert bundles_to_rebuild[1] in self.handler._prepare_builds.call_args.args[0] @patch.object(conf, "dry_run", new=True) - @patch.object(conf, "handler_build_allowlist", new={ - "HandleBotasAdvisory": { - "image": { - "advisory_name": "RHBA-2020" - } - }}) 
- @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr") + @patch.object( + conf, + "handler_build_allowlist", + new={"HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}}}, + ) + @patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr" + ) def test_get_original_nvrs(self, get_build): event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) self.botas_advisory._builds = { "product_name": { - "builds": [{"some_name-2-2": {"nvr": "some_name-2-2"}}, - {"some_name_two-2-2": {"nvr": "some_name_two-2-2"}}] + "builds": [ + {"some_name-2-2": {"nvr": "some_name-2-2"}}, + {"some_name_two-2-2": {"nvr": "some_name_two-2-2"}}, + ] } } get_build.return_value = "some_name-1-0" db_event = Event.get_or_create_from_event(db.session, event) ArtifactBuild.create( - db.session, db_event, "ed0", "image", original_nvr="some_name-2-12344", rebuilt_nvr="some_name-2-12345" + db.session, + db_event, + "ed0", + "image", + original_nvr="some_name-2-12344", + rebuilt_nvr="some_name-2-12345", ) ArtifactBuild.create( - db.session, db_event, "ed1", "image", original_nvr="some_name_two-2-1", rebuilt_nvr="some_name_two-2-2" + db.session, + db_event, + "ed1", + "image", + original_nvr="some_name_two-2-1", + rebuilt_nvr="some_name_two-2-2", ) self.handler.handle(event) - self.pyxis().get_manifest_list_digest_by_nvr.assert_has_calls([ - call("some_name-1-0"), - call("some_name_two-2-2", must_be_published=False), - ], any_order=True) - - @patch.object(conf, 'dry_run', new=True) - @patch.object(conf, 'handler_build_allowlist', new={ - 'HandleBotasAdvisory': { - 'image': { - 'advisory_name': 'RHBA-2020' - } - }}) + self.pyxis().get_manifest_list_digest_by_nvr.assert_has_calls( + [ + call("some_name-1-0"), + call("some_name_two-2-2", must_be_published=False), + ], + any_order=True, + ) + + @patch.object(conf, "dry_run", new=True) + @patch.object( + conf, + "handler_build_allowlist", + new={"HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}}}, + ) def test_handle_no_digests_error(self): event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) self.handler._create_original_to_rebuilt_nvrs_map = MagicMock( @@ -865,35 +933,33 @@ def test_handle_no_digests_error(self): self.botas_advisory._builds = {} self.handler.handle(event) - db_event = Event.get(db.session, message_id='test_msg_id') + db_event = Event.get(db.session, message_id="test_msg_id") self.assertEqual(db_event.state, EventState.SKIPPED.value) - self.assertTrue( - db_event.state_reason.startswith("None of the original images have digest")) + self.assertTrue(db_event.state_reason.startswith("None of the original images have digest")) - @patch.object(conf, 'dry_run', new=True) - @patch.object(conf, 'handler_build_allowlist', new={ - 'HandleBotasAdvisory': { - 'image': { - 'advisory_name': 'RHBA-2020' - } - }}) - @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr") + @patch.object(conf, "dry_run", new=True) + @patch.object( + conf, + "handler_build_allowlist", + new={"HandleBotasAdvisory": {"image": {"advisory_name": "RHBA-2020"}}}, + ) + @patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr" + ) @patch("freshmaker.handlers.botas.botas_shipped_advisory.KojiService") - def test_multiple_bundles_to_single_related_image(self, mock_koji, - get_published): + def test_multiple_bundles_to_single_related_image(self, mock_koji, 
get_published): event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) self.botas_advisory._builds = { "product_name": { - "builds": [{"foo-1-2.123": {"nvr": "foo-1-2.123"}}, - {"bar-2-2.134": {"nvr": "bar-2-2.134"}}] + "builds": [ + {"foo-1-2.123": {"nvr": "foo-1-2.123"}}, + {"bar-2-2.134": {"nvr": "bar-2-2.134"}}, + ] } } - published_nvrs = { - "foo-1-2.123": "foo-1-2", - "bar-2-2.134": "bar-2-2" - } + published_nvrs = {"foo-1-2.123": "foo-1-2", "bar-2-2.134": "bar-2-2"} get_published.side_effect = lambda x: published_nvrs[x] digests_by_nvrs = { @@ -910,6 +976,7 @@ def test_multiple_bundles_to_single_related_image(self, mock_koji, def gmldbn(nvr, must_be_published=True): return digests_by_nvrs[nvr] + self.pyxis().get_manifest_list_digest_by_nvr.side_effect = gmldbn self.pyxis().is_hotfix_image.return_value = False @@ -921,13 +988,9 @@ def gmldbn(nvr, must_be_published=True): "channel_name": "streams-1.5.x", "csv_name": "amq-streams.1.5.3", "related_images": [ - { - "image": "foo@sha256:111", - "name": "foo", - "digest": "sha256:111" - }, + {"image": "foo@sha256:111", "name": "foo", "digest": "sha256:111"}, ], - "version_original": "1.5.3" + "version_original": "1.5.3", }, { "bundle_path": "bundle-b/path", @@ -935,57 +998,58 @@ def gmldbn(nvr, must_be_published=True): "channel_name": "4.5", "csv_name": "amq-streams.2.4.2", "related_images": [ - { - "image": "foo@sha256:111", - "name": "foo", - "digest": "sha256:111" - }, + {"image": "foo@sha256:111", "name": "foo", "digest": "sha256:111"}, ], - "version_original": "2.4.2" + "version_original": "2.4.2", }, ], - "sha256:222": [] + "sha256:222": [], } - self.pyxis().get_bundles_by_related_image_digest.side_effect = \ + self.pyxis().get_bundles_by_related_image_digest.side_effect = ( lambda x, _: bundles_by_related_digest[x] + ) bundle_images = { - "sha256:123123": [{ - "brew": { - "build": "foo-a-bundle-2.1-2", - "nvra": "foo-a-bundle-2.1-2.amd64", - "package": "foo-a-bundle", - }, - "repositories": [ - { - "content_advisory_ids": [], - "manifest_list_digest": "sha256:12322", - "manifest_schema2_digest": "sha256:123123", - "published": True, - "registry": "registry.example.com", - "repository": "foo/foo-a-operator-bundle", - "tags": [{"name": "2"}, {"name": "2.1"}], - } - ], - }], - "sha256:023023": [{ - "brew": { - "build": "foo-b-bundle-3.1-2", - "nvra": "foo-b-bundle-3.1-2.amd64", - "package": "foo-b-bundle", - }, - "repositories": [ - { - "content_advisory_ids": [], - "manifest_list_digest": "sha256:12345", - "manifest_schema2_digest": "sha256:023023", - "published": True, - "registry": "registry.example.com", - "repository": "foo/foo-b-operator-bundle", - "tags": [{"name": "3"}, {"name": "3.1"}], - } - ], - }] + "sha256:123123": [ + { + "brew": { + "build": "foo-a-bundle-2.1-2", + "nvra": "foo-a-bundle-2.1-2.amd64", + "package": "foo-a-bundle", + }, + "repositories": [ + { + "content_advisory_ids": [], + "manifest_list_digest": "sha256:12322", + "manifest_schema2_digest": "sha256:123123", + "published": True, + "registry": "registry.example.com", + "repository": "foo/foo-a-operator-bundle", + "tags": [{"name": "2"}, {"name": "2.1"}], + } + ], + } + ], + "sha256:023023": [ + { + "brew": { + "build": "foo-b-bundle-3.1-2", + "nvra": "foo-b-bundle-3.1-2.amd64", + "package": "foo-b-bundle", + }, + "repositories": [ + { + "content_advisory_ids": [], + "manifest_list_digest": "sha256:12345", + "manifest_schema2_digest": "sha256:023023", + "published": True, + "registry": "registry.example.com", + "repository": 
"foo/foo-b-operator-bundle", + "tags": [{"name": "3"}, {"name": "3.1"}], + } + ], + } + ], } self.pyxis().get_images_by_digest.side_effect = lambda x: bundle_images[x] @@ -999,7 +1063,7 @@ def _fake_get_auto_rebuild_tags(registry, repository): csv_data_by_nvr = { "foo-a-bundle-2.1-2": ("image.2.1.2", "2.1.2"), - "foo-b-bundle-3.1-2": ("image.3.1.2", "3.1.2") + "foo-b-bundle-3.1-2": ("image.3.1.2", "3.1.2"), } self.handler._get_bundle_csv_name_and_version = MagicMock( side_effect=lambda x: csv_data_by_nvr[x] @@ -1025,28 +1089,39 @@ def _fake_get_auto_rebuild_tags(registry, repository): "pinned": True, } ], - } + }, } - mock_koji.return_value.get_bundle_related_images.side_effect = lambda x: build_related_images[x] + mock_koji.return_value.get_bundle_related_images.side_effect = ( + lambda x: build_related_images[x] + ) self.handler._prepare_builds = MagicMock() self.handler._prepare_builds.return_value = [MagicMock()] self.handler.image_has_auto_rebuild_tag = MagicMock(return_value=True) self.handler.start_to_build_images = MagicMock() prep_event = Event.get_or_create_from_event(db.session, event) ArtifactBuild.create( - db.session, prep_event, "ed0", "image", original_nvr="foo-1-2.122", rebuilt_nvr="foo-1-2.123" + db.session, + prep_event, + "ed0", + "image", + original_nvr="foo-1-2.122", + rebuilt_nvr="foo-1-2.123", ) ArtifactBuild.create( - db.session, prep_event, "ed1", "image", original_nvr="bar-2-2.133", rebuilt_nvr="bar-2-2.134" + db.session, + prep_event, + "ed1", + "image", + original_nvr="bar-2-2.133", + rebuilt_nvr="bar-2-2.134", ) self.handler.handle(event) - db_event = Event.get(db.session, message_id='test_msg_id') + db_event = Event.get(db.session, message_id="test_msg_id") - self.pyxis().get_images_by_digest.assert_has_calls([ - call("sha256:123123"), - call("sha256:023023") - ], any_order=True) + self.pyxis().get_images_by_digest.assert_has_calls( + [call("sha256:123123"), call("sha256:023023")], any_order=True + ) self.assertEqual(db_event.state, EventState.BUILDING.value) def test_can_handle_manual_rebuild_with_advisory(self): @@ -1055,44 +1130,59 @@ def test_can_handle_manual_rebuild_with_advisory(self): def test_get_published_original_nvr_single_event(self): event1 = Event.create(db.session, "id1", "RHSA-1", TestingEvent) - ArtifactBuild.create(db.session, event1, "ed0", "image", 1234, - original_nvr="nvr1-0-1", - rebuilt_nvr="nvr1-0-2") + ArtifactBuild.create( + db.session, + event1, + "ed0", + "image", + 1234, + original_nvr="nvr1-0-1", + rebuilt_nvr="nvr1-0-2", + ) db.session.commit() - self.pyxis()._pagination.return_value = [ - {"repositories": [{"published": True}]} - ] + self.pyxis()._pagination.return_value = [{"repositories": [{"published": True}]}] ret_nvr = self.handler.get_published_original_nvr("nvr1-0-2") self.assertEqual(ret_nvr, "nvr1-0-1") def test_get_published_original_nvr(self): event1 = Event.create(db.session, "id1", "RHSA-1", TestingEvent) - ArtifactBuild.create(db.session, event1, "ed0", "image", 1234, - original_nvr="nvr1", rebuilt_nvr="nvr1-001") - - event2 = Event.create(db.session, "id2", "RHSA-1", - ManualRebuildWithAdvisoryEvent) - ArtifactBuild.create(db.session, event2, "ed1", "image", 12345, - original_nvr="nvr1-001", rebuilt_nvr="nvr1-002") - - event3 = Event.create(db.session, "id3", "RHSA-1", - ManualRebuildWithAdvisoryEvent) - ArtifactBuild.create(db.session, event3, "ed2", "image", 123456, - original_nvr="nvr1-002", rebuilt_nvr="nvr1-003") + ArtifactBuild.create( + db.session, event1, "ed0", "image", 1234, original_nvr="nvr1", 
rebuilt_nvr="nvr1-001" + ) + + event2 = Event.create(db.session, "id2", "RHSA-1", ManualRebuildWithAdvisoryEvent) + ArtifactBuild.create( + db.session, + event2, + "ed1", + "image", + 12345, + original_nvr="nvr1-001", + rebuilt_nvr="nvr1-002", + ) + + event3 = Event.create(db.session, "id3", "RHSA-1", ManualRebuildWithAdvisoryEvent) + ArtifactBuild.create( + db.session, + event3, + "ed2", + "image", + 123456, + original_nvr="nvr1-002", + rebuilt_nvr="nvr1-003", + ) db.session.commit() self.pyxis()._pagination.side_effect = [ [{"repositories": [{"published": False}]}], - [{"repositories": [{"published": True}]}] + [{"repositories": [{"published": True}]}], ] ret_nvr = self.handler.get_published_original_nvr("nvr1-003") self.assertEqual(ret_nvr, "nvr1-001") def test_no_original_build_by_nvr(self): - self.pyxis()._pagination.return_value = [ - {"repositories": [{"published": True}]} - ] + self.pyxis()._pagination.return_value = [{"repositories": [{"published": True}]}] self.assertIsNone(self.handler.get_published_original_nvr("nvr2")) def test_image_has_auto_rebuild_tag(self): @@ -1120,7 +1210,7 @@ def test_image_has_auto_rebuild_tag(self): has_auto_rebuild_tag = self.handler.image_has_auto_rebuild_tag(bundle_image) self.assertTrue(has_auto_rebuild_tag) - @patch.object(conf, 'bundle_autorebuild_tag_exceptions', new=['foo-operator-2.1']) + @patch.object(conf, "bundle_autorebuild_tag_exceptions", new=["foo-operator-2.1"]) def test_image_has_auto_rebuild_tag_exception(self): bundle_image = { "brew": { @@ -1146,60 +1236,97 @@ def test_image_has_auto_rebuild_tag_exception(self): has_auto_rebuild_tag = self.handler.image_has_auto_rebuild_tag(bundle_image) self.assertTrue(has_auto_rebuild_tag) - @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr") + @patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr" + ) def test_create_original_to_rebuilt_nvrs_map(self, get_original_build): get_original_build.side_effect = ["original_1", "original_2"] self.handler.event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) self.botas_advisory._builds = { "product_name": { - "builds": [{"some_name-2-12345": {"nvr": "some_name-2-12345"}}, - {"some_name_two-2-2": {"nvr": "some_name_two-2-2"}}] + "builds": [ + {"some_name-2-12345": {"nvr": "some_name-2-12345"}}, + {"some_name_two-2-2": {"nvr": "some_name_two-2-2"}}, + ] } } - self.get_blocking_advisories.return_value = {"some_name-1-1", - "some_name-2-1"} + self.get_blocking_advisories.return_value = {"some_name-1-1", "some_name-2-1"} db_event = Event.get_or_create_from_event(db.session, self.handler.event) ArtifactBuild.create( - db.session, db_event, "ed0", "image", original_nvr="some_name-2-12344", rebuilt_nvr="some_name-2-12345" + db.session, + db_event, + "ed0", + "image", + original_nvr="some_name-2-12344", + rebuilt_nvr="some_name-2-12345", ) ArtifactBuild.create( - db.session, db_event, "ed1", "image", original_nvr="some_name_two-2-1", rebuilt_nvr="some_name_two-2-2" + db.session, + db_event, + "ed1", + "image", + original_nvr="some_name_two-2-1", + rebuilt_nvr="some_name_two-2-2", ), - expected_map = {"original_1": "some_name-2-12345", - "original_2": "some_name_two-2-2", - "some_name-2-1": "some_name-2-12345"} + expected_map = { + "original_1": "some_name-2-12345", + "original_2": "some_name_two-2-2", + "some_name-2-1": "some_name-2-12345", + } mapping = self.handler._create_original_to_rebuilt_nvrs_map() 
self.assertEqual(get_original_build.call_count, 2) self.assertEqual(mapping, expected_map) - @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr") + @patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory.get_published_original_nvr" + ) def test_bundle_include_previous_builds(self, get_original_build): get_original_build.side_effect = ["original_1", "original_2"] self.handler.event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory) self.botas_advisory._builds = { "product_name": { - "builds": [{"some_name-2-12345": {"nvr": "some_name-2-12345"}}, - {"some_name_two-2-2": {"nvr": "some_name_two-2-2"}}] + "builds": [ + {"some_name-2-12345": {"nvr": "some_name-2-12345"}}, + {"some_name_two-2-2": {"nvr": "some_name_two-2-2"}}, + ] } } self.get_blocking_advisories.return_value = {"some_name-1-1", "some_name-2-1"} db_event = Event.get_or_create_from_event(db.session, self.handler.event) ArtifactBuild.create( - db.session, db_event, "ed0", "image", original_nvr="some_name-2-12344", rebuilt_nvr="some_name-2-12345" - ) - ArtifactBuild.create( - db.session, db_event, "ed1", "image", original_nvr="some_name-2-12343", rebuilt_nvr="some_name-2-12344" + db.session, + db_event, + "ed0", + "image", + original_nvr="some_name-2-12344", + rebuilt_nvr="some_name-2-12345", ) ArtifactBuild.create( - db.session, db_event, "ed2", "image", original_nvr="some_name-2-12342", rebuilt_nvr="some_name-2-12343" + db.session, + db_event, + "ed1", + "image", + original_nvr="some_name-2-12343", + rebuilt_nvr="some_name-2-12344", ) ArtifactBuild.create( - db.session, db_event, "ed2", "image", rebuilt_nvr="some_name-2-12342" + db.session, + db_event, + "ed2", + "image", + original_nvr="some_name-2-12342", + rebuilt_nvr="some_name-2-12343", ) + ArtifactBuild.create(db.session, db_event, "ed2", "image", rebuilt_nvr="some_name-2-12342") ArtifactBuild.create( - db.session, db_event, "ed3", "image", original_nvr="some_name_two-2-1", rebuilt_nvr="some_name_two-2-2" + db.session, + db_event, + "ed3", + "image", + original_nvr="some_name_two-2-1", + rebuilt_nvr="some_name_two-2-2", ), expected_map = { "original_1": "some_name-2-12345", @@ -1222,7 +1349,7 @@ def test_prepare_builds(self, get_koji_data): "commit": "commit_1", "target": "target_1", "git_branch": "git_branch_1", - "arches": ["arch_1", "arch_1"] + "arches": ["arch_1", "arch_1"], } pullspec_override_url = "https://localhost/api/2/pullspec_overrides/" db_event = Event.create(db.session, "id1", "RHSA-1", TestingEvent) @@ -1232,19 +1359,21 @@ def test_prepare_builds(self, get_koji_data): bundle_data = [ { "nvr": "nvr-1-1", - "pullspec_replacements": [{ - 'new': 'registry/repo/operator@sha256:123', - 'original': 'registry/repo/operator:v2.2.0', - 'pinned': True, - }], + "pullspec_replacements": [ + { + "new": "registry/repo/operator@sha256:123", + "original": "registry/repo/operator:v2.2.0", + "pinned": True, + } + ], "update": { "metadata": { - 'name': "amq-streams.2.2.0+0.1608854400.p", + "name": "amq-streams.2.2.0+0.1608854400.p", "annotations": {"olm.substitutesFor": "image.2.2.0"}, }, - 'spec': { - 'version': "2.2.0+0.1608854400.p", - } + "spec": { + "version": "2.2.0+0.1608854400.p", + }, }, } ] @@ -1269,7 +1398,7 @@ def test_prepare_builds(self, get_koji_data): }, "spec": { "version": "2.2.0+0.1608854400.p", - } + }, }, } self.assertEqual(submitted_build.bundle_pullspec_overrides, expected_csv_modifications) @@ -1284,8 +1413,10 @@ def test_prepare_builds(self, get_koji_data): } 
self.assertEqual(json.loads(submitted_build.build_args), expected_build_args) self.assertEqual(submitted_build.state, ArtifactBuildState.PLANNED.value) - self.assertEqual(json.loads(submitted_build.build_args)["operator_csv_modifications_url"], - pullspec_override_url + str(submitted_build.id)) + self.assertEqual( + json.loads(submitted_build.build_args)["operator_csv_modifications_url"], + pullspec_override_url + str(submitted_build.id), + ) @pytest.mark.parametrize( @@ -1302,7 +1433,7 @@ def test_prepare_builds(self, get_koji_data): "1.2.3+beta3.0.1608853000.patched", ("1.2.3+beta3.0.1608854400.p", "0.1608854400.p"), ), - ) + ), ) def test_get_rebuild_bundle_version(version, expected): now = datetime(year=2020, month=12, day=25, hour=0, minute=0, second=0) @@ -1334,7 +1465,9 @@ def test_get_csv_name(): @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_csv_name") -@patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_rebuild_bundle_version") +@patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_rebuild_bundle_version" +) def test_get_csv_updates(mock_grbv, mock_gcn): mock_grbv.return_value = ("1.2.3+0.1608854400.p", "0.1608854400.p") mock_gcn.return_value = "amq-streams.1.2.3-0.1608854400.p" @@ -1342,18 +1475,20 @@ def test_get_csv_updates(mock_grbv, mock_gcn): assert rv == { "update": { "metadata": { - 'name': "amq-streams.1.2.3-0.1608854400.p", - "annotations": {"olm.substitutesFor": "amq-streams.1.2.3"} + "name": "amq-streams.1.2.3-0.1608854400.p", + "annotations": {"olm.substitutesFor": "amq-streams.1.2.3"}, + }, + "spec": { + "version": "1.2.3+0.1608854400.p", }, - 'spec': { - 'version': "1.2.3+0.1608854400.p", - } } } @patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_csv_name") -@patch("freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_rebuild_bundle_version") +@patch( + "freshmaker.handlers.botas.botas_shipped_advisory.HandleBotasAdvisory._get_rebuild_bundle_version" +) def test_get_csv_updates_without_olm_substitutes(mock_grbv, mock_gcn): mock_grbv.return_value = ("1.2.3+0.1608854400.p", "0.1608854400.p") mock_gcn.return_value = "amq-streams.1.2.3-0.1608854400.p" @@ -1361,10 +1496,10 @@ def test_get_csv_updates_without_olm_substitutes(mock_grbv, mock_gcn): assert rv == { "update": { "metadata": { - 'name': "amq-streams.1.2.3-0.1608854400.p", + "name": "amq-streams.1.2.3-0.1608854400.p", + }, + "spec": { + "version": "1.2.3+0.1608854400.p", }, - 'spec': { - 'version': "1.2.3+0.1608854400.p", - } } } diff --git a/tests/handlers/internal/test_freshmaker_manage_request.py b/tests/handlers/internal/test_freshmaker_manage_request.py index 3be1943d..5b351323 100644 --- a/tests/handlers/internal/test_freshmaker_manage_request.py +++ b/tests/handlers/internal/test_freshmaker_manage_request.py @@ -38,23 +38,24 @@ def setUp(self): events.BaseEvent.register_parser(FreshmakerManageRequestParser) def test_freshmaker_manage_mismatched_action(self): - msg = get_fedmsg('freshmaker_manage_mismatched_action') + msg = get_fedmsg("freshmaker_manage_mismatched_action") with self.assertRaises(ValueError) as err: self.get_event_from_msg(msg) self.assertEqual( - err.exception.args[0], 'Last part of \'Freshmaker manage\' message' - ' topic must match the action defined within the message.') + err.exception.args[0], + "Last part of 'Freshmaker manage' message" + " topic must match the action defined within the message.", + ) def 
test_freshmaker_manage_missing_action(self): - msg = get_fedmsg('freshmaker_manage_missing_action') + msg = get_fedmsg("freshmaker_manage_missing_action") with self.assertRaises(ValueError) as err: self.get_event_from_msg(msg) - self.assertEqual( - err.exception.args[0], 'Action is not defined within the message.') + self.assertEqual(err.exception.args[0], "Action is not defined within the message.") def test_more_than_max_tries_on_freshmaker_manage_request(self): - msg = get_fedmsg('freshmaker_manage_eventcancel') - msg['body']['msg']['try'] = events.FreshmakerManageEvent._max_tries + msg = get_fedmsg("freshmaker_manage_eventcancel") + msg["body"]["msg"]["try"] = events.FreshmakerManageEvent._max_tries event = self.get_event_from_msg(msg) handler = CancelEventOnFreshmakerManageRequest() @@ -67,25 +68,39 @@ def setUp(self): events.BaseEvent.register_parser(FreshmakerManageRequestParser) self.koji_read_config_patcher = patch( - 'koji.read_config', return_value={'server': 'http://localhost/'}) + "koji.read_config", return_value={"server": "http://localhost/"} + ) self.koji_read_config_patcher.start() self.db_event = models.Event.create( - db.session, "2017-00000000-0000-0000-0000-000000000003", "RHSA-2018-103", - events.TestingEvent) + db.session, + "2017-00000000-0000-0000-0000-000000000003", + "RHSA-2018-103", + events.TestingEvent, + ) models.ArtifactBuild.create( - db.session, self.db_event, "mksh", "module", build_id=1237, - state=ArtifactBuildState.CANCELED.value) + db.session, + self.db_event, + "mksh", + "module", + build_id=1237, + state=ArtifactBuildState.CANCELED.value, + ) models.ArtifactBuild.create( - db.session, self.db_event, "bash", "module", build_id=1238, - state=ArtifactBuildState.CANCELED.value) + db.session, + self.db_event, + "bash", + "module", + build_id=1238, + state=ArtifactBuildState.CANCELED.value, + ) def tearDown(self): self.koji_read_config_patcher.stop() - @patch('freshmaker.kojiservice.KojiService.cancel_build') + @patch("freshmaker.kojiservice.KojiService.cancel_build") def test_cancel_event_on_freshmaker_manage_request(self, mocked_cancel_build): - msg = get_fedmsg('freshmaker_manage_eventcancel') + msg = get_fedmsg("freshmaker_manage_eventcancel") event = self.get_event_from_msg(msg) handler = CancelEventOnFreshmakerManageRequest() @@ -95,35 +110,44 @@ def test_cancel_event_on_freshmaker_manage_request(self, mocked_cancel_build): mocked_cancel_build.assert_any_call(1237) mocked_cancel_build.assert_any_call(1238) - self.assertEqual([b.state_reason for b in self.db_event.builds.all()].count( - "Build canceled in external build system."), 2) + self.assertEqual( + [b.state_reason for b in self.db_event.builds.all()].count( + "Build canceled in external build system." 
+ ), + 2, + ) def test_can_not_handle_other_action_than_eventcancel(self): - msg = get_fedmsg('freshmaker_manage_eventcancel') - msg['body']['topic'] = 'freshmaker.manage.someotheraction' - msg['body']['msg']['action'] = 'someotheraction' + msg = get_fedmsg("freshmaker_manage_eventcancel") + msg["body"]["topic"] = "freshmaker.manage.someotheraction" + msg["body"]["msg"]["action"] = "someotheraction" event = self.get_event_from_msg(msg) handler = CancelEventOnFreshmakerManageRequest() self.assertFalse(handler.can_handle(event)) - @patch('freshmaker.kojiservice.KojiService.cancel_build', side_effect=[False, False]) + @patch("freshmaker.kojiservice.KojiService.cancel_build", side_effect=[False, False]) def test_max_tries_reached_on_cancel_event(self, mocked_cancel_build): - msg = get_fedmsg('freshmaker_manage_eventcancel') - msg['body']['msg']['try'] = events.FreshmakerManageEvent._max_tries - 1 + msg = get_fedmsg("freshmaker_manage_eventcancel") + msg["body"]["msg"]["try"] = events.FreshmakerManageEvent._max_tries - 1 event = self.get_event_from_msg(msg) handler = CancelEventOnFreshmakerManageRequest() retval = handler.handle(event) self.assertEqual(retval, []) - self.assertEqual([b.state_reason for b in self.db_event.builds.all()].count( - "Build was NOT canceled in external build system. Max number of tries reached!"), 2) - - @patch('freshmaker.kojiservice.KojiService.cancel_build', - side_effect=[False, False, True, True]) + self.assertEqual( + [b.state_reason for b in self.db_event.builds.all()].count( + "Build was NOT canceled in external build system. Max number of tries reached!" + ), + 2, + ) + + @patch( + "freshmaker.kojiservice.KojiService.cancel_build", side_effect=[False, False, True, True] + ) def test_retry_failed_cancel_event_with_success(self, mocked_cancel_build): - msg = get_fedmsg('freshmaker_manage_eventcancel') + msg = get_fedmsg("freshmaker_manage_eventcancel") event = self.get_event_from_msg(msg) handler = CancelEventOnFreshmakerManageRequest() @@ -132,9 +156,13 @@ def test_retry_failed_cancel_event_with_success(self, mocked_cancel_build): retval = handler.handle(new_event[0]) self.assertEqual(retval, []) - self.assertEqual([b.state_reason for b in self.db_event.builds.all()].count( - "Build canceled in external build system."), 2) + self.assertEqual( + [b.state_reason for b in self.db_event.builds.all()].count( + "Build canceled in external build system." 
+ ), + 2, + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/handlers/internal/test_update_db_on_odcs_compose_fail.py b/tests/handlers/internal/test_update_db_on_odcs_compose_fail.py index f407127a..8b522676 100644 --- a/tests/handlers/internal/test_update_db_on_odcs_compose_fail.py +++ b/tests/handlers/internal/test_update_db_on_odcs_compose_fail.py @@ -23,9 +23,14 @@ from freshmaker import db from freshmaker.models import ( - Event, EventState, EVENT_TYPES, - ArtifactBuild, ArtifactType, ArtifactBuildState, ArtifactBuildCompose, - Compose + Event, + EventState, + EVENT_TYPES, + ArtifactBuild, + ArtifactType, + ArtifactBuildState, + ArtifactBuildCompose, + Compose, ) from freshmaker.events import ErrataRPMAdvisoryShippedEvent from freshmaker.handlers.internal import UpdateDBOnODCSComposeFail @@ -35,53 +40,46 @@ class TestUpdateDBOnODCSComposeFail(helpers.ModelsTestCase): - def setUp(self): super(TestUpdateDBOnODCSComposeFail, self).setUp() - self.db_event = self._create_test_event( - "msg-1", "search-key-1", "build-1", 1) + self.db_event = self._create_test_event("msg-1", "search-key-1", "build-1", 1) # Create another DB event, build and compose just to have more data # in database. - self.db_event_2 = self._create_test_event( - "msg-2", "search-key-2", "another-build-1", 2) + self.db_event_2 = self._create_test_event("msg-2", "search-key-2", "another-build-1", 2) def _create_test_event(self, event_id, search_key, build_name, compose_id): db_event = Event.create( - db.session, event_id, search_key, + db.session, + event_id, + search_key, EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.INITIALIZED, - released=False) + released=False, + ) build_1 = ArtifactBuild.create( - db.session, db_event, build_name, ArtifactType.IMAGE, - state=ArtifactBuildState.PLANNED) + db.session, db_event, build_name, ArtifactType.IMAGE, state=ArtifactBuildState.PLANNED + ) compose_1 = Compose(odcs_compose_id=compose_id) db.session.add(compose_1) db.session.commit() - db.session.add(ArtifactBuildCompose( - build_id=build_1.id, compose_id=compose_1.id)) + db.session.add(ArtifactBuildCompose(build_id=build_1.id, compose_id=compose_1.id)) db.session.commit() return db_event def test_cannot_handle_if_compose_is_not_failed(self): - event = ODCSComposeStateChangeEvent( - 'msg-id', {'id': 1, 'state': COMPOSE_STATES["done"]} - ) + event = ODCSComposeStateChangeEvent("msg-id", {"id": 1, "state": COMPOSE_STATES["done"]}) handler = UpdateDBOnODCSComposeFail() can_handle = handler.can_handle(event) self.assertFalse(can_handle) def test_can_handle(self): - event = ODCSComposeStateChangeEvent( - 'msg-id', {'id': 1, 'state': COMPOSE_STATES["failed"]} - ) + event = ODCSComposeStateChangeEvent("msg-id", {"id": 1, "state": COMPOSE_STATES["failed"]}) handler = UpdateDBOnODCSComposeFail() can_handle = handler.can_handle(event) self.assertTrue(can_handle) def test_handle_mark_build_as_failed(self): - event = ODCSComposeStateChangeEvent( - 'msg-id', {'id': 1, 'state': COMPOSE_STATES["failed"]} - ) + event = ODCSComposeStateChangeEvent("msg-id", {"id": 1, "state": COMPOSE_STATES["failed"]}) handler = UpdateDBOnODCSComposeFail() handler.handle(event) @@ -90,8 +88,7 @@ def test_handle_mark_build_as_failed(self): build = self.db_event.builds[0] self.assertEqual(build.state, ArtifactBuildState.FAILED.value) - self.assertEqual(build.state_reason, - "ODCS compose 1 is in failed state.") + self.assertEqual(build.state_reason, "ODCS compose 1 is in failed state.") build = 
self.db_event_2.builds[0] self.assertEqual(build.state, ArtifactBuildState.PLANNED.value) diff --git a/tests/handlers/koji/test_rebuild_flatpak_application_on_module_ready.py b/tests/handlers/koji/test_rebuild_flatpak_application_on_module_ready.py index db421ef4..6047c564 100644 --- a/tests/handlers/koji/test_rebuild_flatpak_application_on_module_ready.py +++ b/tests/handlers/koji/test_rebuild_flatpak_application_on_module_ready.py @@ -66,20 +66,18 @@ def setUp(self): "name": "release-example", "description": "For tests", "type": "Zstream", - } + }, } } - self.from_advisory_id = self._patch( - "freshmaker.errata.ErrataAdvisory.from_advisory_id" - ) + self.from_advisory_id = self._patch("freshmaker.errata.ErrataAdvisory.from_advisory_id") self.advisory = ErrataAdvisory(123, "RHSA-123", "QE", ["module"], "Critical") self.from_advisory_id.return_value = self.advisory self.handler = RebuildFlatpakApplicationOnModuleReady() self.mock_image_modules_mapping = self._patch( "freshmaker.handlers.koji.RebuildFlatpakApplicationOnModuleReady._image_modules_mapping", - return_value={"image-foo-bar": {"module-foo-bar"}} + return_value={"image-foo-bar": {"module-foo-bar"}}, ) self.mock_pyxis_api = self._patch( @@ -243,7 +241,9 @@ def test_event_state_updated_when_no_auto_rebuild_images(self): ) def test_event_state_updated_when_no_images_with_higher_rpm_nvr(self): - self.mock_pyxis_api.return_value.get_images_by_nvrs.side_effect = lambda images, rpm_nvrs: [] + self.mock_pyxis_api.return_value.get_images_by_nvrs.side_effect = ( + lambda images, rpm_nvrs: [] + ) self.mock_pyxis.return_value.image_is_tagged_auto_rebuild.return_value = True self.handler.handle(self.event) @@ -358,9 +358,7 @@ def test_record_builds(self, create_odcs_client): """ Tests that builds are properly recorded in DB. 
""" - resolve_commit = self._patch( - "freshmaker.image.ContainerImage.resolve_commit" - ) + resolve_commit = self._patch("freshmaker.image.ContainerImage.resolve_commit") resolve_commit.return_value = None odcs = create_odcs_client.return_value @@ -400,16 +398,14 @@ def test_record_builds(self, create_odcs_client): self.assertEqual(args["renewed_odcs_compose_ids"], [10, 11]) def test_manual_event_can_handle(self): - event = FlatpakApplicationManualBuildEvent( - "123", self.advisory, container_images=[]) + event = FlatpakApplicationManualBuildEvent("123", self.advisory, container_images=[]) self.assertEqual(event.manual, True) self.assertEqual(self.handler.can_handle(event), True) @patch("freshmaker.handlers.koji.RebuildFlatpakApplicationOnModuleReady._record_builds") def test_manual_event_initialized(self, mock_record_builds): self.mock_pyxis.return_value.image_is_tagged_auto_rebuild.return_value = True - event = FlatpakApplicationManualBuildEvent( - "123", self.advisory, container_images=[]) + event = FlatpakApplicationManualBuildEvent("123", self.advisory, container_images=[]) self.handler.handle(event) db_event = Event.get(db.session, message_id="123") @@ -420,7 +416,8 @@ def test_manual_event_initialized(self, mock_record_builds): def test_manual_event_initialized_when_matching_images(self, mock_record_builds): self.mock_pyxis.return_value.image_is_tagged_auto_rebuild.return_value = True event = FlatpakApplicationManualBuildEvent( - "123", self.advisory, container_images=["image-foo-bar"]) + "123", self.advisory, container_images=["image-foo-bar"] + ) self.handler.handle(event) db_event = Event.get(db.session, message_id="123") @@ -430,7 +427,8 @@ def test_manual_event_initialized_when_matching_images(self, mock_record_builds) def test_manual_event_skipped_when_no_matching_images(self): self.mock_pyxis.return_value.image_is_tagged_auto_rebuild.return_value = False event = FlatpakApplicationManualBuildEvent( - "123", self.advisory, container_images=["image-foo-bar2"]) + "123", self.advisory, container_images=["image-foo-bar2"] + ) self.handler.handle(event) db_event = Event.get(db.session, message_id="123") @@ -444,8 +442,7 @@ def test_manual_event_skipped_when_no_matching_images(self): def test_manual_event_skipped_when_no_auto_rebuild_images(self): self.mock_pyxis.return_value.image_is_tagged_auto_rebuild.return_value = False - event = FlatpakApplicationManualBuildEvent( - "123", self.advisory, container_images=[]) + event = FlatpakApplicationManualBuildEvent("123", self.advisory, container_images=[]) self.handler.handle(event) db_event = Event.get(db.session, message_id="123") diff --git a/tests/handlers/koji/test_rebuild_images_on_async_manual_build.py b/tests/handlers/koji/test_rebuild_images_on_async_manual_build.py index 7e07f38e..ce75dfc6 100644 --- a/tests/handlers/koji/test_rebuild_images_on_async_manual_build.py +++ b/tests/handlers/koji/test_rebuild_images_on_async_manual_build.py @@ -31,39 +31,43 @@ class TestRebuildImagesOnAsyncManualBuild(helpers.ModelsTestCase): - def setUp(self): super(TestRebuildImagesOnAsyncManualBuild, self).setUp() - self.patcher = helpers.Patcher( - 'freshmaker.handlers.koji.RebuildImagesOnAsyncManualBuild.') + self.patcher = helpers.Patcher("freshmaker.handlers.koji.RebuildImagesOnAsyncManualBuild.") # We do not want to send messages to message bus while running tests - self.mock_messaging_publish = self.patcher.patch( - 'freshmaker.messaging.publish') + self.mock_messaging_publish = self.patcher.patch("freshmaker.messaging.publish") # Mocking 
koji self.mock_get_build = self.patcher.patch( - 'freshmaker.kojiservice.KojiService.get_build', - return_value={'build_id': 123456, 'extra': {'container_koji_task_id': 21938204}}) + "freshmaker.kojiservice.KojiService.get_build", + return_value={"build_id": 123456, "extra": {"container_koji_task_id": 21938204}}, + ) self.mock_get_task_request = self.patcher.patch( - 'freshmaker.kojiservice.KojiService.get_task_request', return_value=[ - 'git://example.com/rpms/repo-1#commit_hash1', - 'test-target', - {'compose_ids': None, - 'git_branch': 'test_branch', - 'scratch': False, - 'signing_intent': None, - 'yum_repourls': [('fake-url.repo')]}]) - - self.mock_allow_build = self.patcher.patch('allow_build', return_value=True) + "freshmaker.kojiservice.KojiService.get_task_request", + return_value=[ + "git://example.com/rpms/repo-1#commit_hash1", + "test-target", + { + "compose_ids": None, + "git_branch": "test_branch", + "scratch": False, + "signing_intent": None, + "yum_repourls": [("fake-url.repo")], + }, + ], + ) + + self.mock_allow_build = self.patcher.patch("allow_build", return_value=True) # Mocking Lightblue - self.mock_find_images_to_rebuild = self.patcher.patch('_find_images_to_rebuild') - self.mock_pyxis = self.patcher.patch('init_pyxis_api_instance') + self.mock_find_images_to_rebuild = self.patcher.patch("_find_images_to_rebuild") + self.mock_pyxis = self.patcher.patch("init_pyxis_api_instance") - self.mock_start_to_build_images = self.patcher.patch('start_to_build_images') + self.mock_start_to_build_images = self.patcher.patch("start_to_build_images") self.mock_get_image_builds_in_first_batch = self.patcher.patch( - 'freshmaker.models.Event.get_image_builds_in_first_batch') + "freshmaker.models.Event.get_image_builds_in_first_batch" + ) # Structure of the images used for testing: # image_0 @@ -85,166 +89,176 @@ def setUp(self): # image_c and image_f are unrelated. 
# image_0 is a base image, with no parent - self.image_0 = ContainerImage({ - 'repository': 'repo_1', - 'commit': '1234567', - 'target': 'container-candidate', - 'git_branch': 'test_branch', - 'content_sets': ['image_0_content_set_1', 'image_0_content_set_2'], - 'arches': 'x86_64', - 'brew': { - 'build': 'image-container-1.0-2', - 'package': 'image-container', - }, - 'parent': None, - 'parsed_data': { - 'layers': [ - 'sha512:7890', - 'sha512:5678', - ] - }, - 'published': False, - }) - - self.image_a = ContainerImage({ - 'repository': 'repo_1', - 'commit': '1234567', - 'target': 'container-candidate', - 'git_branch': 'test_branch', - 'content_sets': ['image_a_content_set_1', 'image_a_content_set_2'], - 'arches': 'x86_64', - 'brew': { - 'build': 'image-a-container-1.0-2', - 'package': 'image-a-container', - }, - 'parent': self.image_0, - 'parsed_data': { - 'layers': [ - 'sha512:7890', - 'sha512:5678', - ] - }, - 'published': False, - }) + self.image_0 = ContainerImage( + { + "repository": "repo_1", + "commit": "1234567", + "target": "container-candidate", + "git_branch": "test_branch", + "content_sets": ["image_0_content_set_1", "image_0_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-container-1.0-2", + "package": "image-container", + }, + "parent": None, + "parsed_data": { + "layers": [ + "sha512:7890", + "sha512:5678", + ] + }, + "published": False, + } + ) + + self.image_a = ContainerImage( + { + "repository": "repo_1", + "commit": "1234567", + "target": "container-candidate", + "git_branch": "test_branch", + "content_sets": ["image_a_content_set_1", "image_a_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-a-container-1.0-2", + "package": "image-a-container", + }, + "parent": self.image_0, + "parsed_data": { + "layers": [ + "sha512:7890", + "sha512:5678", + ] + }, + "published": False, + } + ) # image_b is a child image of image_a - self.image_b = ContainerImage({ - 'repository': 'repo_2', - 'commit': '5678901', - 'target': 'container-candidate', - 'git_branch': 'test_branch', - 'content_sets': ['image_b_content_set_1', 'image_b_content_set_2'], - 'arches': 'x86_64', - 'brew': { - 'build': 'image-b-container-2.14-1', - 'package': 'image-b-container' - }, - 'parent': self.image_a, - 'parsed_data': { - 'layers': [ - 'sha512:f109', - 'sha512:7890', - 'sha512:5678', - ] - }, - 'published': False, - }) + self.image_b = ContainerImage( + { + "repository": "repo_2", + "commit": "5678901", + "target": "container-candidate", + "git_branch": "test_branch", + "content_sets": ["image_b_content_set_1", "image_b_content_set_2"], + "arches": "x86_64", + "brew": {"build": "image-b-container-2.14-1", "package": "image-b-container"}, + "parent": self.image_a, + "parsed_data": { + "layers": [ + "sha512:f109", + "sha512:7890", + "sha512:5678", + ] + }, + "published": False, + } + ) # image_c is an image unrelated to image_a and image_b # it also has no parent image. 
# image_c has the same name of image_a, that's why it has this name - self.image_c = ContainerImage({ - 'repository': 'repo_1', - 'commit': '1234569', - 'target': 'container-candidate', - 'git_branch': 'test_branch', - 'content_sets': ['image_a_content_set_1', 'image_a_content_set_2'], - 'arches': 'x86_64', - 'brew': { - 'build': 'image-a-container-1.0-3', - 'package': 'image-a-container', - }, - 'parent': None, - 'parsed_data': { - 'layers': [ - 'sha512:7890', - 'sha512:5678', - ] - }, - 'published': False, - }) + self.image_c = ContainerImage( + { + "repository": "repo_1", + "commit": "1234569", + "target": "container-candidate", + "git_branch": "test_branch", + "content_sets": ["image_a_content_set_1", "image_a_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-a-container-1.0-3", + "package": "image-a-container", + }, + "parent": None, + "parsed_data": { + "layers": [ + "sha512:7890", + "sha512:5678", + ] + }, + "published": False, + } + ) # image_d is a child image of image_a, same as image_b # so image_d and image_b are unrelated, since they are sibilings - self.image_d = ContainerImage({ - 'repository': 'repo_2', - 'commit': '5678906', - 'target': 'container-candidate', - 'git_branch': 'test_branch', - 'content_sets': ['image_d_content_set_1', 'image_d_content_set_2'], - 'arches': 'x86_64', - 'brew': { - 'build': 'image-d-container-3.3-1', - 'package': 'image-d-container' - }, - 'parent': self.image_a, - 'parsed_data': { - 'layers': [ - 'sha512:f109', - ] - }, - 'published': False, - }) + self.image_d = ContainerImage( + { + "repository": "repo_2", + "commit": "5678906", + "target": "container-candidate", + "git_branch": "test_branch", + "content_sets": ["image_d_content_set_1", "image_d_content_set_2"], + "arches": "x86_64", + "brew": {"build": "image-d-container-3.3-1", "package": "image-d-container"}, + "parent": self.image_a, + "parsed_data": { + "layers": [ + "sha512:f109", + ] + }, + "published": False, + } + ) # image_e is a child image of image_d - self.image_e = ContainerImage({ - 'repository': 'repo_2', - 'commit': '5678906', - 'target': 'container-candidate', - 'git_branch': 'test_branch', - 'content_sets': ['image_e_content_set_1', 'image_e_content_set_2'], - 'arches': 'x86_64', - 'brew': { - 'build': 'image-e-container-3.3-1', - 'package': 'image-e-container' - }, - 'parent': self.image_d, - 'parsed_data': { - 'layers': [ - 'sha512:f109', - ] - }, - 'published': False, - }) - - self.image_f = ContainerImage({ - 'architecture': 'arm64', - 'brew': {'build': 's2i-core-container-1-147', - 'completion_date': '20200603T12:00:24.000-0400', - 'nvra': 's2i-core-container-1-147.arm64', - 'package': 's2i-core-container'}, - 'content_sets': ['rhel-8-for-x86_64-appstream-rpms', - 'rhel-8-for-aarch64-baseos-rpms', - 'rhel-8-for-x86_64-baseos-rpms', - 'rhel-8-for-s390x-baseos-rpms', - 'rhel-8-for-aarch64-appstream-rpms', - 'rhel-8-for-ppc64le-appstream-rpms', - 'rhel-8-for-ppc64le-baseos-rpms', - 'rhel-8-for-s390x-appstream-rpms'], - 'multi_arch_rpm_manifest': {}, - 'parent_brew_build': 'ubi8-container-8.2-299', - 'parsed_data': {}, - 'repositories': [{'published': True, - 'repository': 'rhel8/s2i-core', - 'tags': [{'name': '1-147'}]}, - {'published': True, - 'repository': 'ubi8/s2i-core', - 'tags': [{'name': '1-147'}]}] - }) + self.image_e = ContainerImage( + { + "repository": "repo_2", + "commit": "5678906", + "target": "container-candidate", + "git_branch": "test_branch", + "content_sets": ["image_e_content_set_1", "image_e_content_set_2"], + "arches": "x86_64", + 
"brew": {"build": "image-e-container-3.3-1", "package": "image-e-container"}, + "parent": self.image_d, + "parsed_data": { + "layers": [ + "sha512:f109", + ] + }, + "published": False, + } + ) + + self.image_f = ContainerImage( + { + "architecture": "arm64", + "brew": { + "build": "s2i-core-container-1-147", + "completion_date": "20200603T12:00:24.000-0400", + "nvra": "s2i-core-container-1-147.arm64", + "package": "s2i-core-container", + }, + "content_sets": [ + "rhel-8-for-x86_64-appstream-rpms", + "rhel-8-for-aarch64-baseos-rpms", + "rhel-8-for-x86_64-baseos-rpms", + "rhel-8-for-s390x-baseos-rpms", + "rhel-8-for-aarch64-appstream-rpms", + "rhel-8-for-ppc64le-appstream-rpms", + "rhel-8-for-ppc64le-baseos-rpms", + "rhel-8-for-s390x-appstream-rpms", + ], + "multi_arch_rpm_manifest": {}, + "parent_brew_build": "ubi8-container-8.2-299", + "parsed_data": {}, + "repositories": [ + { + "published": True, + "repository": "rhel8/s2i-core", + "tags": [{"name": "1-147"}], + }, + {"published": True, "repository": "ubi8/s2i-core", "tags": [{"name": "1-147"}]}, + ], + } + ) def test_can_handle_event(self): - event = FreshmakerAsyncManualBuildEvent( - 'msg-id-01', 'repo-branch', ['image1', 'image2']) + event = FreshmakerAsyncManualBuildEvent("msg-id-01", "repo-branch", ["image1", "image2"]) handler = RebuildImagesOnAsyncManualBuild() self.assertTrue(handler.can_handle(event)) @@ -254,13 +268,13 @@ def test_building_single_image(self): """ self.mock_find_images_to_rebuild.return_value = [self.image_a] self.mock_find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[[self.image_a]]) - event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', ['image-a-container']) + "find_images_trees_to_rebuild", return_value=[[self.image_a]] + ) + event = FreshmakerAsyncManualBuildEvent("msg-id-123", "test_branch", ["image-a-container"]) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.assertEqual(len(db_event.builds.all()), 1) @@ -272,17 +286,18 @@ def test_building_related_images_correct_order(self): """ self.mock_find_images_to_rebuild.return_value = [self.image_a, self.image_b] self.mock_find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[[self.image_a, self.image_b]]) - self.mock_generate_batches = self.patcher.patch('generate_batches', return_value=[ - [self.image_a], - [self.image_b] - ]) + "find_images_trees_to_rebuild", return_value=[[self.image_a, self.image_b]] + ) + self.mock_generate_batches = self.patcher.patch( + "generate_batches", return_value=[[self.image_a], [self.image_b]] + ) event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', ['image-b-container', 'image-a-container']) + "msg-id-123", "test_branch", ["image-b-container", "image-a-container"] + ) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.assertEqual(len(db_event.builds.all()), 2) @@ -295,12 +310,14 @@ def test_failed_to_build_images_never_built_before(self): """ self.mock_find_images_to_rebuild.return_value = 
[self.image_a] self.mock_find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[[self.image_a]]) + "find_images_trees_to_rebuild", return_value=[[self.image_a]] + ) event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'another-branch', ['image-a-container']) + "msg-id-123", "another-branch", ["image-a-container"] + ) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.FAILED.value, db_event.state) def test_multiple_nvrs_for_the_same_name(self): @@ -310,18 +327,18 @@ def test_multiple_nvrs_for_the_same_name(self): """ self.mock_find_images_to_rebuild.return_value = [self.image_a, self.image_c] self.mock_find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[[self.image_c]]) - event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', ['image-a-container']) + "find_images_trees_to_rebuild", return_value=[[self.image_c]] + ) + event = FreshmakerAsyncManualBuildEvent("msg-id-123", "test_branch", ["image-a-container"]) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.mock_start_to_build_images.assert_called_once() self.assertEqual(len(db_event.builds.all()), 1) - self.assertEqual(db_event.builds.one().original_nvr, 'image-a-container-1.0-3') + self.assertEqual(db_event.builds.one().original_nvr, "image-a-container-1.0-3") def test_building_sibilings(self): """ @@ -331,15 +348,19 @@ def test_building_sibilings(self): """ self.mock_find_images_to_rebuild.return_value = [self.image_b, self.image_d] self.find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[ + "find_images_trees_to_rebuild", + return_value=[ [self.image_b, self.image_a, self.image_0], - [self.image_d, self.image_a, self.image_0]]) + [self.image_d, self.image_a, self.image_0], + ], + ) event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', ['image-b-container', 'image-d-container']) + "msg-id-123", "test_branch", ["image-b-container", "image-d-container"] + ) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.assertEqual(len(db_event.builds.all()), 2) @@ -348,21 +369,25 @@ def test_building_sibilings(self): def test_building_images_with_disconnected_tree(self): self.mock_find_images_to_rebuild.return_value = [self.image_b, self.image_d, self.image_e] self.find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[ + "find_images_trees_to_rebuild", + return_value=[ [self.image_b, self.image_a, self.image_0], [self.image_d, self.image_a, self.image_0], - [self.image_e, self.image_d, self.image_a, self.image_0]]) - self.mock_generate_batches = self.patcher.patch('generate_batches', return_value=[ - [self.image_b, self.image_d], - [self.image_e] - ]) + [self.image_e, self.image_d, self.image_a, self.image_0], + ], + ) + self.mock_generate_batches = self.patcher.patch( + "generate_batches", 
return_value=[[self.image_b, self.image_d], [self.image_e]] + ) event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', - ['image-b-container', 'image-d-container', 'image-e-container']) + "msg-id-123", + "test_branch", + ["image-b-container", "image-d-container", "image-e-container"], + ) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.assertEqual(len(db_event.builds.all()), 3) @@ -371,22 +396,26 @@ def test_building_images_with_disconnected_tree(self): def test_intermediate_images_are_build(self): self.mock_find_images_to_rebuild.return_value = [self.image_b, self.image_d, self.image_0] self.find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[ + "find_images_trees_to_rebuild", + return_value=[ [self.image_0], [self.image_b, self.image_a, self.image_0], - [self.image_d, self.image_a, self.image_0]]) - self.mock_generate_batches = self.patcher.patch('generate_batches', return_value=[ - [self.image_0], - [self.image_a], - [self.image_b, self.image_d] - ]) + [self.image_d, self.image_a, self.image_0], + ], + ) + self.mock_generate_batches = self.patcher.patch( + "generate_batches", + return_value=[[self.image_0], [self.image_a], [self.image_b, self.image_d]], + ) event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', - ['image-container', 'image-b-container', 'image-d-container']) + "msg-id-123", + "test_branch", + ["image-container", "image-b-container", "image-d-container"], + ) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.assertEqual(len(db_event.builds.all()), 4) @@ -395,21 +424,25 @@ def test_intermediate_images_are_build(self): def test_related_images_are_built(self): self.mock_find_images_to_rebuild.return_value = [self.image_b, self.image_d, self.image_a] self.find_images_trees_to_rebuild = self.patcher.patch( - 'find_images_trees_to_rebuild', return_value=[ + "find_images_trees_to_rebuild", + return_value=[ [self.image_a, self.image_0], [self.image_b, self.image_a, self.image_0], - [self.image_d, self.image_a, self.image_0]]) - self.mock_generate_batches = self.patcher.patch('generate_batches', return_value=[ - [self.image_a], - [self.image_b, self.image_d] - ]) + [self.image_d, self.image_a, self.image_0], + ], + ) + self.mock_generate_batches = self.patcher.patch( + "generate_batches", return_value=[[self.image_a], [self.image_b, self.image_d]] + ) event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', - ['image-a-container', 'image-b-container', 'image-d-container']) + "msg-id-123", + "test_branch", + ["image-a-container", "image-b-container", "image-d-container"], + ) handler = RebuildImagesOnAsyncManualBuild() handler.handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") self.assertEqual(EventState.BUILDING.value, db_event.state) self.mock_get_image_builds_in_first_batch.assert_called_once_with(db.session) self.assertEqual(len(db_event.builds.all()), 3) @@ -421,19 +454,19 @@ def 
test_parent_if_image_without_parent(self): when image doesn't have "parent" key """ self.mock_find_images_to_rebuild.return_value = [self.image_f] - event = FreshmakerAsyncManualBuildEvent( - 'msg-id-123', 'test_branch', ['image-a-container']) + event = FreshmakerAsyncManualBuildEvent("msg-id-123", "test_branch", ["image-a-container"]) find_parent_mock = MagicMock() - find_parent_mock.find_parent_brew_build_nvr_from_child.return_value = 'ubi8-container-8.2-299' + find_parent_mock.find_parent_brew_build_nvr_from_child.return_value = ( + "ubi8-container-8.2-299" + ) self.mock_pyxis.return_value = find_parent_mock RebuildImagesOnAsyncManualBuild().handle(event) - db_event = Event.get(db.session, 'msg-id-123') + db_event = Event.get(db.session, "msg-id-123") # Check if build in DB corresponds to parent of the image build = db_event.builds.first().json() - self.assertEqual(build['build_args'].get('original_parent', 0), - 'ubi8-container-8.2-299') + self.assertEqual(build["build_args"].get("original_parent", 0), "ubi8-container-8.2-299") # check if we are calling Lightblue to get proper parent of image find_parent_mock.find_parent_brew_build_nvr_from_child.assert_called_once_with(self.image_f) @@ -444,19 +477,17 @@ def test_parent_if_image_with_parent(self): """ for index, image in enumerate([self.image_0, self.image_a], 1): self.mock_find_images_to_rebuild.return_value = [image] - event_id = f'msg-id-{index}' - event = FreshmakerAsyncManualBuildEvent( - event_id, 'test_branch', ['image-a-container']) + event_id = f"msg-id-{index}" + event = FreshmakerAsyncManualBuildEvent(event_id, "test_branch", ["image-a-container"]) RebuildImagesOnAsyncManualBuild().handle(event) db_event = Event.get(db.session, event_id) - if image['parent'] is not None: - original_parent = image['parent']['brew']['build'] + if image["parent"] is not None: + original_parent = image["parent"]["brew"]["build"] else: original_parent = None # Check if build in DB corresponds to parent of the image build = db_event.builds.first().json() - self.assertEqual(build['build_args'].get('original_parent', 0), - original_parent) + self.assertEqual(build["build_args"].get("original_parent", 0), original_parent) diff --git a/tests/handlers/koji/test_rebuild_images_on_odcs_compose_done.py b/tests/handlers/koji/test_rebuild_images_on_odcs_compose_done.py index c805605b..6dafe0cf 100644 --- a/tests/handlers/koji/test_rebuild_images_on_odcs_compose_done.py +++ b/tests/handlers/koji/test_rebuild_images_on_odcs_compose_done.py @@ -25,9 +25,14 @@ from freshmaker import db from freshmaker.models import ( - Event, EventState, EVENT_TYPES, - ArtifactBuild, ArtifactType, ArtifactBuildState, ArtifactBuildCompose, - Compose + Event, + EventState, + EVENT_TYPES, + ArtifactBuild, + ArtifactType, + ArtifactBuildState, + ArtifactBuildCompose, + Compose, ) from freshmaker.events import ErrataRPMAdvisoryShippedEvent from freshmaker.handlers.koji import RebuildImagesOnODCSComposeDone @@ -52,81 +57,112 @@ def setUp(self): # build 6 (not planned): [compose 1, pulp compose 6] self.db_event = Event.create( - db.session, 'msg-1', 'search-key-1', + db.session, + "msg-1", + "search-key-1", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.INITIALIZED, - released=False) + released=False, + ) self.build_1 = ArtifactBuild.create( - db.session, self.db_event, 'build-1', ArtifactType.IMAGE, - state=ArtifactBuildState.PLANNED) + db.session, + self.db_event, + "build-1", + ArtifactType.IMAGE, + state=ArtifactBuildState.PLANNED, + ) self.build_2 = 
ArtifactBuild.create( - db.session, self.db_event, 'build-2', ArtifactType.IMAGE, + db.session, + self.db_event, + "build-2", + ArtifactType.IMAGE, dep_on=self.build_1, - state=ArtifactBuildState.PLANNED) + state=ArtifactBuildState.PLANNED, + ) self.build_3 = ArtifactBuild.create( - db.session, self.db_event, 'build-3', ArtifactType.IMAGE, - state=ArtifactBuildState.PLANNED) + db.session, + self.db_event, + "build-3", + ArtifactType.IMAGE, + state=ArtifactBuildState.PLANNED, + ) self.build_4 = ArtifactBuild.create( - db.session, self.db_event, 'build-4', ArtifactType.IMAGE, + db.session, + self.db_event, + "build-4", + ArtifactType.IMAGE, dep_on=self.build_3, - state=ArtifactBuildState.PLANNED) + state=ArtifactBuildState.PLANNED, + ) self.build_5 = ArtifactBuild.create( - db.session, self.db_event, 'build-5', ArtifactType.IMAGE, + db.session, + self.db_event, + "build-5", + ArtifactType.IMAGE, dep_on=self.build_3, - state=ArtifactBuildState.PLANNED) + state=ArtifactBuildState.PLANNED, + ) self.build_6 = ArtifactBuild.create( - db.session, self.db_event, 'build-6', ArtifactType.IMAGE, - state=ArtifactBuildState.BUILD) + db.session, self.db_event, "build-6", ArtifactType.IMAGE, state=ArtifactBuildState.BUILD + ) self.compose_1 = Compose(odcs_compose_id=1) db.session.add(self.compose_1) db.session.commit() - builds = [self.build_1, self.build_2, self.build_3, - self.build_4, self.build_5, self.build_6] + builds = [ + self.build_1, + self.build_2, + self.build_3, + self.build_4, + self.build_5, + self.build_6, + ] composes = [self.compose_1] * 6 for build, compose in zip(builds, composes): - db.session.add(ArtifactBuildCompose( - build_id=build.id, compose_id=compose.id)) + db.session.add(ArtifactBuildCompose(build_id=build.id, compose_id=compose.id)) db.session.commit() # Create another DB event, build and compose just to have more data # in database. 
another_db_event = Event.create( - db.session, 'msg-2', 'search-key-2', + db.session, + "msg-2", + "search-key-2", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.INITIALIZED, - released=False) + released=False, + ) another_build_1 = ArtifactBuild.create( - db.session, another_db_event, 'another-build-1', ArtifactType.IMAGE, - state=ArtifactBuildState.PLANNED) + db.session, + another_db_event, + "another-build-1", + ArtifactType.IMAGE, + state=ArtifactBuildState.PLANNED, + ) another_compose_1 = Compose(odcs_compose_id=2) db.session.add(another_compose_1) db.session.commit() - db.session.add(ArtifactBuildCompose( - build_id=another_build_1.id, compose_id=another_compose_1.id)) + db.session.add( + ArtifactBuildCompose(build_id=another_build_1.id, compose_id=another_compose_1.id) + ) db.session.commit() def test_cannot_handle_if_compose_is_not_done(self): - event = ODCSComposeStateChangeEvent( - 'msg-id', {'id': 1, 'state': 'generating'} - ) + event = ODCSComposeStateChangeEvent("msg-id", {"id": 1, "state": "generating"}) handler = RebuildImagesOnODCSComposeDone() can_handle = handler.can_handle(event) self.assertFalse(can_handle) - @patch('freshmaker.models.ArtifactBuild.composes_ready', - new_callable=PropertyMock) - @patch('freshmaker.handlers.ContainerBuildHandler.start_to_build_images') + @patch("freshmaker.models.ArtifactBuild.composes_ready", new_callable=PropertyMock) + @patch("freshmaker.handlers.ContainerBuildHandler.start_to_build_images") def test_start_to_build(self, start_to_build_images, composes_ready): composes_ready.return_value = True - event = ODCSComposeStateChangeEvent( - 'msg-id', {'id': self.compose_1.id, 'state': 'done'} - ) + event = ODCSComposeStateChangeEvent("msg-id", {"id": self.compose_1.id, "state": "done"}) handler = RebuildImagesOnODCSComposeDone() handler.handle(event) @@ -135,16 +171,13 @@ def test_start_to_build(self, start_to_build_images, composes_ready): passed_builds = sorted(args[0], key=lambda build: build.id) self.assertEqual([self.build_1, self.build_3], passed_builds) - @patch('freshmaker.models.ArtifactBuild.composes_ready', - new_callable=PropertyMock) - @patch('freshmaker.handlers.ContainerBuildHandler.start_to_build_images') + @patch("freshmaker.models.ArtifactBuild.composes_ready", new_callable=PropertyMock) + @patch("freshmaker.handlers.ContainerBuildHandler.start_to_build_images") def test_start_to_build_parent_image_done(self, start_to_build_images, composes_ready): composes_ready.return_value = True self.build_1.state = ArtifactBuildState.DONE.value - event = ODCSComposeStateChangeEvent( - 'msg-id', {'id': self.compose_1.id, 'state': 'done'} - ) + event = ODCSComposeStateChangeEvent("msg-id", {"id": self.compose_1.id, "state": "done"}) handler = RebuildImagesOnODCSComposeDone() handler.handle(event) diff --git a/tests/handlers/koji/test_rebuild_images_on_parent_image_build.py b/tests/handlers/koji/test_rebuild_images_on_parent_image_build.py index da34ff85..7027f4d7 100644 --- a/tests/handlers/koji/test_rebuild_images_on_parent_image_build.py +++ b/tests/handlers/koji/test_rebuild_images_on_parent_image_build.py @@ -42,70 +42,110 @@ def test_can_not_handle_brew_container_task_closed_event(self): """ Tests handler can handle brew build container task closed event. 
""" - event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed')) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_closed")) self.assertFalse(self.handler.can_handle(event)) def test_can_not_handle_brew_container_task_failed_event(self): """ Tests handler can handle brew build container task failed event. """ - event = self.get_event_from_msg(get_fedmsg('brew_container_task_failed')) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_failed")) self.assertFalse(self.handler.can_handle(event)) - @mock.patch('freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build') - @mock.patch('freshmaker.handlers.ContainerBuildHandler.get_repo_urls') - @mock.patch('freshmaker.handlers.ContainerBuildHandler.set_context') - def test_build_containers_when_dependency_container_is_built(self, set_context, repo_urls, build_image): + @mock.patch("freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build") + @mock.patch("freshmaker.handlers.ContainerBuildHandler.get_repo_urls") + @mock.patch("freshmaker.handlers.ContainerBuildHandler.set_context") + def test_build_containers_when_dependency_container_is_built( + self, set_context, repo_urls, build_image + ): """ Tests when dependency container is built, rebuild containers depend on it. """ build_image.side_effect = [1, 2, 3] repo_urls.return_value = ["url"] e1 = models.Event.create(db.session, "test_msg_id", "RHSA-2018-001", events.TestingEvent) - event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed')) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_closed")) - base_build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id) + base_build = models.ArtifactBuild.create( + db.session, e1, "test-product-docker", ArtifactType.IMAGE, event.task_id + ) - build_0 = models.ArtifactBuild.create(db.session, e1, 'docker-up-0', ArtifactType.IMAGE, 0, - dep_on=base_build, state=ArtifactBuildState.PLANNED) - build_1 = models.ArtifactBuild.create(db.session, e1, 'docker-up-1', ArtifactType.IMAGE, 0, - dep_on=base_build, state=ArtifactBuildState.PLANNED) - build_2 = models.ArtifactBuild.create(db.session, e1, 'docker-up-2', ArtifactType.IMAGE, 0, - dep_on=base_build, state=ArtifactBuildState.PLANNED) + build_0 = models.ArtifactBuild.create( + db.session, + e1, + "docker-up-0", + ArtifactType.IMAGE, + 0, + dep_on=base_build, + state=ArtifactBuildState.PLANNED, + ) + build_1 = models.ArtifactBuild.create( + db.session, + e1, + "docker-up-1", + ArtifactType.IMAGE, + 0, + dep_on=base_build, + state=ArtifactBuildState.PLANNED, + ) + build_2 = models.ArtifactBuild.create( + db.session, + e1, + "docker-up-2", + ArtifactType.IMAGE, + 0, + dep_on=base_build, + state=ArtifactBuildState.PLANNED, + ) self.handler.handle(event) self.assertEqual(base_build.state, ArtifactBuildState.DONE.value) - build_image.assert_has_calls([ - mock.call(build_0, ['url']), mock.call(build_1, ['url']), - mock.call(build_2, ['url']), - ]) + build_image.assert_has_calls( + [ + mock.call(build_0, ["url"]), + mock.call(build_1, ["url"]), + mock.call(build_2, ["url"]), + ] + ) - set_context.assert_has_calls([ - mock.call(build_0), mock.call(build_1), mock.call(build_2)]) + set_context.assert_has_calls([mock.call(build_0), mock.call(build_1), mock.call(build_2)]) self.assertEqual(build_0.build_id, 1) self.assertEqual(build_1.build_id, 2) self.assertEqual(build_2.build_id, 3) - @mock.patch('freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build') - 
@mock.patch('freshmaker.handlers.ContainerBuildHandler.get_repo_urls') + @mock.patch("freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build") + @mock.patch("freshmaker.handlers.ContainerBuildHandler.get_repo_urls") def test_not_build_containers_when_dependency_container_build_task_failed( - self, repo_urls, build_image): + self, repo_urls, build_image + ): """ Tests when dependency container build task failed in brew, only update build state in db. """ build_image.side_effect = [1, 2, 3, 4] repo_urls.return_value = ["url"] e1 = models.Event.create(db.session, "test_msg_id", "RHSA-2018-001", events.TestingEvent) - event = self.get_event_from_msg(get_fedmsg('brew_container_task_failed')) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_failed")) base_build = models.ArtifactBuild.create( - db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id, - original_nvr='foo-1-1') + db.session, + e1, + "test-product-docker", + ArtifactType.IMAGE, + event.task_id, + original_nvr="foo-1-1", + ) base_build.build_args = json.dumps({}) - models.ArtifactBuild.create(db.session, e1, 'docker-up', ArtifactType.IMAGE, 0, - dep_on=base_build, state=ArtifactBuildState.PLANNED) + models.ArtifactBuild.create( + db.session, + e1, + "docker-up", + ArtifactType.IMAGE, + 0, + dep_on=base_build, + state=ArtifactBuildState.PLANNED, + ) self.handler.handle(event) self.assertEqual(base_build.state, ArtifactBuildState.BUILD.value) self.assertEqual(base_build.build_id, 1) @@ -119,204 +159,257 @@ def test_not_build_containers_when_dependency_container_build_task_failed( self.assertEqual(base_build.build_id, 2) build_image.assert_called() - @mock.patch('freshmaker.models.messaging.publish') + @mock.patch("freshmaker.models.messaging.publish") def test_mark_event_COMPLETE_if_all_builds_done(self, publish): self.db_advisory_rpm_signed_event = models.Event.create( - db.session, 'msg-id-123', '12345', + db.session, + "msg-id-123", + "12345", events.ErrataAdvisoryStateChangedEvent, - state=EventState.BUILDING.value) + state=EventState.BUILDING.value, + ) self.image_a_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-a-0.1-1', ArtifactType.IMAGE, - state=ArtifactBuildState.DONE.value) + db.session, + self.db_advisory_rpm_signed_event, + "image-a-0.1-1", + ArtifactType.IMAGE, + state=ArtifactBuildState.DONE.value, + ) self.image_b_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-b-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-b-0.1-1", + ArtifactType.IMAGE, dep_on=self.image_a_build, - state=ArtifactBuildState.DONE.value) + state=ArtifactBuildState.DONE.value, + ) self.image_c_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-c-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-c-0.1-1", + ArtifactType.IMAGE, dep_on=self.image_b_build, - state=ArtifactBuildState.FAILED.value) + state=ArtifactBuildState.FAILED.value, + ) self.image_d_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-d-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-d-0.1-1", + ArtifactType.IMAGE, dep_on=self.image_a_build, build_id=12345, - state=ArtifactBuildState.BUILD.value) + state=ArtifactBuildState.BUILD.value, + ) db.session.commit() state_changed_event = events.BrewContainerTaskStateChangeEvent( - 'msg-id-890', 
'image-d', 'branch', 'target', 12345, - 'BUILD', 'CLOSED') + "msg-id-890", "image-d", "branch", "target", 12345, "BUILD", "CLOSED" + ) handler = RebuildImagesOnParentImageBuild() handler.handle(state_changed_event) - self.assertEqual(EventState.COMPLETE.value, - self.db_advisory_rpm_signed_event.state) - self.assertEqual("Advisory 12345: 1 of 4 container image(s) failed to rebuild.", - self.db_advisory_rpm_signed_event.state_reason) + self.assertEqual(EventState.COMPLETE.value, self.db_advisory_rpm_signed_event.state) + self.assertEqual( + "Advisory 12345: 1 of 4 container image(s) failed to rebuild.", + self.db_advisory_rpm_signed_event.state_reason, + ) - @mock.patch('freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build') - @mock.patch('freshmaker.handlers.ContainerBuildHandler.get_repo_urls') + @mock.patch("freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build") + @mock.patch("freshmaker.handlers.ContainerBuildHandler.get_repo_urls") def test_not_change_state_if_not_all_builds_done( - self, get_repo_urls, build_image_artifact_build): + self, get_repo_urls, build_image_artifact_build + ): build_image_artifact_build.return_value = 67890 self.db_advisory_rpm_signed_event = models.Event.create( - db.session, 'msg-id-123', '12345', + db.session, + "msg-id-123", + "12345", events.ErrataAdvisoryStateChangedEvent, - state=EventState.BUILDING.value) + state=EventState.BUILDING.value, + ) self.image_a_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-a-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-a-0.1-1", + ArtifactType.IMAGE, build_id=12345, - state=ArtifactBuildState.BUILD.value) + state=ArtifactBuildState.BUILD.value, + ) self.image_b_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-b-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-b-0.1-1", + ArtifactType.IMAGE, dep_on=self.image_a_build, - state=ArtifactBuildState.PLANNED.value) + state=ArtifactBuildState.PLANNED.value, + ) self.image_c_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-c-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-c-0.1-1", + ArtifactType.IMAGE, dep_on=self.image_b_build, - state=ArtifactBuildState.FAILED.value) + state=ArtifactBuildState.FAILED.value, + ) db.session.commit() state_changed_event = events.BrewContainerTaskStateChangeEvent( - 'msg-id-890', 'image-a', 'branch', 'target', 12345, - 'BUILD', 'CLOSED') + "msg-id-890", "image-a", "branch", "target", 12345, "BUILD", "CLOSED" + ) handler = RebuildImagesOnParentImageBuild() handler.handle(state_changed_event) # As self.image_b_build starts to be rebuilt, not all images are # rebuilt yet. 
- self.assertEqual(EventState.BUILDING.value, - self.db_advisory_rpm_signed_event.state) + self.assertEqual(EventState.BUILDING.value, self.db_advisory_rpm_signed_event.state) - @mock.patch('freshmaker.kojiservice.KojiService') - @mock.patch('freshmaker.errata.Errata.get_binary_rpm_nvrs') - def test_mark_build_done_when_container_has_latest_rpms_from_advisory(self, get_binary_rpm_nvrs, KojiService): + @mock.patch("freshmaker.kojiservice.KojiService") + @mock.patch("freshmaker.errata.Errata.get_binary_rpm_nvrs") + def test_mark_build_done_when_container_has_latest_rpms_from_advisory( + self, get_binary_rpm_nvrs, KojiService + ): """ Tests when dependency container build task failed in brew, only update build state in db. """ - get_binary_rpm_nvrs.return_value = set(['foo-1.2.1-22.el7']) + get_binary_rpm_nvrs.return_value = set(["foo-1.2.1-22.el7"]) koji_service = KojiService.return_value koji_service.get_build_rpms.return_value = [ - {'build_id': 634904, 'nvr': 'foo-debuginfo-1.2.1-22.el7', 'name': 'foo-debuginfo'}, - {'build_id': 634904, 'nvr': 'foo-1.2.1-22.el7', 'name': 'foo'}, - {'build_id': 634904, 'nvr': 'foo-debuginfo-1.1.1-22.el7', 'name': 'foo-debuginfo'}, - {'build_id': 634904, 'nvr': 'foo-1.1.1-22.el7', 'name': 'foo'}, + {"build_id": 634904, "nvr": "foo-debuginfo-1.2.1-22.el7", "name": "foo-debuginfo"}, + {"build_id": 634904, "nvr": "foo-1.2.1-22.el7", "name": "foo"}, + {"build_id": 634904, "nvr": "foo-debuginfo-1.1.1-22.el7", "name": "foo-debuginfo"}, + {"build_id": 634904, "nvr": "foo-1.1.1-22.el7", "name": "foo"}, ] koji_service.get_rpms_in_container.return_value = set( - ['foo-1.2.1-22.el7', 'bar-1.2.3-1.el7'] + ["foo-1.2.1-22.el7", "bar-1.2.3-1.el7"] ) - e1 = models.Event.create(db.session, "test_msg_id", "2018001", events.ErrataRPMAdvisoryShippedEvent) - event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed')) - build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id) + e1 = models.Event.create( + db.session, "test_msg_id", "2018001", events.ErrataRPMAdvisoryShippedEvent + ) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_closed")) + build = models.ArtifactBuild.create( + db.session, e1, "test-product-docker", ArtifactType.IMAGE, event.task_id + ) self.handler.handle(event) self.assertEqual(build.state, ArtifactBuildState.DONE.value) - self.assertEqual(build.state_reason, 'Built successfully.') + self.assertEqual(build.state_reason, "Built successfully.") - @mock.patch('freshmaker.kojiservice.KojiService') - @mock.patch('freshmaker.errata.Errata.get_binary_rpm_nvrs') - def test_mark_build_fail_when_container_not_has_latest_rpms_from_advisory(self, get_binary_rpm_nvrs, KojiService): + @mock.patch("freshmaker.kojiservice.KojiService") + @mock.patch("freshmaker.errata.Errata.get_binary_rpm_nvrs") + def test_mark_build_fail_when_container_not_has_latest_rpms_from_advisory( + self, get_binary_rpm_nvrs, KojiService + ): """ Tests when dependency container build task failed in brew, only update build state in db. 
""" - get_binary_rpm_nvrs.return_value = set(['foo-1.2.1-23.el7']) + get_binary_rpm_nvrs.return_value = set(["foo-1.2.1-23.el7"]) koji_service = KojiService.return_value koji_service.get_build_rpms.return_value = [ - {'build_id': 634904, 'nvr': 'foo-debuginfo-1.2.1-23.el7', 'name': 'foo-debuginfo'}, - {'build_id': 634904, 'nvr': 'foo-1.2.1-23.el7', 'name': 'foo'}, - {'build_id': 634904, 'nvr': 'foo-debuginfo-1.1.1-22.el7', 'name': 'foo-debuginfo'}, - {'build_id': 634904, 'nvr': 'foo-1.1.1-22.el7', 'name': 'foo'}, + {"build_id": 634904, "nvr": "foo-debuginfo-1.2.1-23.el7", "name": "foo-debuginfo"}, + {"build_id": 634904, "nvr": "foo-1.2.1-23.el7", "name": "foo"}, + {"build_id": 634904, "nvr": "foo-debuginfo-1.1.1-22.el7", "name": "foo-debuginfo"}, + {"build_id": 634904, "nvr": "foo-1.1.1-22.el7", "name": "foo"}, ] koji_service.get_rpms_in_container.return_value = set( - ['foo-1.2.1-22.el7', 'bar-1.2.3-1.el7'] + ["foo-1.2.1-22.el7", "bar-1.2.3-1.el7"] ) - e1 = models.Event.create(db.session, "test_msg_id", "2018001", events.ErrataRPMAdvisoryShippedEvent) - event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed')) - build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id) + e1 = models.Event.create( + db.session, "test_msg_id", "2018001", events.ErrataRPMAdvisoryShippedEvent + ) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_closed")) + build = models.ArtifactBuild.create( + db.session, e1, "test-product-docker", ArtifactType.IMAGE, event.task_id + ) self.handler.handle(event) self.assertEqual(build.state, ArtifactBuildState.FAILED.value) self.assertRegex(build.state_reason, r"The following RPMs in container build.*") - @mock.patch('freshmaker.kojiservice.KojiService') - @mock.patch('freshmaker.errata.Errata.get_binary_rpm_nvrs') + @mock.patch("freshmaker.kojiservice.KojiService") + @mock.patch("freshmaker.errata.Errata.get_binary_rpm_nvrs") def test_mark_manual_build_failed_when_container_has_not_latest_rpms_from_advisory( - self, get_binary_rpm_nvrs, KojiService): + self, get_binary_rpm_nvrs, KojiService + ): """ Tests when the build gets marked as FAILED in case of a manual rebuild with images with unmatched versions (rpms in images and rpms in advisory). 
""" - get_binary_rpm_nvrs.return_value = set(['foo-1.2.1-23.el7']) + get_binary_rpm_nvrs.return_value = set(["foo-1.2.1-23.el7"]) koji_service = KojiService.return_value koji_service.get_build_rpms.return_value = [ - {'build_id': 634904, 'nvr': 'foo-1.2.1-23.el7', 'name': 'foo'}, - {'build_id': 634904, 'nvr': 'foo-1.1.1-22.el7', 'name': 'foo'}, + {"build_id": 634904, "nvr": "foo-1.2.1-23.el7", "name": "foo"}, + {"build_id": 634904, "nvr": "foo-1.1.1-22.el7", "name": "foo"}, ] - koji_service.get_rpms_in_container.return_value = set( - ['foo-1.2.1-22.el7'] - ) + koji_service.get_rpms_in_container.return_value = set(["foo-1.2.1-22.el7"]) - e1 = models.Event.create(db.session, "test_msg_id", "2018001", events.ManualRebuildWithAdvisoryEvent) - event = self.get_event_from_msg(get_fedmsg('brew_container_task_closed')) - build = models.ArtifactBuild.create(db.session, e1, 'test-product-docker', ArtifactType.IMAGE, event.task_id) + e1 = models.Event.create( + db.session, "test_msg_id", "2018001", events.ManualRebuildWithAdvisoryEvent + ) + event = self.get_event_from_msg(get_fedmsg("brew_container_task_closed")) + build = models.ArtifactBuild.create( + db.session, e1, "test-product-docker", ArtifactType.IMAGE, event.task_id + ) self.handler.handle(event) self.assertEqual(build.state, ArtifactBuildState.FAILED.value) self.assertRegex(build.state_reason, r"The following RPMs in container build.*") - @mock.patch('freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build') - @mock.patch('freshmaker.handlers.ContainerBuildHandler.get_repo_urls') - @mock.patch('freshmaker.handlers.koji.rebuild_images_on_parent_image_build.' - 'RebuildImagesOnParentImageBuild.start_to_build_images', - side_effect=RuntimeError('something went wrong!')) + @mock.patch("freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build") + @mock.patch("freshmaker.handlers.ContainerBuildHandler.get_repo_urls") + @mock.patch( + "freshmaker.handlers.koji.rebuild_images_on_parent_image_build." + "RebuildImagesOnParentImageBuild.start_to_build_images", + side_effect=RuntimeError("something went wrong!"), + ) def test_no_event_state_change_if_service_fails( - self, update_db, get_repo_urls, build_image_artifact_build): + self, update_db, get_repo_urls, build_image_artifact_build + ): build_image_artifact_build.return_value = 67890 self.db_advisory_rpm_signed_event = models.Event.create( - db.session, 'msg-id-123', '12345', + db.session, + "msg-id-123", + "12345", events.ErrataAdvisoryStateChangedEvent, - state=EventState.BUILDING.value) + state=EventState.BUILDING.value, + ) self.image_a_build = models.ArtifactBuild.create( - db.session, self.db_advisory_rpm_signed_event, - 'image-a-0.1-1', ArtifactType.IMAGE, + db.session, + self.db_advisory_rpm_signed_event, + "image-a-0.1-1", + ArtifactType.IMAGE, build_id=12345, state=ArtifactBuildState.PLANNED.value, - original_nvr='image-a-0.1-1', rebuilt_nvr='image-a-0.1-2') + original_nvr="image-a-0.1-1", + rebuilt_nvr="image-a-0.1-2", + ) # Empty json. self.image_a_build.build_args = "{}" db.session.commit() state_changed_event = events.BrewContainerTaskStateChangeEvent( - 'msg-id-890', 'image-a', 'branch', 'target', 12345, - 'BUILD', 'FAILED') + "msg-id-890", "image-a", "branch", "target", 12345, "BUILD", "FAILED" + ) handler = RebuildImagesOnParentImageBuild() with self.assertRaises(RuntimeError): @@ -324,9 +417,8 @@ def test_no_event_state_change_if_service_fails( # As self.image_b_build starts to be rebuilt, not all images are # rebuilt yet. 
- self.assertEqual(EventState.BUILDING.value, - self.db_advisory_rpm_signed_event.state) + self.assertEqual(EventState.BUILDING.value, self.db_advisory_rpm_signed_event.state) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/handlers/koji/test_rebuild_images_on_rpm_advisory_change.py b/tests/handlers/koji/test_rebuild_images_on_rpm_advisory_change.py index 0c7437f6..7c243e02 100644 --- a/tests/handlers/koji/test_rebuild_images_on_rpm_advisory_change.py +++ b/tests/handlers/koji/test_rebuild_images_on_rpm_advisory_change.py @@ -29,12 +29,12 @@ from freshmaker.events import ( ErrataRPMAdvisoryShippedEvent, ManualRebuildWithAdvisoryEvent, - BaseEvent) + BaseEvent, +) from freshmaker.handlers.koji import RebuildImagesOnRPMAdvisoryChange from freshmaker.image import ContainerImage from freshmaker.models import Event, Compose, ArtifactBuild, EVENT_TYPES -from freshmaker.types import ( - ArtifactBuildState, ArtifactType, EventState, RebuildReason) +from freshmaker.types import ArtifactBuildState, ArtifactType, EventState, RebuildReason from freshmaker.errata import ErrataAdvisory from freshmaker.config import any_ from freshmaker import conf @@ -42,7 +42,6 @@ class TestRebuildImagesOnRPMAdvisoryChange(helpers.ModelsTestCase): - def setUp(self): super(TestRebuildImagesOnRPMAdvisoryChange, self).setUp() @@ -51,19 +50,17 @@ def setUp(self): # tests. # There are 6 images used to run tests which will be created below, so # there should be 6 composes created as Pulp repos. - self.patcher = helpers.Patcher( - 'freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.') + self.patcher = helpers.Patcher("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.") # We do not want to send messages to message bus while running tests - self.mock_messaging_publish = self.patcher.patch( - 'freshmaker.messaging.publish') + self.mock_messaging_publish = self.patcher.patch("freshmaker.messaging.publish") self.mock_prepare_pulp_repo = self.patcher.patch( - 'freshmaker.odcsclient.FreshmakerODCSClient.prepare_pulp_repo', - side_effect=[{'id': compose_id} for compose_id in range(1, 7)]) + "freshmaker.odcsclient.FreshmakerODCSClient.prepare_pulp_repo", + side_effect=[{"id": compose_id} for compose_id in range(1, 7)], + ) - self.mock_find_images_to_rebuild = self.patcher.patch( - '_find_images_to_rebuild') + self.mock_find_images_to_rebuild = self.patcher.patch("_find_images_to_rebuild") # Fake images found to rebuild has these relationships # @@ -72,161 +69,173 @@ def setUp(self): # image_b | image_d (child of image_a) | # | image_e (child of image_b) | # - self.image_a = ContainerImage({ - 'repository': 'repo_1', - 'commit': '1234567', - 'target': 'docker-container-candidate', - 'git_branch': 'rhel-7.4', - 'content_sets': ['image_a_content_set_1', 'image_a_content_set_2'], - "arches": "x86_64", - 'brew': { - 'build': 'image-a-1.0-2', - }, - 'parent': None, - 'parsed_data': { - 'layers': [ - 'sha512:7890', - 'sha512:5678', - ] - }, - "generate_pulp_repos": True, - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - }) - self.image_b = ContainerImage({ - 'repository': 'repo_2', - 'commit': '23e9f22', - 'target': 'docker-container-candidate', - 'git_branch': 'rhel-7.4', - 'content_sets': ['image_b_content_set_1', 'image_b_content_set_2'], - "arches": "x86_64", - 'brew': { - 'build': 'image-b-1.0-1' - }, - 'parent': None, - 'parsed_data': { - 'layers': [ - 'sha512:1234', - 'sha512:4567', - ] - }, - "generate_pulp_repos": True, - "odcs_compose_ids": [], - 
"compose_sources": [], - "published": False, - }) - self.image_c = ContainerImage({ - 'repository': 'repo_2', - 'commit': '2345678', - 'target': 'docker-container-candidate', - 'git_branch': 'rhel-7.4', - 'content_sets': ['image_c_content_set_1', 'image_d_content_set_2'], - "arches": "x86_64", - 'brew': { - 'build': 'image-c-0.2-9', - }, - 'parent': self.image_a, - 'parsed_data': { - 'layers': [ - 'sha512:4ef3', - 'sha512:7890', - 'sha512:5678', - ] - }, - "generate_pulp_repos": True, - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - }) - self.image_d = ContainerImage({ - 'repository': 'repo_2', - 'commit': '5678901', - 'target': 'docker-container-candidate', - 'git_branch': 'rhel-7.4', - 'content_sets': ['image_d_content_set_1', 'image_d_content_set_2'], - "arches": "x86_64", - 'brew': { - 'build': 'image-d-2.14-1', - }, - 'parent': self.image_a, - 'parsed_data': { - 'layers': [ - 'sha512:f109', - 'sha512:7890', - 'sha512:5678', - ] - }, - "generate_pulp_repos": True, - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - }) - self.image_e = ContainerImage({ - 'repository': 'repo_2', - 'commit': '7890123', - 'target': 'docker-container-candidate', - 'git_branch': 'rhel-7.4', - 'content_sets': ['image_e_content_set_1', 'image_e_content_set_2'], - "arches": "x86_64", - 'brew': { - 'build': 'image-e-1.0-1', - }, - 'parent': self.image_b, - 'parsed_data': { - 'layers': [ - 'sha512:5aae', - 'sha512:1234', - 'sha512:4567', - ] - }, - "generate_pulp_repos": True, - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - }) - self.image_f = ContainerImage({ - 'repository': 'repo_2', - 'commit': '3829384', - 'target': 'docker-container-candidate', - 'git_branch': 'rhel-7.4', - 'content_sets': ['image_f_content_set_1', 'image_f_content_set_2'], - "arches": "x86_64", - 'brew': { - 'build': 'image-f-0.2-1', - }, - 'parent': self.image_b, - 'parsed_data': { - 'layers': [ - 'sha512:8b9e', - 'sha512:1234', - 'sha512:4567', - ] - }, - "generate_pulp_repos": True, - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - }) + self.image_a = ContainerImage( + { + "repository": "repo_1", + "commit": "1234567", + "target": "docker-container-candidate", + "git_branch": "rhel-7.4", + "content_sets": ["image_a_content_set_1", "image_a_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-a-1.0-2", + }, + "parent": None, + "parsed_data": { + "layers": [ + "sha512:7890", + "sha512:5678", + ] + }, + "generate_pulp_repos": True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + self.image_b = ContainerImage( + { + "repository": "repo_2", + "commit": "23e9f22", + "target": "docker-container-candidate", + "git_branch": "rhel-7.4", + "content_sets": ["image_b_content_set_1", "image_b_content_set_2"], + "arches": "x86_64", + "brew": {"build": "image-b-1.0-1"}, + "parent": None, + "parsed_data": { + "layers": [ + "sha512:1234", + "sha512:4567", + ] + }, + "generate_pulp_repos": True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + self.image_c = ContainerImage( + { + "repository": "repo_2", + "commit": "2345678", + "target": "docker-container-candidate", + "git_branch": "rhel-7.4", + "content_sets": ["image_c_content_set_1", "image_d_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-c-0.2-9", + }, + "parent": self.image_a, + "parsed_data": { + "layers": [ + "sha512:4ef3", + "sha512:7890", + "sha512:5678", + ] + }, + "generate_pulp_repos": 
True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + self.image_d = ContainerImage( + { + "repository": "repo_2", + "commit": "5678901", + "target": "docker-container-candidate", + "git_branch": "rhel-7.4", + "content_sets": ["image_d_content_set_1", "image_d_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-d-2.14-1", + }, + "parent": self.image_a, + "parsed_data": { + "layers": [ + "sha512:f109", + "sha512:7890", + "sha512:5678", + ] + }, + "generate_pulp_repos": True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + self.image_e = ContainerImage( + { + "repository": "repo_2", + "commit": "7890123", + "target": "docker-container-candidate", + "git_branch": "rhel-7.4", + "content_sets": ["image_e_content_set_1", "image_e_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-e-1.0-1", + }, + "parent": self.image_b, + "parsed_data": { + "layers": [ + "sha512:5aae", + "sha512:1234", + "sha512:4567", + ] + }, + "generate_pulp_repos": True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + self.image_f = ContainerImage( + { + "repository": "repo_2", + "commit": "3829384", + "target": "docker-container-candidate", + "git_branch": "rhel-7.4", + "content_sets": ["image_f_content_set_1", "image_f_content_set_2"], + "arches": "x86_64", + "brew": { + "build": "image-f-0.2-1", + }, + "parent": self.image_b, + "parsed_data": { + "layers": [ + "sha512:8b9e", + "sha512:1234", + "sha512:4567", + ] + }, + "generate_pulp_repos": True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) # For simplicify, mocking _find_images_to_rebuild to just return one # batch, which contains images found for rebuild from parent to # childrens. 
self.mock_find_images_to_rebuild.return_value = [ [self.image_a, self.image_b], [self.image_c, self.image_d, self.image_e], - [self.image_f] + [self.image_f], ] self.rhba_event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", [], - security_impact="", - product_short_name="product")) + ErrataAdvisory( + 123, "RHBA-2017", "REL_PREP", [], security_impact="", product_short_name="product" + ), + ) self.rhsa_event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="", - product_short_name="product")) + ErrataAdvisory( + 123, "RHSA-2017", "REL_PREP", [], security_impact="", product_short_name="product" + ), + ) def tearDown(self): super(TestRebuildImagesOnRPMAdvisoryChange, self).tearDown() @@ -236,10 +245,16 @@ def test_can_handle_manual_rebuild_with_advisory_event(self): for content_type in [["rpm"], ["module"]]: event = ManualRebuildWithAdvisoryEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", content_type, - security_impact="", - product_short_name="product"), - ["foo-container", "bar-container"]) + ErrataAdvisory( + 123, + "RHBA-2017", + "REL_PREP", + content_type, + security_impact="", + product_short_name="product", + ), + ["foo-container", "bar-container"], + ) handler = RebuildImagesOnRPMAdvisoryChange() ret = handler.can_handle(event) self.assertTrue(ret) @@ -248,37 +263,49 @@ def test_cannot_handle_manual_rebuild_for_non_rpm_and_module(self): for content_type in [["non-rpm"], []]: event = ManualRebuildWithAdvisoryEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", content_type, - security_impact="", - product_short_name="product"), - ["foo-container", "bar-container"]) + ErrataAdvisory( + 123, + "RHBA-2017", + "REL_PREP", + content_type, + security_impact="", + product_short_name="product", + ), + ["foo-container", "bar-container"], + ) handler = RebuildImagesOnRPMAdvisoryChange() ret = handler.can_handle(event) self.assertFalse(ret) - @patch.object(freshmaker.conf, 'dry_run', new=True) + @patch.object(freshmaker.conf, "dry_run", new=True) def test_requester_on_manual_rebuild(self): event = ManualRebuildWithAdvisoryEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", ["rpm"], - security_impact="", - product_short_name="product"), + ErrataAdvisory( + 123, + "RHBA-2017", + "REL_PREP", + ["rpm"], + security_impact="", + product_short_name="product", + ), ["foo-container", "bar-container"], - requester="requester1") + requester="requester1", + ) handler = RebuildImagesOnRPMAdvisoryChange() ret = handler.can_handle(event) self.assertTrue(ret) handler.handle(event) - db_event = Event.get(db.session, message_id='123') - self.assertEqual(db_event.requester, 'requester1') + db_event = Event.get(db.session, message_id="123") + self.assertEqual(db_event.requester, "requester1") - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'product_short_name': 'foo'} - } - }) - @patch.object(freshmaker.conf, 'dry_run', new=True) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"RebuildImagesOnRPMAdvisoryChange": {"image": {"product_short_name": "foo"}}}, + ) + @patch.object(freshmaker.conf, "dry_run", new=True) def test_allow_build_by_product_short_name(self): compose_4 = Compose(odcs_compose_id=4) db.session.add(compose_4) @@ -288,17 +315,19 @@ def test_allow_build_by_product_short_name(self): handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(self.rhba_event) - db_event = 
Event.get(db.session, message_id='123') + db_event = Event.get(db.session, message_id="123") self.assertEqual(db_event.state, EventState.SKIPPED.value) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': { - 'advisory_security_impact': ['critical', 'important'] + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "RebuildImagesOnRPMAdvisoryChange": { + "image": {"advisory_security_impact": ["critical", "important"]} } - } - }) - @patch.object(freshmaker.conf, 'dry_run', new=True) + }, + ) + @patch.object(freshmaker.conf, "dry_run", new=True) def test_allow_build_by_security_impact(self): compose_4 = Compose(odcs_compose_id=4) db.session.add(compose_4) @@ -310,24 +339,31 @@ def test_allow_build_by_security_impact(self): handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(self.rhba_event) - db_event = Event.get(db.session, message_id='123') + db_event = Event.get(db.session, message_id="123") self.assertEqual(db_event.state, EventState.SKIPPED.value) if severity == "moderate": - self.assertTrue(db_event.state_reason.endswith( - "is not allowed by internal policy to trigger rebuilds.")) + self.assertTrue( + db_event.state_reason.endswith( + "is not allowed by internal policy to trigger rebuilds." + ) + ) else: self.assertEqual( - db_event.state_reason, - "No container images to rebuild for advisory 'RHBA-2017'") + db_event.state_reason, "No container images to rebuild for advisory 'RHBA-2017'" + ) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': { - 'advisory_has_hightouch_bug': True, + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "RebuildImagesOnRPMAdvisoryChange": { + "image": { + "advisory_has_hightouch_bug": True, + } } - } - }) - @patch.object(freshmaker.conf, 'dry_run', new=True) + }, + ) + @patch.object(freshmaker.conf, "dry_run", new=True) def test_allow_build_has_hightouch_bug(self): compose_4 = Compose(odcs_compose_id=4) db.session.add(compose_4) @@ -339,58 +375,61 @@ def test_allow_build_has_hightouch_bug(self): handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(self.rhba_event) - db_event = Event.get(db.session, message_id='123') + db_event = Event.get(db.session, message_id="123") self.assertEqual(db_event.state, EventState.SKIPPED.value) if not has_hightouch_bug: - self.assertTrue(db_event.state_reason.endswith( - "is not allowed by internal policy to trigger rebuilds.")) + self.assertTrue( + db_event.state_reason.endswith( + "is not allowed by internal policy to trigger rebuilds." 
+ ) + ) else: self.assertEqual( - db_event.state_reason, - "No container images to rebuild for advisory 'RHBA-2017'") + db_event.state_reason, "No container images to rebuild for advisory 'RHBA-2017'" + ) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-2017'} - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHBA-2017"}}}, + ) def test_event_state_updated_when_no_images_to_rebuild(self): self.mock_find_images_to_rebuild.return_value = [[]] handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(self.rhba_event) - db_event = Event.get(db.session, message_id='123') + db_event = Event.get(db.session, message_id="123") self.assertEqual(db_event.state, EventState.SKIPPED.value) self.assertEqual( - db_event.state_reason, - "No container images to rebuild for advisory 'RHBA-2017'") - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-2017'} - } - }) + db_event.state_reason, "No container images to rebuild for advisory 'RHBA-2017'" + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHBA-2017"}}}, + ) def test_event_state_updated_when_all_images_failed(self): - self.image_a['error'] = "foo" - self.mock_find_images_to_rebuild.return_value = [ - [self.image_a]] + self.image_a["error"] = "foo" + self.mock_find_images_to_rebuild.return_value = [[self.image_a]] handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(self.rhba_event) - db_event = Event.get(db.session, message_id='123') + db_event = Event.get(db.session, message_id="123") self.assertEqual(db_event.state, EventState.COMPLETE.value) self.assertEqual( - db_event.state_reason, - "No container images to rebuild, all are in failed state.") - - @patch('freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.' - 'allow_build', return_value=True) - @patch('freshmaker.odcsclient.FreshmakerODCSClient.prepare_yum_repos_for_rebuilds') - @patch('freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.' - 'start_to_build_images') + db_event.state_reason, "No container images to rebuild, all are in failed state." + ) + + @patch( + "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." "allow_build", + return_value=True, + ) + @patch("freshmaker.odcsclient.FreshmakerODCSClient.prepare_yum_repos_for_rebuilds") + @patch("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." "start_to_build_images") def test_rebuild_if_errata_state_is_prior_to_SHIPPED_LIVE( - self, start_to_build_images, prepare_yum_repos_for_rebuilds, - allow_build): + self, start_to_build_images, prepare_yum_repos_for_rebuilds, allow_build + ): handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(self.rhsa_event) @@ -400,20 +439,31 @@ def test_rebuild_if_errata_state_is_prior_to_SHIPPED_LIVE( db_event = Event.get(db.session, self.rhsa_event.msg_id) self.assertEqual(EventState.BUILDING.value, db_event.state) - @patch('freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.' - 'allow_build', return_value=True) - @patch('freshmaker.odcsclient.FreshmakerODCSClient.prepare_yum_repos_for_rebuilds') - @patch('freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.' 
- 'start_to_build_images') - @patch('freshmaker.models.Event.get_image_builds_in_first_batch') + @patch( + "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." "allow_build", + return_value=True, + ) + @patch("freshmaker.odcsclient.FreshmakerODCSClient.prepare_yum_repos_for_rebuilds") + @patch("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." "start_to_build_images") + @patch("freshmaker.models.Event.get_image_builds_in_first_batch") def test_rebuild_if_errata_state_is_SHIPPED_LIVE( - self, get_image_builds_in_first_batch, start_to_build_images, - prepare_yum_repos_for_rebuilds, allow_build): + self, + get_image_builds_in_first_batch, + start_to_build_images, + prepare_yum_repos_for_rebuilds, + allow_build, + ): event = ErrataRPMAdvisoryShippedEvent( - 'msg-id-123', - ErrataAdvisory(123, "RHSA-2017", "SHIPPED_LIVE", [], - security_impact="", - product_short_name="product")) + "msg-id-123", + ErrataAdvisory( + 123, + "RHSA-2017", + "SHIPPED_LIVE", + [], + security_impact="", + product_short_name="product", + ), + ) handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(event) @@ -426,40 +476,40 @@ def test_rebuild_if_errata_state_is_SHIPPED_LIVE( class TestFindImagesToRebuild(helpers.FreshmakerTestCase): - def setUp(self): super(TestFindImagesToRebuild, self).setUp() - self.patcher = helpers.Patcher( - "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.") + self.patcher = helpers.Patcher("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.") self.get_content_set_by_repo_ids = self.patcher.patch( - 'freshmaker.pulp.Pulp.get_content_set_by_repo_ids', - return_value=["content-set-1"]) + "freshmaker.pulp.Pulp.get_content_set_by_repo_ids", return_value=["content-set-1"] + ) self.get_pulp_repository_ids = self.patcher.patch( - 'freshmaker.errata.Errata.get_pulp_repository_ids', - return_value=["pulp_repo_x86_64"]) + "freshmaker.errata.Errata.get_pulp_repository_ids", return_value=["pulp_repo_x86_64"] + ) self.get_affected_srpm_nvrs = self.patcher.patch( - 'freshmaker.errata.Errata.get_cve_affected_rpm_nvrs', - return_value=["httpd-2.4-11.el7"]) + "freshmaker.errata.Errata.get_cve_affected_rpm_nvrs", return_value=["httpd-2.4-11.el7"] + ) self.find_images_to_rebuild = self.patcher.patch( - 'freshmaker.image.PyxisAPI.find_images_to_rebuild', - return_value=[[]]) + "freshmaker.image.PyxisAPI.find_images_to_rebuild", return_value=[[]] + ) self.event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", [], - security_impact="", - product_short_name="product")) + ErrataAdvisory( + 123, "RHBA-2017", "REL_PREP", [], security_impact="", product_short_name="product" + ), + ) self.manual_event = ManualRebuildWithAdvisoryEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", [], - security_impact="", - product_short_name="product"), - ["foo", "bar"]) + ErrataAdvisory( + 123, "RHBA-2017", "REL_PREP", [], security_impact="", product_short_name="product" + ), + ["foo", "bar"], + ) self.handler = RebuildImagesOnRPMAdvisoryChange() self.handler.event = self.event @@ -467,141 +517,192 @@ def tearDown(self): super(TestFindImagesToRebuild, self).tearDown() self.patcher.unpatch_all() - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-*'} - } - }) - @patch('os.path.exists', return_value=True) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHBA-*"}}}, + ) + 
@patch("os.path.exists", return_value=True) def test_published_unset(self, exists): for x in self.handler._find_images_to_rebuild(123456): pass self.find_images_to_rebuild.assert_called_once_with( - ['httpd-2.4-11.el7'], ['content-set-1', 'pulp_repo_x86_64'], + ["httpd-2.4-11.el7"], + ["content-set-1", "pulp_repo_x86_64"], filter_fnc=self.handler._filter_out_not_allowed_builds, - published=True, release_categories=conf.container_release_categories, - leaf_container_images=None, skip_nvrs=None) - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-*'} - } - }) - @patch('os.path.exists', return_value=True) + published=True, + release_categories=conf.container_release_categories, + leaf_container_images=None, + skip_nvrs=None, + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHBA-*"}}}, + ) + @patch("os.path.exists", return_value=True) def test_multiple_srpms(self, exists): self.get_affected_srpm_nvrs.return_value = ["httpd-2.4-11.el7", "httpd-2.2-11.el6"] for x in self.handler._find_images_to_rebuild(123456): pass self.find_images_to_rebuild.assert_called_once_with( - ['httpd-2.4-11.el7', 'httpd-2.2-11.el6'], - ['content-set-1', 'pulp_repo_x86_64'], + ["httpd-2.4-11.el7", "httpd-2.2-11.el6"], + ["content-set-1", "pulp_repo_x86_64"], filter_fnc=self.handler._filter_out_not_allowed_builds, - published=True, release_categories=conf.container_release_categories, - leaf_container_images=None, skip_nvrs=None) - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': any_({'advisory_name': 'RHBA-*', 'published': True, - 'advisory_product_short_name': 'foo'}, - {'advisory_name': 'RHBA-*', 'published': False, - 'advisory_product_short_name': 'product'}) - } - }) - @patch('os.path.exists', return_value=True) + published=True, + release_categories=conf.container_release_categories, + leaf_container_images=None, + skip_nvrs=None, + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "RebuildImagesOnRPMAdvisoryChange": { + "image": any_( + { + "advisory_name": "RHBA-*", + "published": True, + "advisory_product_short_name": "foo", + }, + { + "advisory_name": "RHBA-*", + "published": False, + "advisory_product_short_name": "product", + }, + ) + } + }, + ) + @patch("os.path.exists", return_value=True) def test_published_false(self, exists): for x in self.handler._find_images_to_rebuild(123456): pass self.find_images_to_rebuild.assert_called_once_with( - ['httpd-2.4-11.el7'], ['content-set-1', 'pulp_repo_x86_64'], + ["httpd-2.4-11.el7"], + ["content-set-1", "pulp_repo_x86_64"], filter_fnc=self.handler._filter_out_not_allowed_builds, - published=None, release_categories=None, - leaf_container_images=None, skip_nvrs=None) - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-*', - 'published': True} - } - }) - @patch('os.path.exists', return_value=True) + published=None, + release_categories=None, + leaf_container_images=None, + skip_nvrs=None, + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "RebuildImagesOnRPMAdvisoryChange": { + "image": {"advisory_name": "RHBA-*", "published": True} + } + }, + ) + @patch("os.path.exists", return_value=True) def test_published_true(self, exists): for x in 
self.handler._find_images_to_rebuild(123456): pass self.find_images_to_rebuild.assert_called_once_with( - ['httpd-2.4-11.el7'], ['content-set-1', 'pulp_repo_x86_64'], + ["httpd-2.4-11.el7"], + ["content-set-1", "pulp_repo_x86_64"], filter_fnc=self.handler._filter_out_not_allowed_builds, - published=True, release_categories=conf.container_release_categories, - leaf_container_images=None, skip_nvrs=None) - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-*', - 'published': True} - } - }) - @patch('os.path.exists', return_value=True) + published=True, + release_categories=conf.container_release_categories, + leaf_container_images=None, + skip_nvrs=None, + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "RebuildImagesOnRPMAdvisoryChange": { + "image": {"advisory_name": "RHBA-*", "published": True} + } + }, + ) + @patch("os.path.exists", return_value=True) def test_manual_event_leaf_container_images(self, exists): self.handler.event = self.manual_event for x in self.handler._find_images_to_rebuild(123456): pass self.find_images_to_rebuild.assert_called_once_with( - ['httpd-2.4-11.el7'], ['content-set-1', 'pulp_repo_x86_64'], + ["httpd-2.4-11.el7"], + ["content-set-1", "pulp_repo_x86_64"], filter_fnc=self.handler._filter_out_not_allowed_builds, - published=True, release_categories=conf.container_release_categories, - leaf_container_images=["foo", "bar"], skip_nvrs=None) - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'RebuildImagesOnRPMAdvisoryChange': { - 'image': {'advisory_name': 'RHBA-*'} - } - }) - @patch("freshmaker.errata.ErrataAdvisory.affected_rpm_nvrs", - new_callable=PropertyMock, - return_value=["nodejs-10.19.0-1.module+el8.1.0+5726+6ed65f8c.x86_64"]) - @patch('os.path.exists', return_value=True) + published=True, + release_categories=conf.container_release_categories, + leaf_container_images=["foo", "bar"], + skip_nvrs=None, + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHBA-*"}}}, + ) + @patch( + "freshmaker.errata.ErrataAdvisory.affected_rpm_nvrs", + new_callable=PropertyMock, + return_value=["nodejs-10.19.0-1.module+el8.1.0+5726+6ed65f8c.x86_64"], + ) + @patch("os.path.exists", return_value=True) def test_affected_packages_with_modules(self, exists, affected_rpm_nvrs): self.handler._find_images_to_rebuild(123456) self.find_images_to_rebuild.assert_called_once_with( - ['nodejs-10.19.0-1.module+el8.1.0+5726+6ed65f8c.x86_64'], - ['content-set-1', 'pulp_repo_x86_64'], + ["nodejs-10.19.0-1.module+el8.1.0+5726+6ed65f8c.x86_64"], + ["content-set-1", "pulp_repo_x86_64"], filter_fnc=self.handler._filter_out_not_allowed_builds, - published=True, release_categories=conf.container_release_categories, - leaf_container_images=None, skip_nvrs=None) + published=True, + release_categories=conf.container_release_categories, + leaf_container_images=None, + skip_nvrs=None, + ) class TestAllowBuild(helpers.ModelsTestCase): """Test RebuildImagesOnRPMAdvisoryChange.allow_build""" - @patch("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." - "_find_images_to_rebuild", return_value=[]) - @patch("freshmaker.config.Config.handler_build_allowlist", - new_callable=PropertyMock, return_value={ - "RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHSA-.*"}}}) + @patch( + "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." 
"_find_images_to_rebuild", + return_value=[], + ) + @patch( + "freshmaker.config.Config.handler_build_allowlist", + new_callable=PropertyMock, + return_value={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHSA-.*"}}}, + ) def test_allow_build_false(self, handler_build_allowlist, record_images): """ Tests that allow_build filters out advisories based on advisory_name. """ event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHBA-2017", "REL_PREP", [], - security_impact="", - product_short_name="product")) + ErrataAdvisory( + 123, "RHBA-2017", "REL_PREP", [], security_impact="", product_short_name="product" + ), + ) handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(event) record_images.assert_not_called() - @patch("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." - "_find_images_to_rebuild", return_value=[]) - @patch("freshmaker.config.Config.handler_build_allowlist", - new_callable=PropertyMock, return_value={ - "RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHSA-.*"}}}) + @patch( + "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." "_find_images_to_rebuild", + return_value=[], + ) + @patch( + "freshmaker.config.Config.handler_build_allowlist", + new_callable=PropertyMock, + return_value={"RebuildImagesOnRPMAdvisoryChange": {"image": {"advisory_name": "RHSA-.*"}}}, + ) def test_allow_build_true(self, handler_build_allowlist, record_images): """ Tests that allow_build does not filter out advisories based on @@ -609,71 +710,82 @@ def test_allow_build_true(self, handler_build_allowlist, record_images): """ event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="", - product_short_name="product")) + ErrataAdvisory( + 123, "RHSA-2017", "REL_PREP", [], security_impact="", product_short_name="product" + ), + ) handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(event) record_images.assert_called_once() self.assertEqual(handler.current_db_event_id, 1) - @patch("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." - "_find_images_to_rebuild", return_value=[]) + @patch( + "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." "_find_images_to_rebuild", + return_value=[], + ) @patch( "freshmaker.config.Config.handler_build_allowlist", new_callable=PropertyMock, return_value={ "RebuildImagesOnRPMAdvisoryChange": { "image": { - "advisory_security_impact": [ - "Normal", "Important" - ], + "advisory_security_impact": ["Normal", "Important"], "image_name": "foo", } } - }) - def test_allow_security_impact_important_true( - self, handler_build_allowlist, record_images): + }, + ) + def test_allow_security_impact_important_true(self, handler_build_allowlist, record_images): """ Tests that allow_build does not filter out advisories based on advisory_security_impact. """ event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="Important", - product_short_name="product")) + ErrataAdvisory( + 123, + "RHSA-2017", + "REL_PREP", + [], + security_impact="Important", + product_short_name="product", + ), + ) handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(event) record_images.assert_called_once() - @patch("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." - "_find_images_to_rebuild", return_value=[]) + @patch( + "freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange." 
"_find_images_to_rebuild", + return_value=[], + ) @patch( "freshmaker.config.Config.handler_build_allowlist", new_callable=PropertyMock, return_value={ "RebuildImagesOnRPMAdvisoryChange": { - "image": { - "advisory_security_impact": [ - "Normal", "Important" - ] - } + "image": {"advisory_security_impact": ["Normal", "Important"]} } - }) - def test_allow_security_impact_important_false( - self, handler_build_allowlist, record_images): + }, + ) + def test_allow_security_impact_important_false(self, handler_build_allowlist, record_images): """ Tests that allow_build dost filter out advisories based on advisory_security_impact. """ event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="None", - product_short_name="product")) + ErrataAdvisory( + 123, + "RHSA-2017", + "REL_PREP", + [], + security_impact="None", + product_short_name="product", + ), + ) handler = RebuildImagesOnRPMAdvisoryChange() handler.handle(event) @@ -683,14 +795,10 @@ def test_allow_security_impact_important_false( "freshmaker.config.Config.handler_build_allowlist", new_callable=PropertyMock, return_value={ - "RebuildImagesOnRPMAdvisoryChange": { - "image": { - "image_name": ["foo", "bar"] - } - } - }) - def test_filter_out_not_allowed_builds( - self, handler_build_allowlist): + "RebuildImagesOnRPMAdvisoryChange": {"image": {"image_name": ["foo", "bar"]}} + }, + ) + def test_filter_out_not_allowed_builds(self, handler_build_allowlist): """ Tests that allow_build does filter images based on image_name. """ @@ -698,9 +806,15 @@ def test_filter_out_not_allowed_builds( handler = RebuildImagesOnRPMAdvisoryChange() handler.event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="None", - product_short_name="product")) + ErrataAdvisory( + 123, + "RHSA-2017", + "REL_PREP", + [], + security_impact="None", + product_short_name="product", + ), + ) image = ContainerImage({"brew": {"build": "foo-1-2.3"}}) ret = handler._filter_out_not_allowed_builds(image) @@ -728,9 +842,9 @@ def test_filter_out_not_allowed_builds( "advisory_name": "RHSA-.*", } } - }) - def test_filter_out_image_name_and_advisory_name( - self, handler_build_allowlist): + }, + ) + def test_filter_out_image_name_and_advisory_name(self, handler_build_allowlist): """ Tests that allow_build does filter images based on image_name. 
""" @@ -738,9 +852,15 @@ def test_filter_out_image_name_and_advisory_name( handler = RebuildImagesOnRPMAdvisoryChange() handler.event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="None", - product_short_name="product")) + ErrataAdvisory( + 123, + "RHSA-2017", + "REL_PREP", + [], + security_impact="None", + product_short_name="product", + ), + ) image = ContainerImage({"brew": {"build": "foo-1-2.3"}}) ret = handler._filter_out_not_allowed_builds(image) @@ -754,12 +874,9 @@ def test_filter_out_image_name_and_advisory_name( "freshmaker.config.Config.handler_build_allowlist", new_callable=PropertyMock, return_value={ - "RebuildImagesOnRPMAdvisoryChange": { - "image": { - "image_name": ["foo", "bar"] - } - } - }) + "RebuildImagesOnRPMAdvisoryChange": {"image": {"image_name": ["foo", "bar"]}} + }, + ) @patch( "freshmaker.config.Config.handler_build_blocklist", new_callable=PropertyMock, @@ -772,15 +889,23 @@ def test_filter_out_image_name_and_advisory_name( } ) } - }) + }, + ) def test_filter_out_not_allowed_builds_image_version( - self, handler_build_blocklist, handler_build_allowlist): + self, handler_build_blocklist, handler_build_allowlist + ): handler = RebuildImagesOnRPMAdvisoryChange() handler.event = ErrataRPMAdvisoryShippedEvent( "123", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="None", - product_short_name="product")) + ErrataAdvisory( + 123, + "RHSA-2017", + "REL_PREP", + [], + security_impact="None", + product_short_name="product", + ), + ) image = ContainerImage({"brew": {"build": "foo-1-2.3"}}) ret = handler._filter_out_not_allowed_builds(image) @@ -804,31 +929,29 @@ class TestBatches(helpers.ModelsTestCase): def setUp(self): super(TestBatches, self).setUp() - self.patcher = helpers.Patcher( - 'freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.') + self.patcher = helpers.Patcher("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.") def tearDown(self): super(TestBatches, self).tearDown() self.patcher.unpatch_all() - def _mock_build( - self, build, parent=None, error=None, **kwargs): + def _mock_build(self, build, parent=None, error=None, **kwargs): if parent: parent = ContainerImage({"brew": {"build": parent + "-1-1.25"}}) d = { - 'brew': {'build': build + "-1-1.25"}, - 'repository': build + '_repo', - 'parsed_data': { - 'layers': [ - 'sha512:1234', - 'sha512:4567', - 'sha512:7890', + "brew": {"build": build + "-1-1.25"}, + "repository": build + "_repo", + "parsed_data": { + "layers": [ + "sha512:1234", + "sha512:4567", + "sha512:7890", ], }, - 'commit': build + '_123', - 'parent': parent, + "commit": build + "_123", + "parent": parent, "target": "t1", - 'git_branch': 'mybranch', + "git_branch": "mybranch", "error": error, "content_sets": ["first-content-set"], "generate_pulp_repos": True, @@ -840,7 +963,7 @@ def _mock_build( d.update(kwargs) return ContainerImage(d) - @patch('freshmaker.odcsclient.create_odcs_client') + @patch("freshmaker.odcsclient.create_odcs_client") def test_batches_records(self, create_odcs_client): """ Tests that batches are properly recorded in DB. @@ -848,11 +971,14 @@ def test_batches_records(self, create_odcs_client): odcs = create_odcs_client.return_value # There are 8 mock builds below and each of them requires one pulp # compose. 
- composes = [{ - 'id': compose_id, - 'result_repofile': 'http://localhost/{}.repo'.format(compose_id), - 'state_name': 'done' - } for compose_id in range(1, 9)] + composes = [ + { + "id": compose_id, + "result_repofile": "http://localhost/{}.repo".format(compose_id), + "state_name": "done", + } + for compose_id in range(1, 9) + ] odcs.new_compose.side_effect = composes odcs.get_compose.return_value = {} @@ -865,14 +991,22 @@ def test_batches_records(self, create_odcs_client): # |- child2_parent2 # |- child2_parent1 # |- child2 - batches = [[self._mock_build("shared_parent")], - [self._mock_build("child1_parent3", "shared_parent"), - self._mock_build("child2_parent2", "shared_parent")], - [self._mock_build("child1_parent2", "child1_parent3"), - self._mock_build("child2_parent1", "child2_parent2")], - [self._mock_build("child1_parent1", "child1_parent2", error="Fail"), - self._mock_build("child2", "child2_parent1", directly_affected=True)], - [self._mock_build("child1", "child1_parent1", directly_affected=True)]] + batches = [ + [self._mock_build("shared_parent")], + [ + self._mock_build("child1_parent3", "shared_parent"), + self._mock_build("child2_parent2", "shared_parent"), + ], + [ + self._mock_build("child1_parent2", "child1_parent3"), + self._mock_build("child2_parent1", "child2_parent2"), + ], + [ + self._mock_build("child1_parent1", "child1_parent2", error="Fail"), + self._mock_build("child2", "child2_parent1", directly_affected=True), + ], + [self._mock_build("child1", "child1_parent1", directly_affected=True)], + ] # Flat list of images from batches with brew build id as a key. images = {} @@ -898,8 +1032,8 @@ def test_batches_records(self, create_odcs_client): self.assertEqual(build.type, ArtifactType.IMAGE.value) image = images[build.original_nvr] - if image['parent']: - self.assertEqual(build.dep_on.original_nvr, image['parent']['brew']['build']) + if image["parent"]: + self.assertEqual(build.dep_on.original_nvr, image["parent"]["brew"]["build"]) else: self.assertEqual(build.dep_on, None) @@ -911,10 +1045,10 @@ def test_batches_records(self, create_odcs_client): args = json.loads(build.build_args) self.assertEqual(args["repository"], build.name + "_repo") self.assertEqual(args["commit"], build.name + "_123") - self.assertEqual(args["original_parent"], - build.dep_on.original_nvr if build.dep_on else None) - self.assertEqual(args["renewed_odcs_compose_ids"], - [10, 11]) + self.assertEqual( + args["original_parent"], build.dep_on.original_nvr if build.dep_on else None + ) + self.assertEqual(args["renewed_odcs_compose_ids"], [10, 11]) class TestCheckImagesToRebuild(helpers.ModelsTestCase): @@ -923,37 +1057,45 @@ class TestCheckImagesToRebuild(helpers.ModelsTestCase): def setUp(self): super(TestCheckImagesToRebuild, self).setUp() - build_args = json.dumps({ - "original_parent": "nvr", - "repository": "repo", - "target": "target", - "commit": "hash", - "branch": "mybranch", - "yum_repourl": "http://localhost/composes/latest-odcs-3-1/compose/" - "Temporary/odcs-3.repo", - "odcs_pulp_compose_id": 15, - }) - - self.ev = Event.create(db.session, 'msg-id', '123', - EVENT_TYPES[ErrataRPMAdvisoryShippedEvent]) + build_args = json.dumps( + { + "original_parent": "nvr", + "repository": "repo", + "target": "target", + "commit": "hash", + "branch": "mybranch", + "yum_repourl": "http://localhost/composes/latest-odcs-3-1/compose/" + "Temporary/odcs-3.repo", + "odcs_pulp_compose_id": 15, + } + ) + + self.ev = Event.create( + db.session, "msg-id", "123", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent] 
+ ) self.b1 = ArtifactBuild.create( - db.session, self.ev, "parent", "image", + db.session, + self.ev, + "parent", + "image", state=ArtifactBuildState.PLANNED, - original_nvr="parent-1-25") + original_nvr="parent-1-25", + ) self.b1.build_args = build_args self.b2 = ArtifactBuild.create( - db.session, self.ev, "child", "image", + db.session, + self.ev, + "child", + "image", state=ArtifactBuildState.PLANNED, dep_on=self.b1, - original_nvr="child-1-25") + original_nvr="child-1-25", + ) self.b2.build_args = build_args db.session.commit() def test_check_images_to_rebuild(self): - builds = { - "parent-1-25": self.b1, - "child-1-25": self.b2 - } + builds = {"parent-1-25": self.b1, "child-1-25": self.b2} handler = RebuildImagesOnRPMAdvisoryChange() handler.set_context(self.ev) @@ -967,9 +1109,7 @@ def test_check_images_to_rebuild(self): def test_check_images_to_rebuild_missing_dep(self): # Do not include child nvr here to test that _check_images_to_rebuild # sets the state of event to failed. - builds = { - "parent-1-25": self.b1 - } + builds = {"parent-1-25": self.b1} handler = RebuildImagesOnRPMAdvisoryChange() handler.set_context(self.ev) @@ -1003,18 +1143,18 @@ class TestRecordBatchesImages(helpers.ModelsTestCase): def setUp(self): super(TestRecordBatchesImages, self).setUp() - self.mock_event = Mock(spec=BaseEvent, msg_id='msg-id', search_key=12345, - manual=False, dry_run=False) + self.mock_event = Mock( + spec=BaseEvent, msg_id="msg-id", search_key=12345, manual=False, dry_run=False + ) - self.patcher = helpers.Patcher( - 'freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.') + self.patcher = helpers.Patcher("freshmaker.handlers.koji.RebuildImagesOnRPMAdvisoryChange.") self.mock_prepare_pulp_repo = self.patcher.patch( - 'freshmaker.odcsclient.FreshmakerODCSClient.prepare_pulp_repo', - side_effect=[{'id': 1}, {'id': 2}]) + "freshmaker.odcsclient.FreshmakerODCSClient.prepare_pulp_repo", + side_effect=[{"id": 1}, {"id": 2}], + ) - self.patcher.patch_dict( - 'freshmaker.models.EVENT_TYPES', {self.mock_event.__class__: 0}) + self.patcher.patch_dict("freshmaker.models.EVENT_TYPES", {self.mock_event.__class__: 0}) def tearDown(self): super(TestRecordBatchesImages, self).tearDown() @@ -1022,76 +1162,78 @@ def tearDown(self): def test_record_batches(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "generate_pulp_repos": True, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })], - [ContainerImage({ - "brew": { - "build": "rh-dotnetcore10-docker-1.0-16", - "package": "rh-dotnetcore10-docker", - "completion_date": "20170511T10:06:09.000-0400" - }, - 'parsed_data': { - 'layers': [ - 'sha512:2345af2e293', - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - 
"git_branch": "rhel-7", - "error": None - }), - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "987654321", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "generate_pulp_repos": True, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "generate_pulp_repos": True, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ], + [ + ContainerImage( + { + "brew": { + "build": "rh-dotnetcore10-docker-1.0-16", + "package": "rh-dotnetcore10-docker", + "completion_date": "20170511T10:06:09.000-0400", + }, + "parsed_data": { + "layers": [ + "sha512:2345af2e293", + "sha512:12345678980", + "sha512:10987654321", + ] + }, + "parent": ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": { + "layers": ["sha512:12345678980", "sha512:10987654321"] + }, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + } + ), + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "987654321", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "generate_pulp_repos": True, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ], ] handler = RebuildImagesOnRPMAdvisoryChange() @@ -1100,14 +1242,14 @@ def test_record_batches(self): # Check parent image query = db.session.query(ArtifactBuild) parent_image = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' + ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82" ).first() self.assertNotEqual(None, parent_image) self.assertEqual(ArtifactBuildState.PLANNED.value, parent_image.state) # Check child image child_image = query.filter( - ArtifactBuild.original_nvr == 'rh-dotnetcore10-docker-1.0-16' + ArtifactBuild.original_nvr == "rh-dotnetcore10-docker-1.0-16" ).first() self.assertNotEqual(None, child_image) self.assertEqual(parent_image, child_image.dep_on) @@ -1115,31 +1257,30 @@ def test_record_batches(self): def test_record_batches_should_not_generate_pulp_repos(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "generate_pulp_repos": False, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": ["content-set-1"], - "published": True, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": 
{"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "generate_pulp_repos": False, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": ["content-set-1"], + "published": True, + } + ) + ] ] handler = RebuildImagesOnRPMAdvisoryChange() @@ -1148,7 +1289,7 @@ def test_record_batches_should_not_generate_pulp_repos(self): # Check parent image query = db.session.query(ArtifactBuild) parent_image = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' + ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82" ).first() self.assertNotEqual(None, parent_image) self.assertEqual(ArtifactBuildState.PLANNED.value, parent_image.state) @@ -1156,31 +1297,30 @@ def test_record_batches_should_not_generate_pulp_repos(self): def test_record_batches_generate_pulp_repos_when_image_unpublished(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "generate_pulp_repos": False, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "generate_pulp_repos": False, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ] ] handler = RebuildImagesOnRPMAdvisoryChange() @@ -1189,7 +1329,7 @@ def test_record_batches_generate_pulp_repos_when_image_unpublished(self): # Check parent image query = db.session.query(ArtifactBuild) parent_image = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' + ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82" ).first() self.assertNotEqual(None, parent_image) self.assertEqual(ArtifactBuildState.PLANNED.value, parent_image.state) @@ -1197,31 +1337,30 @@ def test_record_batches_generate_pulp_repos_when_image_unpublished(self): def test_record_batches_not_generate_hidden_pulp_repos(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-hidden-rpms"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "generate_pulp_repos": False, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + 
"parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-hidden-rpms"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "generate_pulp_repos": False, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ] ] handler = RebuildImagesOnRPMAdvisoryChange() @@ -1230,7 +1369,7 @@ def test_record_batches_not_generate_hidden_pulp_repos(self): # Check parent image query = db.session.query(ArtifactBuild) parent_image = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' + ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82" ).first() self.assertNotEqual(None, parent_image) self.assertEqual(ArtifactBuildState.PLANNED.value, parent_image.state) @@ -1238,75 +1377,77 @@ def test_record_batches_not_generate_hidden_pulp_repos(self): def test_pulp_compose_generated_just_once(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })], - [ContainerImage({ - "brew": { - "build": "rh-dotnetcore10-docker-1.0-16", - "package": "rh-dotnetcore10-docker", - "completion_date": "20170511T10:06:09.000-0400" - }, - 'parsed_data': { - 'layers': [ - 'sha512:2345af2e293', - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None - }), - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "987654321", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "arches": "x86_64", - "generate_pulp_repos": True, - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ], + [ + ContainerImage( + { + "brew": { + "build": "rh-dotnetcore10-docker-1.0-16", + "package": "rh-dotnetcore10-docker", + "completion_date": "20170511T10:06:09.000-0400", + }, + "parsed_data": { + "layers": [ + "sha512:2345af2e293", + "sha512:12345678980", + "sha512:10987654321", + ] + }, + "parent": ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": 
"rhel-server-docker", + }, + "parsed_data": { + "layers": ["sha512:12345678980", "sha512:10987654321"] + }, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + } + ), + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "987654321", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "arches": "x86_64", + "generate_pulp_repos": True, + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ], ] handler = RebuildImagesOnRPMAdvisoryChange() @@ -1314,255 +1455,240 @@ def test_pulp_compose_generated_just_once(self): query = db.session.query(ArtifactBuild) parent_build = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' + ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82" ).first() self.assertEqual(1, len(parent_build.composes)) - compose_ids = sorted([rel.compose.odcs_compose_id - for rel in parent_build.composes]) + compose_ids = sorted([rel.compose.odcs_compose_id for rel in parent_build.composes]) self.assertEqual([1], compose_ids) child_build = query.filter( - ArtifactBuild.original_nvr == 'rh-dotnetcore10-docker-1.0-16' + ArtifactBuild.original_nvr == "rh-dotnetcore10-docker-1.0-16" ).first() self.assertEqual(1, len(child_build.composes)) - self.mock_prepare_pulp_repo.assert_has_calls([ - call(parent_build, ["content-set-1"]) - ]) + self.mock_prepare_pulp_repo.assert_has_calls([call(parent_build, ["content-set-1"])]) - @patch('freshmaker.odcsclient.create_odcs_client') + @patch("freshmaker.odcsclient.create_odcs_client") def test_do_not_generate_duplicate_pulp_compose(self, create_odcs_client): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "arches": "x86_64", - "odcs_compose_ids": [123], - "compose_sources": ["content-set-1"], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "arches": "x86_64", + "odcs_compose_ids": [123], + "compose_sources": ["content-set-1"], + "published": False, + } + ) + ] ] odcs = create_odcs_client.return_value - odcs.get_compose.return_value = { - "source_type": 4, - "source": "content-set-1" - } + odcs.get_compose.return_value = {"source_type": 4, "source": "content-set-1"} handler = RebuildImagesOnRPMAdvisoryChange() handler._record_batches(batches, self.mock_event) query = db.session.query(ArtifactBuild) - build = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' - ).first() + build = query.filter(ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82").first() self.assertFalse(build.composes) self.mock_prepare_pulp_repo.assert_not_called() def test_no_parent(self): batches = [ - [ContainerImage({ - "brew": { - 
"completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": "Some error occurs while getting this image.", - "arches": "x86_64", - "odcs_compose_ids": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": "Some error occurs while getting this image.", + "arches": "x86_64", + "odcs_compose_ids": [], + "published": False, + } + ) + ] ] handler = RebuildImagesOnRPMAdvisoryChange() handler._record_batches(batches, self.mock_event) query = db.session.query(ArtifactBuild) - build = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' - ).first() + build = query.filter(ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82").first() self.assertEqual(ArtifactBuildState.FAILED.value, build.state) def test_mark_failed_state_if_image_has_error(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": "Some error occurs while getting this image.", - "arches": "x86_64", - "odcs_compose_ids": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": "Some error occurs while getting this image.", + "arches": "x86_64", + "odcs_compose_ids": [], + "published": False, + } + ) + ] ] handler = RebuildImagesOnRPMAdvisoryChange() handler._record_batches(batches, self.mock_event) query = db.session.query(ArtifactBuild) - build = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' - ).first() + build = query.filter(ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82").first() self.assertEqual(ArtifactBuildState.FAILED.value, build.state) def test_mark_state_failed_if_depended_image_is_failed(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": "Some error occured.", - "arches": "x86_64", - "odcs_compose_ids": [], - "published": False, 
- })], - [ContainerImage({ - "brew": { - "build": "rh-dotnetcore10-docker-1.0-16", - "package": "rh-dotnetcore10-docker", - "completion_date": "20170511T10:06:09.000-0400" - }, - 'parsed_data': { - 'layers': [ - 'sha512:378a8ef2730', - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None - }), - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "987654321", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": "Some error occured too.", - "arches": "x86_64", - "odcs_compose_ids": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": "Some error occured.", + "arches": "x86_64", + "odcs_compose_ids": [], + "published": False, + } + ) + ], + [ + ContainerImage( + { + "brew": { + "build": "rh-dotnetcore10-docker-1.0-16", + "package": "rh-dotnetcore10-docker", + "completion_date": "20170511T10:06:09.000-0400", + }, + "parsed_data": { + "layers": [ + "sha512:378a8ef2730", + "sha512:12345678980", + "sha512:10987654321", + ] + }, + "parent": ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": { + "layers": ["sha512:12345678980", "sha512:10987654321"] + }, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + } + ), + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "987654321", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": "Some error occured too.", + "arches": "x86_64", + "odcs_compose_ids": [], + "published": False, + } + ) + ], ] handler = RebuildImagesOnRPMAdvisoryChange() handler._record_batches(batches, self.mock_event) query = db.session.query(ArtifactBuild) - build = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' - ).first() + build = query.filter(ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82").first() self.assertEqual(ArtifactBuildState.FAILED.value, build.state) - build = query.filter( - ArtifactBuild.original_nvr == 'rh-dotnetcore10-docker-1.0-16' - ).first() + build = query.filter(ArtifactBuild.original_nvr == "rh-dotnetcore10-docker-1.0-16").first() self.assertEqual(ArtifactBuildState.FAILED.value, build.state) def test_mark_base_image_failed_if_fail_to_request_boot_iso_compose(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": 
["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": "Some error occured.", - "arches": "x86_64", - "odcs_compose_ids": [], - "published": False, - })], + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": "Some error occured.", + "arches": "x86_64", + "odcs_compose_ids": [], + "published": False, + } + ) + ], ] handler = RebuildImagesOnRPMAdvisoryChange() handler._record_batches(batches, self.mock_event) - build = db.session.query(ArtifactBuild).filter_by( - original_nvr='rhel-server-docker-7.3-82').first() + build = ( + db.session.query(ArtifactBuild) + .filter_by(original_nvr="rhel-server-docker-7.3-82") + .first() + ) self.assertEqual(ArtifactBuildState.FAILED.value, build.state) # Pulp repo should not be prepared for FAILED build. @@ -1570,89 +1696,104 @@ def test_mark_base_image_failed_if_fail_to_request_boot_iso_compose(self): def test_parent_image_already_built(self): batches = [ - [ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })], - [ContainerImage({ - "brew": { - "build": "rh-dotnetcore10-docker-1.0-16", - "package": "rh-dotnetcore10-docker", - "completion_date": "20170511T10:06:09.000-0400" - }, - 'parsed_data': { - 'layers': [ - 'sha512:2345af2e293', - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": ContainerImage({ - "brew": { - "completion_date": "20170420T17:05:37.000-0400", - "build": "rhel-server-docker-7.3-82", - "package": "rhel-server-docker" - }, - 'parsed_data': { - 'layers': [ - 'sha512:12345678980', - 'sha512:10987654321' - ] - }, - "parent": None, - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "123456789", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None - }), - "content_sets": ["content-set-1"], - "repository": "repo-1", - "commit": "987654321", - "target": "target-candidate", - "git_branch": "rhel-7", - "error": None, - "arches": "x86_64", - "odcs_compose_ids": [], - "compose_sources": [], - "published": False, - })] + [ + ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": {"layers": ["sha512:12345678980", "sha512:10987654321"]}, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ], + [ + ContainerImage( + { + "brew": { + "build": "rh-dotnetcore10-docker-1.0-16", + "package": "rh-dotnetcore10-docker", + "completion_date": 
"20170511T10:06:09.000-0400", + }, + "parsed_data": { + "layers": [ + "sha512:2345af2e293", + "sha512:12345678980", + "sha512:10987654321", + ] + }, + "parent": ContainerImage( + { + "brew": { + "completion_date": "20170420T17:05:37.000-0400", + "build": "rhel-server-docker-7.3-82", + "package": "rhel-server-docker", + }, + "parsed_data": { + "layers": ["sha512:12345678980", "sha512:10987654321"] + }, + "parent": None, + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "123456789", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + } + ), + "content_sets": ["content-set-1"], + "repository": "repo-1", + "commit": "987654321", + "target": "target-candidate", + "git_branch": "rhel-7", + "error": None, + "arches": "x86_64", + "odcs_compose_ids": [], + "compose_sources": [], + "published": False, + } + ) + ], ] et_event = ErrataRPMAdvisoryShippedEvent( "msg-id-2", - ErrataAdvisory(123, "RHSA-2017", "REL_PREP", [], - security_impact="None", - product_short_name="product")) - event0 = Event.create(db.session, 'msg-id-1', '1230', - EVENT_TYPES[ErrataRPMAdvisoryShippedEvent]) - event1 = Event.create(db.session, 'msg-id-2', '1231', - EVENT_TYPES[ErrataRPMAdvisoryShippedEvent]) + ErrataAdvisory( + 123, + "RHSA-2017", + "REL_PREP", + [], + security_impact="None", + product_short_name="product", + ), + ) + event0 = Event.create( + db.session, "msg-id-1", "1230", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent] + ) + event1 = Event.create( + db.session, "msg-id-2", "1231", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent] + ) ArtifactBuild.create( - db.session, event0, "parent", "image", + db.session, + event0, + "parent", + "image", state=ArtifactBuildState.DONE, - original_nvr="rhel-server-docker-7.3-82", rebuilt_nvr="some-test-nvr") + original_nvr="rhel-server-docker-7.3-82", + rebuilt_nvr="some-test-nvr", + ) db.session.commit() event1.add_event_dependency(db.session, event0) db.session.commit() @@ -1662,15 +1803,13 @@ def test_parent_image_already_built(self): # Check parent image query = db.session.query(ArtifactBuild) - parent_image = query.filter( - ArtifactBuild.original_nvr == 'rhel-server-docker-7.3-82' - ).all() + parent_image = query.filter(ArtifactBuild.original_nvr == "rhel-server-docker-7.3-82").all() self.assertEqual(len(parent_image), 1) self.assertEqual(ArtifactBuildState.DONE.value, parent_image[0].state) # Check child image child_image = query.filter( - ArtifactBuild.original_nvr == 'rh-dotnetcore10-docker-1.0-16' + ArtifactBuild.original_nvr == "rh-dotnetcore10-docker-1.0-16" ).first() self.assertNotEqual(None, child_image) self.assertEqual(child_image.dep_on, None) diff --git a/tests/helpers.py b/tests/helpers.py index f1659585..ef92192c 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -80,23 +80,21 @@ def unpatch_all(self): class FreshmakerTestCase(unittest.TestCase): - def get_event_from_msg(self, message): - event = events.BaseEvent.from_fedmsg(message['body']['topic'], message['body']) + event = events.BaseEvent.from_fedmsg(message["body"]["topic"], message["body"]) return event def create_consumer(self): hub = MagicMock() hub.config = {} - hub.config['freshmakerconsumer'] = True - hub.config['validate_signatures'] = False + hub.config["freshmakerconsumer"] = True + hub.config["validate_signatures"] = False consumer = freshmaker.consumer.FreshmakerConsumer(hub) consumer.incoming = queue.Queue() return consumer class ModelsTestCase(FreshmakerTestCase): - def setUp(self): super(ModelsTestCase, self).setUp() db.session.remove() @@ 
-104,7 +102,7 @@ def setUp(self): db.create_all() db.session.commit() - self.user = User(username='tester1') + self.user = User(username="tester1") db.session.add(self.user) db.session.commit() @@ -175,13 +173,15 @@ def add_build_rpms(self, build_nvr, rpm_nvrs=None, arches=None): for nvr in rpm_nvrs: for arch in arches: parsed_nvr = koji.parse_NVR(nvr) - self.rpms[build_nvr].append({ - 'arch': arch, - 'name': parsed_nvr["name"], - 'release': parsed_nvr["release"], - 'version': parsed_nvr["version"], - 'nvr': nvr, - }) + self.rpms[build_nvr].append( + { + "arch": arch, + "name": parsed_nvr["name"], + "release": parsed_nvr["release"], + "version": parsed_nvr["version"], + "nvr": nvr, + } + ) def _get_build_rpms(self, build_nvr, arches=None): """ @@ -198,12 +198,12 @@ def _get_build_target(self, build_target): """ if build_target == "guest-rhel-7.4-docker": return { - 'build_tag': 10052, - 'build_tag_name': 'guest-rhel-7.4-docker-build', - 'dest_tag': 10051, - 'dest_tag_name': 'guest-rhel-7.4-candidate', - 'id': 3205, - 'name': 'guest-rhel-7.4-docker' + "build_tag": 10052, + "build_tag_name": "guest-rhel-7.4-docker-build", + "dest_tag": 10051, + "dest_tag_name": "guest-rhel-7.4-candidate", + "id": 3205, + "name": "guest-rhel-7.4-docker", } return None @@ -214,9 +214,7 @@ def _session_list_tags(self, nvr): ret = [] for tag_name, nvrs in self.tags.items(): if nvr in nvrs: - ret.append({ - "name": tag_name - }) + ret.append({"name": tag_name}) return ret def _session_list_tagged(self, tag, **kwargs): @@ -236,9 +234,11 @@ def _session_list_tagged(self, tag, **kwargs): continue packages.append(package) - ret.append({ - 'nvr': nvr, - }) + ret.append( + { + "nvr": nvr, + } + ) return ret @@ -246,8 +246,7 @@ def start(self): """ Starts the Koji mocking. """ - self._mocked_koji_service_patch = patch( - 'freshmaker.kojiservice.KojiService') + self._mocked_koji_service_patch = patch("freshmaker.kojiservice.KojiService") self._koji_service = self._mocked_koji_service_patch.start().return_value self._koji_service.get_build_target.side_effect = self._get_build_target @@ -279,10 +278,11 @@ def mock_koji(f): Wrapper which mocks the Koji. It adds MockedKoji instance as the last *arg of original ufnction. 
""" + @wraps(f) def wrapped(*args, **kwargs): with MockedKoji() as mocked_koji: - return f(*args + (mocked_koji, ), **kwargs) + return f(*args + (mocked_koji,), **kwargs) return wrapped @@ -291,12 +291,12 @@ class FedMsgFactory(object): def __init__(self, *args, **kwargs): self.msg_id = "%s-%s" % (time.strftime("%Y"), uuid.uuid4()) self.msg = {} - self.signature = '123' - self.source_name = 'unittest' - self.source_version = '0.1.1' + self.signature = "123" + self.source_name = "unittest" + self.source_version = "0.1.1" self.timestamp = time.time() - self.topic = '' - self.username = 'freshmaker' + self.topic = "" + self.username = "freshmaker" self.i = random.randint(0, 100) @property @@ -305,132 +305,128 @@ def inner_msg(self): def produce(self): message_body = { - 'i': self.i, - 'msg_id': self.msg_id, - 'topic': self.topic, - 'username': self.username, - 'timestamp': self.timestamp, - 'signature': self.signature, - 'source_name': self.source_name, - 'source_version': self.source_version, - 'msg': self.inner_msg, - } - return { - 'body': message_body, - 'topic': self.topic + "i": self.i, + "msg_id": self.msg_id, + "topic": self.topic, + "username": self.username, + "timestamp": self.timestamp, + "signature": self.signature, + "source_name": self.source_name, + "source_version": self.source_version, + "msg": self.inner_msg, } + return {"body": message_body, "topic": self.topic} class ModuleStateChangeMessage(FedMsgFactory): - def __init__(self, name, stream, state='ready', build_id=None, *args, **kwargs): + def __init__(self, name, stream, state="ready", build_id=None, *args, **kwargs): super(ModuleStateChangeMessage, self).__init__(*args, **kwargs) - self.topic = 'org.fedoraproject.prod.mbs.module.state.change' + self.topic = "org.fedoraproject.prod.mbs.module.state.change" self.name = name self.stream = stream self.state = state self.build_id = build_id if build_id else random.randint(0, 1000) - self.scmurl = "git://pkgs.fedoraproject.org/modules/%s?#%s" % (self.name, '123') + self.scmurl = "git://pkgs.fedoraproject.org/modules/%s?#%s" % (self.name, "123") self._states_dict = {} for state, code in BUILD_STATES.items(): - self._states_dict[state] = {'state_name': state, 'state': code} + self._states_dict[state] = {"state_name": state, "state": code} @property def inner_msg(self): return { - 'component_builds': [], - 'id': self.build_id, - 'modulemd': '', - 'name': self.name, - 'owner': 'freshmaker', - 'scmurl': self.scmurl, - 'state': self._states_dict[self.state]['state'], - 'state_name': self.state, - 'state_reason': None, - 'state_trace': [], - 'state_url': u'/module-build-service/1/module-builds/%s' % self.build_id, - 'stream': u'master', - 'tasks': {}, - 'time_completed': None, - 'time_modified': None, - 'time_submitted': time.time(), - 'version': time.strftime("%Y%m%d%H%M%S"), + "component_builds": [], + "id": self.build_id, + "modulemd": "", + "name": self.name, + "owner": "freshmaker", + "scmurl": self.scmurl, + "state": self._states_dict[self.state]["state"], + "state_name": self.state, + "state_reason": None, + "state_trace": [], + "state_url": "/module-build-service/1/module-builds/%s" % self.build_id, + "stream": "master", + "tasks": {}, + "time_completed": None, + "time_modified": None, + "time_submitted": time.time(), + "version": time.strftime("%Y%m%d%H%M%S"), } class DistGitMessage(FedMsgFactory): def __init__(self, namespace, repo, branch, rev, *args, **kwargs): super(DistGitMessage, self).__init__(*args, **kwargs) - self.topic = 'org.fedoraproject.prod.git.receive' + 
self.topic = "org.fedoraproject.prod.git.receive" self.namespace = namespace self.repo = repo self.branch = branch self.rev = rev self.stats = { - 'files': {}, - 'total': { - 'additions': 0, - 'deletions': 0, - 'files': 0, - 'lines': 0, - } + "files": {}, + "total": { + "additions": 0, + "deletions": 0, + "files": 0, + "lines": 0, + }, } @property def inner_msg(self): return { - 'commit': { - 'repo': self.repo, - 'namespace': self.namespace, - 'branch': self.branch, - 'rev': self.rev, - 'agent': 'freshmaker', - 'name': 'freshmaker', - 'username': 'freshmaker', - 'email': 'freshmaker@example.com', - 'message': 'test message', - 'summary': 'test', - 'path': "/srv/git/repositories/%s/%s.git" % (self.namespace, self.repo), - 'seen': False, - 'stats': self.stats, + "commit": { + "repo": self.repo, + "namespace": self.namespace, + "branch": self.branch, + "rev": self.rev, + "agent": "freshmaker", + "name": "freshmaker", + "username": "freshmaker", + "email": "freshmaker@example.com", + "message": "test message", + "summary": "test", + "path": "/srv/git/repositories/%s/%s.git" % (self.namespace, self.repo), + "seen": False, + "stats": self.stats, } } def add_changed_file(self, filename, additions, deletions): - self.stats['files'].setdefault(filename, {})['additions'] = additions - self.stats['files'][filename]['deletions'] = deletions - self.stats['files'][filename]['lines'] = additions + deletions - self.stats['total']['additions'] += additions - self.stats['total']['deletions'] += deletions - self.stats['total']['files'] += 1 - self.stats['total']['lines'] += self.stats['files'][filename]['lines'] + self.stats["files"].setdefault(filename, {})["additions"] = additions + self.stats["files"][filename]["deletions"] = deletions + self.stats["files"][filename]["lines"] = additions + deletions + self.stats["total"]["additions"] += additions + self.stats["total"]["deletions"] += deletions + self.stats["total"]["files"] += 1 + self.stats["total"]["lines"] += self.stats["files"][filename]["lines"] class KojiTaskStateChangeMessage(FedMsgFactory): def __init__(self, task_id, old_state, new_state, *args, **kwargs): super(KojiTaskStateChangeMessage, self).__init__(*args, **kwargs) - self.topic = 'org.fedoraproject.prod.buildsys.task.state.change' - self.attribute = 'state' + self.topic = "org.fedoraproject.prod.buildsys.task.state.change" + self.attribute = "state" self.task_id = task_id self.old_state = old_state self.new_state = new_state - self.owner = 'freshmaker' - self.method = 'build' + self.owner = "freshmaker" + self.method = "build" @property def inner_msg(self): return { - 'attribute': self.attribute, - 'id': self.task_id, - 'method': self.method, - 'new': self.new_state, - 'old': self.old_state, - 'owner': self.owner, + "attribute": self.attribute, + "id": self.task_id, + "method": self.method, + "new": self.new_state, + "old": self.old_state, + "owner": self.owner, } class ConsumerBaseTest(ModelsTestCase): - def setUp(self): super(ConsumerBaseTest, self).setUp() self.patched_parsers = patch( @@ -439,7 +435,7 @@ def setUp(self): return_value=[ "freshmaker.parsers.internal:FreshmakerManualRebuildParser", "freshmaker.parsers.odcs:ComposeStateChangeParser", - ] + ], ) self.patched_parsers.start() self.patched_handlers = patch( @@ -448,7 +444,7 @@ def setUp(self): return_value=[ "freshmaker.handlers.internal:UpdateDBOnODCSComposeFail", "freshmaker.handlers.koji:RebuildImagesOnODCSComposeDone", - ] + ], ) self.patched_handlers.start() @@ -458,16 +454,18 @@ def tearDown(self): 
self.patched_handlers.stop() def _compose_state_change_msg(self, state=None): - msg = {'body': { - "msg_id": "2017-7afcb214-cf82-4130-92d2-22f45cf59cf7", - "topic": "org.fedoraproject.prod.odcs.state.change", - "signature": "qRZ6oXBpKD/q8BTjBNa4MREkAPxT+KzI8Oret+TSKazGq/6gk0uuprdFpkfBXLR5dd4XDoh3NQWp\nyC74VYTDVqJR7IsEaqHtrv01x1qoguU/IRWnzrkGwqXm+Es4W0QZjHisBIRRZ4ywYBG+DtWuskvy\n6/5Mc3dXaUBcm5TnT0c=\n", - "msg": { - "compose": { - "id": 1, - "state": 4, - } + msg = { + "body": { + "msg_id": "2017-7afcb214-cf82-4130-92d2-22f45cf59cf7", + "topic": "org.fedoraproject.prod.odcs.state.change", + "signature": "qRZ6oXBpKD/q8BTjBNa4MREkAPxT+KzI8Oret+TSKazGq/6gk0uuprdFpkfBXLR5dd4XDoh3NQWp\nyC74VYTDVqJR7IsEaqHtrv01x1qoguU/IRWnzrkGwqXm+Es4W0QZjHisBIRRZ4ywYBG+DtWuskvy\n6/5Mc3dXaUBcm5TnT0c=\n", + "msg": { + "compose": { + "id": 1, + "state": 4, + } + }, } - }} + } return msg diff --git a/tests/test_auth.py b/tests/test_auth.py index c1abefbb..d92d03eb 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -41,25 +41,25 @@ class TestLoadSSLUserFromRequest(ModelsTestCase): - def setUp(self): super(TestLoadSSLUserFromRequest, self).setUp() - self.user = User(username='CN=tester1,L=prod,DC=example,DC=com') + self.user = User(username="CN=tester1,L=prod,DC=example,DC=com") db.session.add(self.user) db.session.commit() def test_create_new_user(self): environ_base = { - 'SSL_CLIENT_VERIFY': 'SUCCESS', - 'SSL_CLIENT_S_DN': 'CN=client,L=prod,DC=example,DC=com', + "SSL_CLIENT_VERIFY": "SUCCESS", + "SSL_CLIENT_S_DN": "CN=client,L=prod,DC=example,DC=com", } with app.test_request_context(environ_base=environ_base): load_ssl_user_from_request(flask.request) expected_user = db.session.query(User).filter( - User.username == 'CN=client,L=prod,DC=example,DC=com')[0] + User.username == "CN=client,L=prod,DC=example,DC=com" + )[0] self.assertEqual(expected_user.id, flask.g.user.id) self.assertEqual(expected_user.username, flask.g.user.username) @@ -69,8 +69,8 @@ def test_create_new_user(self): def test_return_existing_user(self): environ_base = { - 'SSL_CLIENT_VERIFY': 'SUCCESS', - 'SSL_CLIENT_S_DN': self.user.username, + "SSL_CLIENT_VERIFY": "SUCCESS", + "SSL_CLIENT_S_DN": self.user.username, } with app.test_request_context(environ_base=environ_base): @@ -84,40 +84,37 @@ def test_return_existing_user(self): def test_401_if_ssl_client_verify_not_success(self): environ_base = { - 'SSL_CLIENT_VERIFY': 'GENEROUS', - 'SSL_CLIENT_S_DN': self.user.username, + "SSL_CLIENT_VERIFY": "GENEROUS", + "SSL_CLIENT_S_DN": self.user.username, } with app.test_request_context(environ_base=environ_base): with self.assertRaises(Unauthorized) as ctx: load_ssl_user_from_request(flask.request) - self.assertIn('Cannot verify client: GENEROUS', - ctx.exception.description) + self.assertIn("Cannot verify client: GENEROUS", ctx.exception.description) def test_401_if_cn_not_set(self): environ_base = { - 'SSL_CLIENT_VERIFY': 'SUCCESS', + "SSL_CLIENT_VERIFY": "SUCCESS", } with app.test_request_context(environ_base=environ_base): with self.assertRaises(Unauthorized) as ctx: load_ssl_user_from_request(flask.request) - self.assertIn('Unable to get user information (DN) from client certificate', - ctx.exception.description) + self.assertIn( + "Unable to get user information (DN) from client certificate", + ctx.exception.description, + ) class TestLoadKrbOrSSLUserFromRequest(ModelsTestCase): - @patch("freshmaker.auth.load_ssl_user_from_request") @patch("freshmaker.auth.load_krb_user_from_request") - def test_load_krb_or_ssl_user_from_request_remote_user( 
- self, load_krb_user, load_ssl_user): + def test_load_krb_or_ssl_user_from_request_remote_user(self, load_krb_user, load_ssl_user): load_krb_user.return_value = "krb_user" load_ssl_user.return_value = "ssl_user" - environ_base = { - 'REMOTE_USER': 'newuser@EXAMPLE.COM' - } + environ_base = {"REMOTE_USER": "newuser@EXAMPLE.COM"} with app.test_request_context(environ_base=environ_base): user = load_krb_or_ssl_user_from_request(flask.request) @@ -125,14 +122,13 @@ def test_load_krb_or_ssl_user_from_request_remote_user( @patch("freshmaker.auth.load_ssl_user_from_request") @patch("freshmaker.auth.load_krb_user_from_request") - def test_load_krb_or_ssl_user_from_request_ssl_client( - self, load_krb_user, load_ssl_user): + def test_load_krb_or_ssl_user_from_request_ssl_client(self, load_krb_user, load_ssl_user): load_krb_user.return_value = "krb_user" load_ssl_user.return_value = "ssl_user" environ_base = { - 'SSL_CLIENT_VERIFY': 'SUCCESS', - 'SSL_CLIENT_S_DN': 'ssl_user', + "SSL_CLIENT_VERIFY": "SUCCESS", + "SSL_CLIENT_S_DN": "ssl_user", } with app.test_request_context(environ_base=environ_base): @@ -142,23 +138,20 @@ def test_load_krb_or_ssl_user_from_request_ssl_client( class TestLoadKrbUserFromRequest(ModelsTestCase): sample_groups = { - 'cn=admins,ou=groups,dc=example,dc=com', - 'cn=devel,ou=groups,dc=example,dc=com', + "cn=admins,ou=groups,dc=example,dc=com", + "cn=devel,ou=groups,dc=example,dc=com", } - @patch('freshmaker.auth.query_ldap_groups') + @patch("freshmaker.auth.query_ldap_groups") def test_create_new_user(self, query_ldap_groups): query_ldap_groups.return_value = self.sample_groups - environ_base = { - 'REMOTE_USER': 'newuser@EXAMPLE.COM' - } + environ_base = {"REMOTE_USER": "newuser@EXAMPLE.COM"} with app.test_request_context(environ_base=environ_base): load_krb_user_from_request(flask.request) - expected_user = db.session.query(User).filter( - User.username == 'newuser')[0] + expected_user = db.session.query(User).filter(User.username == "newuser")[0] self.assertEqual(expected_user.id, flask.g.user.id) self.assertEqual(expected_user.username, flask.g.user.username) @@ -167,20 +160,17 @@ def test_create_new_user(self, query_ldap_groups): self.assertEqual(2, len(flask.g.groups)) self.assertEqual(self.sample_groups, flask.g.groups) - @patch('freshmaker.auth.query_ldap_groups') + @patch("freshmaker.auth.query_ldap_groups") def test_return_existing_user(self, query_ldap_groups): query_ldap_groups.return_value = self.sample_groups original_users_count = db.session.query(User.id).count() - environ_base = { - 'REMOTE_USER': '{0}@EXAMPLE.COM'.format(self.user.username) - } + environ_base = {"REMOTE_USER": "{0}@EXAMPLE.COM".format(self.user.username)} with app.test_request_context(environ_base=environ_base): load_krb_user_from_request(flask.request) - self.assertEqual(original_users_count, - db.session.query(User.id).count()) + self.assertEqual(original_users_count, db.session.query(User.id).count()) self.assertEqual(self.user.id, flask.g.user.id) self.assertEqual(self.user.username, flask.g.user.username) self.assertEqual(self.sample_groups, flask.g.groups) @@ -189,51 +179,47 @@ def test_401_if_remote_user_not_present(self): with app.test_request_context(): with self.assertRaises(Unauthorized) as ctx: load_krb_user_from_request(flask.request) - self.assertIn('REMOTE_USER is not present in request.', - ctx.exception.description) + self.assertIn("REMOTE_USER is not present in request.", ctx.exception.description) class TestLoadOpenIDCUserFromRequest(ModelsTestCase): - - 
@patch('freshmaker.auth.requests.get') + @patch("freshmaker.auth.requests.get") def test_create_new_user(self, get): get.return_value.status_code = 200 get.return_value.json.return_value = { - 'groups': ['tester', 'admin'], - 'name': 'new_user', + "groups": ["tester", "admin"], + "name": "new_user", } environ_base = { - 'REMOTE_USER': 'new_user', - 'OIDC_access_token': '39283', - 'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/', - 'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups', + "REMOTE_USER": "new_user", + "OIDC_access_token": "39283", + "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/", + "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups", } with app.test_request_context(environ_base=environ_base): load_openidc_user(flask.request) - new_user = db.session.query(User).filter( - User.username == 'new_user')[0] + new_user = db.session.query(User).filter(User.username == "new_user")[0] self.assertEqual(new_user, flask.g.user) - self.assertEqual('new_user', flask.g.user.username) - self.assertEqual(sorted(['admin', 'tester']), - sorted(flask.g.groups)) + self.assertEqual("new_user", flask.g.user.username) + self.assertEqual(sorted(["admin", "tester"]), sorted(flask.g.groups)) - @patch('freshmaker.auth.requests.get') + @patch("freshmaker.auth.requests.get") def test_return_existing_user(self, get): get.return_value.status_code = 200 get.return_value.json.return_value = { - 'groups': ['testers', 'admins'], - 'name': self.user.username, + "groups": ["testers", "admins"], + "name": self.user.username, } environ_base = { - 'REMOTE_USER': self.user.username, - 'OIDC_access_token': '39283', - 'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/', - 'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups', + "REMOTE_USER": self.user.username, + "OIDC_access_token": "39283", + "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/", + "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups", } with app.test_request_context(environ_base=environ_base): @@ -246,33 +232,33 @@ def test_return_existing_user(self, get): # Ensure existing user is set in g self.assertEqual(self.user.id, flask.g.user.id) - self.assertEqual(['admins', 'testers'], sorted(flask.g.groups)) + self.assertEqual(["admins", "testers"], sorted(flask.g.groups)) def test_401_if_remote_user_not_present(self): environ_base = { # Missing REMOTE_USER here - 'OIDC_access_token': '39283', - 'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/', - 'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups', + "OIDC_access_token": "39283", + "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/", + "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups", } with app.test_request_context(environ_base=environ_base): self.assertRaises(Unauthorized, load_openidc_user, flask.request) def test_401_if_access_token_not_present(self): environ_base = { - 'REMOTE_USER': 'tester1', + "REMOTE_USER": "tester1", # Missing OIDC_access_token here - 'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/', - 'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups', + "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/", + "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups", } with app.test_request_context(environ_base=environ_base): self.assertRaises(Unauthorized, load_openidc_user, flask.request) def test_401_if_scope_not_present(self): environ_base = { 
- 'REMOTE_USER': 'tester1', - 'OIDC_access_token': '39283', - 'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/', + "REMOTE_USER": "tester1", + "OIDC_access_token": "39283", + "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/", # Missing OIDC_CLAIM_scope here } with app.test_request_context(environ_base=environ_base): @@ -280,43 +266,42 @@ def test_401_if_scope_not_present(self): def test_401_if_required_scope_not_present_in_token_scope(self): environ_base = { - 'REMOTE_USER': 'new_user', - 'OIDC_access_token': '39283', - 'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/', - 'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups', + "REMOTE_USER": "new_user", + "OIDC_access_token": "39283", + "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/", + "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups", } - with patch.object(freshmaker.auth.conf, - 'auth_openidc_required_scopes', ['new-compose']): + with patch.object(freshmaker.auth.conf, "auth_openidc_required_scopes", ["new-compose"]): with app.test_request_context(environ_base=environ_base): with self.assertRaises(Unauthorized) as ctx: load_openidc_user(flask.request) self.assertTrue( - 'Required OIDC scope new-compose not present.' in - ctx.exception.description) + "Required OIDC scope new-compose not present." in ctx.exception.description + ) class TestQueryLdapGroups(FreshmakerTestCase): """Test auth.query_ldap_groups""" - @patch('freshmaker.auth.ldap.initialize') + @patch("freshmaker.auth.ldap.initialize") def test_get_groups(self, initialize): initialize.return_value.search_s.return_value = [ ( - 'uid=tom_hanks,ou=users,dc=example,dc=com', + "uid=tom_hanks,ou=users,dc=example,dc=com", { - 'memberOf': [ - b'cn=Toy Story,ou=groups,dc=example,dc=com', - b'cn=Forrest Gump,ou=groups,dc=example,dc=com', + "memberOf": [ + b"cn=Toy Story,ou=groups,dc=example,dc=com", + b"cn=Forrest Gump,ou=groups,dc=example,dc=com", ], - } + }, ) ] - groups = query_ldap_groups('tom_hanks') + groups = query_ldap_groups("tom_hanks") expected = { - 'cn=Toy Story,ou=groups,dc=example,dc=com', - 'cn=Forrest Gump,ou=groups,dc=example,dc=com', + "cn=Toy Story,ou=groups,dc=example,dc=com", + "cn=Forrest Gump,ou=groups,dc=example,dc=com", } self.assertEqual(expected, groups) @@ -330,40 +315,34 @@ def setUp(self): self.login_manager = Mock() def test_select_kerberos_auth_backend(self): - init_auth(self.login_manager, 'kerberos') - self.login_manager.request_loader.assert_called_once_with( - load_krb_user_from_request) + init_auth(self.login_manager, "kerberos") + self.login_manager.request_loader.assert_called_once_with(load_krb_user_from_request) def test_select_openidc_auth_backend(self): - init_auth(self.login_manager, 'openidc') - self.login_manager.request_loader.assert_called_once_with( - load_openidc_user) + init_auth(self.login_manager, "openidc") + self.login_manager.request_loader.assert_called_once_with(load_openidc_user) def test_select_ssl_auth_backend(self): - init_auth(self.login_manager, 'ssl') - self.login_manager.request_loader.assert_called_once_with( - load_ssl_user_from_request) + init_auth(self.login_manager, "ssl") + self.login_manager.request_loader.assert_called_once_with(load_ssl_user_from_request) def test_select_kerberos_or_ssl_auth_backend(self): - init_auth(self.login_manager, 'kerberos_or_ssl') - self.login_manager.request_loader.assert_called_once_with( - load_krb_or_ssl_user_from_request) + init_auth(self.login_manager, "kerberos_or_ssl") + 
self.login_manager.request_loader.assert_called_once_with(load_krb_or_ssl_user_from_request) def test_not_use_auth_backend(self): - init_auth(self.login_manager, 'noauth') + init_auth(self.login_manager, "noauth") self.login_manager.request_loader.assert_not_called() def test_error_if_select_an_unknown_backend(self): - self.assertRaises(ValueError, init_auth, self.login_manager, 'xxx') - self.assertRaises(ValueError, init_auth, self.login_manager, '') + self.assertRaises(ValueError, init_auth, self.login_manager, "xxx") + self.assertRaises(ValueError, init_auth, self.login_manager, "") self.assertRaises(ValueError, init_auth, self.login_manager, None) def test_init_auth_no_ldap_server(self): - with patch.object(freshmaker.auth.conf, 'auth_ldap_server', ''): - self.assertRaises(ValueError, init_auth, self.login_manager, - 'kerberos') + with patch.object(freshmaker.auth.conf, "auth_ldap_server", ""): + self.assertRaises(ValueError, init_auth, self.login_manager, "kerberos") def test_init_auths_no_ldap_user_base(self): - with patch.object(freshmaker.auth.conf, 'auth_ldap_user_base', ''): - self.assertRaises(ValueError, init_auth, self.login_manager, - 'kerberos') + with patch.object(freshmaker.auth.conf, "auth_ldap_user_base", ""): + self.assertRaises(ValueError, init_auth, self.login_manager, "kerberos") diff --git a/tests/test_config.py b/tests/test_config.py index d7b07cd2..355d5b51 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -30,22 +30,24 @@ class TestConfig(helpers.FreshmakerTestCase): - def test_krb_auth_ccache_file(self): self.assertEqual( conf.krb_auth_ccache_file, - "freshmaker_cc_%s_%s" % (os.getpid(), - threading.current_thread().ident)) - - -@pytest.mark.parametrize('value', ( - 'not a dict', - {'admin': 'not a dict'}, - {'admin': {'groups': 'not a list'}}, - {'admin': {'users': 'not a list'}}, - {'admin': {'invalid key': []}}, - {'admin': {'groups': [1]}}, -)) + "freshmaker_cc_%s_%s" % (os.getpid(), threading.current_thread().ident), + ) + + +@pytest.mark.parametrize( + "value", + ( + "not a dict", + {"admin": "not a dict"}, + {"admin": {"groups": "not a list"}}, + {"admin": {"users": "not a list"}}, + {"admin": {"invalid key": []}}, + {"admin": {"groups": [1]}}, + ), +) def test_permissions(value): - with pytest.raises(ValueError, match='The permissions configuration must be a dictionary'): + with pytest.raises(ValueError, match="The permissions configuration must be a dictionary"): conf.permissions = value diff --git a/tests/test_consumer.py b/tests/test_consumer.py index c773f38a..32da3476 100644 --- a/tests/test_consumer.py +++ b/tests/test_consumer.py @@ -53,12 +53,13 @@ def test_consumer_processing_message(self, global_consumer, handle, handler_can_ self.assertEqual(event.msg_id, "ModuleBuilt handled") @mock.patch("freshmaker.handlers.koji.RebuildImagesOnODCSComposeDone.can_handle") - @mock.patch("freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.order", - new_callable=mock.PropertyMock) + @mock.patch( + "freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.order", + new_callable=mock.PropertyMock, + ) @mock.patch("freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.can_handle") @mock.patch("freshmaker.consumer.get_global_consumer") - def test_consumer_handlers_order(self, global_consumer, handler1, - handler1_order, handler2): + def test_consumer_handlers_order(self, global_consumer, handler1, handler1_order, handler2): """ Tests that consumer parses the message, forwards the event to proper handler and is able to get the further work from @@ 
-92,8 +93,8 @@ def mocked_handler2(*args, **kwargs): @mock.patch("freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.can_handle") @mock.patch("freshmaker.consumer.get_global_consumer") def test_consumer_multiple_handlers_called( - self, global_consumer, handler1_can_handle, handler1, handler2_can_handle, - handler2): + self, global_consumer, handler1_can_handle, handler1, handler2_can_handle, handler2 + ): consumer = self.create_consumer() global_consumer.return_value = consumer @@ -118,11 +119,11 @@ def test_consumer_subscribe_to_specified_topics(self, global_consumer): self.assertIn(mock.call(topic, callback), consumer.hub.subscribe.call_args_list) @mock.patch("freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.can_handle") - @mock.patch("freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.handle", - autospec=True) + @mock.patch("freshmaker.handlers.internal.UpdateDBOnODCSComposeFail.handle", autospec=True) @mock.patch("freshmaker.consumer.get_global_consumer") def test_consumer_mark_event_as_failed_on_exception( - self, global_consumer, handle, handler_can_handle): + self, global_consumer, handle, handler_can_handle + ): """ Tests that Consumer.consume marks the DB Event as failed in case there is an error in a handler. @@ -151,5 +152,5 @@ def mocked_handle(cls, msg): self.assertTrue(build.state_reason, "Failed with traceback") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_container.py b/tests/test_container.py index f80c982b..cb6ef857 100644 --- a/tests/test_container.py +++ b/tests/test_container.py @@ -709,9 +709,7 @@ def test_resolve_content_sets(): "toplevel_url": "http://download.example.com/odcs/prod/odcs-124", }, ] - flexmock(RetryingODCS).should_receive("get_compose").and_return( - odcs_composes - ).one_by_one() + flexmock(RetryingODCS).should_receive("get_compose").and_return(odcs_composes).one_by_one() pyxis_gql = PyxisGQL(url="graphql.pyxis.local", cert="/path/to/cert") @@ -892,9 +890,7 @@ def test_resolve_published(): "toplevel_url": "http://download.example.com/odcs/prod/odcs-124", }, ] - flexmock(RetryingODCS).should_receive("get_compose").and_return( - odcs_composes - ).one_by_one() + flexmock(RetryingODCS).should_receive("get_compose").and_return(odcs_composes).one_by_one() pyxis_gql = PyxisGQL(url="graphql.pyxis.local", cert="/path/to/cert") diff --git a/tests/test_errata.py b/tests/test_errata.py index f882c039..a63f4041 100644 --- a/tests/test_errata.py +++ b/tests/test_errata.py @@ -26,7 +26,10 @@ from freshmaker.errata import Errata, ErrataAdvisory from freshmaker.events import ( - BrewSignRPMEvent, GitRPMSpecChangeEvent, ErrataAdvisoryStateChangedEvent) + BrewSignRPMEvent, + GitRPMSpecChangeEvent, + ErrataAdvisoryStateChangedEvent, +) from tests import helpers @@ -34,41 +37,44 @@ class MockedErrataAPI(object): """ Class mocking methods accessing Errata API in Errata class. 
""" + def __init__(self, errata_rest_get, errata_http_get=None): - errata_rest_get.side_effect = (self.errata_rest_get) + errata_rest_get.side_effect = self.errata_rest_get if errata_http_get: errata_http_get.side_effect = self.errata_http_get self.builds_json = { "PRODUCT1": [ { - "libntirpc-1.4.3-4.el6rhs": - { - "PRODUCT1-3.2-NFS": - {"x86_64": ["libntirpc-devel-1.4.3-4.el6rhs.x86_64.rpm"], - "SRPMS": ["libntirpc-1.4.3-4.el6rhs.src.rpm"]} + "libntirpc-1.4.3-4.el6rhs": { + "PRODUCT1-3.2-NFS": { + "x86_64": ["libntirpc-devel-1.4.3-4.el6rhs.x86_64.rpm"], + "SRPMS": ["libntirpc-1.4.3-4.el6rhs.src.rpm"], + } } } ], "PRODUCT2": [ { - "libntirpc-1.4.3-4.el7rhgs": - { - "PRODUCT2-3.2-NFS": - {"x86_64": ["libntirpc-devel-1.4.3-4.el7rhgs.x86_64.rpm"], - "SRPMS": ["libntirpc-1.4.3-4.el7rhgs.src.rpm"]} + "libntirpc-1.4.3-4.el7rhgs": { + "PRODUCT2-3.2-NFS": { + "x86_64": ["libntirpc-devel-1.4.3-4.el7rhgs.x86_64.rpm"], + "SRPMS": ["libntirpc-1.4.3-4.el7rhgs.src.rpm"], + } } } - ] + ], } self.builds = {} self.builds["libntirpc-1.4.3-4.el6rhs"] = { "all_errata": [{"id": 28484, "name": "RHSA-2017:28484", "status": "QE"}], - "rpms_signed": True} + "rpms_signed": True, + } self.builds["libntirpc-1.4.3-4.el7rhgs"] = { "all_errata": [{"id": 28484, "name": "RHSA-2017:28484", "status": "QE"}], - "rpms_signed": True} + "rpms_signed": True, + } self.advisory_json = { "id": 28484, @@ -80,9 +86,7 @@ def __init__(self, errata_rest_get, errata_http_get=None): "id": 89, "short_name": "product", }, - "people": { - "reporter": "botas/dev-jenkins.some.strange.letters.redhat.com@REDHAT.COM" - }, + "people": {"reporter": "botas/dev-jenkins.some.strange.letters.redhat.com@REDHAT.COM"}, } self.advisory_rest_json = { @@ -100,7 +104,7 @@ def __init__(self, errata_rest_get, errata_http_get=None): "content": { "cve": "CVE-2015-3253 CVE-2016-6814", } - } + }, } self.bugs = [ @@ -148,29 +152,17 @@ def __init__(self, errata_rest_get, errata_http_get=None): "is_module": False, "variant_arch": { "PRODUCT1": { - "x86_64": [ - "libntirpc-1.4.3-4.el6rhs.x86_64.rpm" - ], - "ppc64le": [ - "libntirpc-1.4.3-4.el6rhs.ppc64le.rpm" - ], + "x86_64": ["libntirpc-1.4.3-4.el6rhs.x86_64.rpm"], + "ppc64le": ["libntirpc-1.4.3-4.el6rhs.ppc64le.rpm"], "s390x": ["libntirpc-1.4.3-4.el6rhs.s390x.rpm"], - "aarch64": [ - "libntirpc-1.4.3-4.el6rhs.aarch64.rpm" - ], + "aarch64": ["libntirpc-1.4.3-4.el6rhs.aarch64.rpm"], "SRPMS": ["libntirpc-1.4.3-4.el6rhs.src.rpm"], }, "PRODUCT2": { - "x86_64": [ - "libntirpc-1.4.3-4.el6rhs.x86_64.rpm" - ], - "ppc64le": [ - "libntirpc-1.4.3-4.el6rhs.ppc64le.rpm" - ], + "x86_64": ["libntirpc-1.4.3-4.el6rhs.x86_64.rpm"], + "ppc64le": ["libntirpc-1.4.3-4.el6rhs.ppc64le.rpm"], "s390x": ["libntirpc-1.4.3-4.el6rhs.s390x.rpm"], - "aarch64": [ - "libntirpc-1.4.3-4.el6rhs.aarch64.rpm" - ], + "aarch64": ["libntirpc-1.4.3-4.el6rhs.aarch64.rpm"], "SRPMS": ["libntirpc-1.4.3-4.el6rhs.src.rpm"], }, }, @@ -192,25 +184,19 @@ def __init__(self, errata_rest_get, errata_http_get=None): "nevr": "pkg1-0:4.18.0-305.10.2.rt7.83.el8_4", "id": 1000, "is_module": False, - "is_signed": True + "is_signed": True, }, "pkg2-4.18.0-305.10.2.rt7.83.el8_4": { "nvr": "pkg2-4.18.0-305.10.2.rt7.83.el8_4", "nevr": "pkg2-0:4.18.0-305.10.2.rt7.83.el8_4", "id": 1001, "is_module": False, - "is_signed": True - } + "is_signed": True, + }, } ], - "sig_key": { - "name": "releasekey", - "keyid": "abcdef01" - }, - "container_sig_key": { - "name": "releasekey", - "keyid": "abcdef01" - } + "sig_key": {"name": "releasekey", "keyid": "abcdef01"}, + "container_sig_key": 
{"name": "releasekey", "keyid": "abcdef01"}, } } @@ -268,14 +254,12 @@ def test_advisories_from_event(self, errata_http_get, errata_rest_get): self.assertEqual(advisories[0].content_types, ["rpm"]) self.assertEqual(advisories[0].security_impact, "important") self.assertEqual(advisories[0].product_short_name, "product") - self.assertEqual(advisories[0].cve_list, - ["CVE-2015-3253", "CVE-2016-6814"]) + self.assertEqual(advisories[0].cve_list, ["CVE-2015-3253", "CVE-2016-6814"]) self.assertEqual(advisories[0].has_hightouch_bug, True) @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_advisories_from_event_empty_cve( - self, errata_http_get, errata_rest_get): + def test_advisories_from_event_empty_cve(self, errata_http_get, errata_rest_get): mocked_errata = MockedErrataAPI(errata_rest_get, errata_http_get) mocked_errata.advisory_rest_json["content"]["content"]["cve"] = "" event = BrewSignRPMEvent("msgid", "libntirpc-1.4.3-4.el7rhgs") @@ -285,8 +269,7 @@ def test_advisories_from_event_empty_cve( @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_advisories_from_event_no_bugs( - self, errata_http_get, errata_rest_get): + def test_advisories_from_event_no_bugs(self, errata_http_get, errata_rest_get): mocked_errata = MockedErrataAPI(errata_rest_get, errata_http_get) mocked_errata.bugs = [] event = BrewSignRPMEvent("msgid", "libntirpc-1.4.3-4.el7rhgs") @@ -296,8 +279,7 @@ def test_advisories_from_event_no_bugs( @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_advisories_from_event_empty_bug_flags( - self, errata_http_get, errata_rest_get): + def test_advisories_from_event_empty_bug_flags(self, errata_http_get, errata_rest_get): mocked_errata = MockedErrataAPI(errata_rest_get, errata_http_get) for bug in mocked_errata.bugs: bug["flags"] = "" @@ -324,10 +306,12 @@ def test_advisories_from_event_unsupported_event(self): @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") def test_advisories_from_event_errata_state_change_event( - self, errata_http_get, errata_rest_get): + self, errata_http_get, errata_rest_get + ): MockedErrataAPI(errata_rest_get, errata_http_get) event = ErrataAdvisoryStateChangedEvent( - "msgid", ErrataAdvisory(28484, "name", "SHIPPED_LIVE", ['rpm'])) + "msgid", ErrataAdvisory(28484, "name", "SHIPPED_LIVE", ["rpm"]) + ) advisories = self.errata.advisories_from_event(event) self.assertEqual(len(advisories), 1) self.assertEqual(advisories[0].errata_id, 28484) @@ -354,77 +338,77 @@ def test_builds_signed_missing_data(self, errata_http_get, errata_rest_get): builds["pkg1-4.18.0-305.10.2.rt7.83.el8_4"] = {} self.assertFalse(self.errata.builds_signed(28484)) - @patch('freshmaker.errata.requests.get') + @patch("freshmaker.errata.requests.get") def test_get_errata_repo_ids(self, get): get.return_value.json.return_value = { - 'rhel-6-server-eus-source-rpms__6_DOT_7__x86_64': [ - ], - 'rhel-6-server-eus-optional-debug-rpms__6_DOT_7__i386': [ - '/path/to/package.rpm', - '/path/to/package1.rpm', - '/path/to/package2.rpm', - ], - 'rhel-6-server-eus-rpms__6_DOT_7__x86_64': [ + "rhel-6-server-eus-source-rpms__6_DOT_7__x86_64": [], + "rhel-6-server-eus-optional-debug-rpms__6_DOT_7__i386": [ + "/path/to/package.rpm", + "/path/to/package1.rpm", + "/path/to/package2.rpm", ], + "rhel-6-server-eus-rpms__6_DOT_7__x86_64": [], } repo_ids = self.errata.get_pulp_repository_ids(25718) - 
self.assertEqual(set(['rhel-6-server-eus-source-rpms__6_DOT_7__x86_64', - 'rhel-6-server-eus-optional-debug-rpms__6_DOT_7__i386', - 'rhel-6-server-eus-rpms__6_DOT_7__x86_64']), - set(repo_ids)) + self.assertEqual( + set( + [ + "rhel-6-server-eus-source-rpms__6_DOT_7__x86_64", + "rhel-6-server-eus-optional-debug-rpms__6_DOT_7__i386", + "rhel-6-server-eus-rpms__6_DOT_7__x86_64", + ] + ), + set(repo_ids), + ) @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_rhel_release_from_product_version( - self, errata_http_get, errata_rest_get): + def test_rhel_release_from_product_version(self, errata_http_get, errata_rest_get): MockedErrataAPI(errata_rest_get, errata_http_get) - ret = self.errata._rhel_release_from_product_version( - 28484, "PRODUCT1-3.2-NFS") + ret = self.errata._rhel_release_from_product_version(28484, "PRODUCT1-3.2-NFS") self.assertEqual(ret, "RHEL-6-foobar") @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") def test_rhel_release_from_product_version_unknown_product_ver( - self, errata_http_get, errata_rest_get): + self, errata_http_get, errata_rest_get + ): MockedErrataAPI(errata_rest_get, errata_http_get) with self.assertRaises(ValueError): - self.errata._rhel_release_from_product_version( - 28484, "PRODUCT1-2.9-NFS") + self.errata._rhel_release_from_product_version(28484, "PRODUCT1-2.9-NFS") @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_get_nvrs( - self, errata_http_get, errata_rest_get): + def test_get_nvrs(self, errata_http_get, errata_rest_get): MockedErrataAPI(errata_rest_get, errata_http_get) srpms = self.errata.get_srpm_nvrs(28484, "") binary_rpms = self.errata.get_binary_rpm_nvrs(28484) - self.assertEqual(set(srpms), set(['libntirpc-1.4.3-4.el7rhgs', - 'libntirpc-1.4.3-4.el6rhs'])) - self.assertEqual(set(binary_rpms), set(['libntirpc-devel-1.4.3-4.el6rhs', - 'libntirpc-devel-1.4.3-4.el7rhgs'])) + self.assertEqual(set(srpms), set(["libntirpc-1.4.3-4.el7rhgs", "libntirpc-1.4.3-4.el6rhs"])) + self.assertEqual( + set(binary_rpms), + set(["libntirpc-devel-1.4.3-4.el6rhs", "libntirpc-devel-1.4.3-4.el7rhgs"]), + ) @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_get_binary_rpms_rhel_7( - self, errata_http_get, errata_rest_get): + def test_get_binary_rpms_rhel_7(self, errata_http_get, errata_rest_get): MockedErrataAPI(errata_rest_get, errata_http_get) ret = self.errata.get_binary_rpm_nvrs(28484, "RHEL-7") - self.assertEqual(ret, ['libntirpc-devel-1.4.3-4.el7rhgs']) + self.assertEqual(ret, ["libntirpc-devel-1.4.3-4.el7rhgs"]) @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_get_srpm_nvrs_empty( - self, errata_http_get, errata_rest_get): + def test_get_srpm_nvrs_empty(self, errata_http_get, errata_rest_get): api = MockedErrataAPI(errata_rest_get, errata_http_get) api.builds_json = { "PRODUCT1": [ { - "libntirpc-1.4.3-4.el7rhgs": - { - "PRODUCT2-3.2-NFS": - {"x86_64": ["libntirpc-devel-1.4.3-4.el7rhgs.x86_64.rpm"]} + "libntirpc-1.4.3-4.el7rhgs": { + "PRODUCT2-3.2-NFS": { + "x86_64": ["libntirpc-devel-1.4.3-4.el7rhgs.x86_64.rpm"] + } } } ] @@ -434,18 +418,16 @@ def test_get_srpm_nvrs_empty( @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_get_binary_nvrs_empty( - self, errata_http_get, errata_rest_get): + def test_get_binary_nvrs_empty(self, errata_http_get, errata_rest_get): api = MockedErrataAPI(errata_rest_get, 
errata_http_get) api.builds_json = { "PRODUCT1": [ { - "libntirpc-1.4.3-4.el7rhgs": - { - "PRODUCT2-3.2-NFS": - {"SRPMS": [ - "libntirpc-devel-1.4.3-4.el7rhgs.x86_64.rpm"]} + "libntirpc-1.4.3-4.el7rhgs": { + "PRODUCT2-3.2-NFS": { + "SRPMS": ["libntirpc-devel-1.4.3-4.el7rhgs.x86_64.rpm"] } + } } ] } @@ -454,16 +436,17 @@ def test_get_binary_nvrs_empty( @patch.object(Errata, "_errata_rest_get") @patch.object(Errata, "_errata_http_get") - def test_get_attached_build_nvrs( - self, errata_http_get, errata_rest_get): + def test_get_attached_build_nvrs(self, errata_http_get, errata_rest_get): api = MockedErrataAPI(errata_rest_get, errata_http_get) api.builds_json = { "PRODUCT1": [ { - "libreoffice-flatpak-8050020220215203934.84f422e1": - { - "Hidden-PRODUCT2": - {"x86_64": ["libfontenc-1.1.3-8.module+el8.5.0+12446+59af0ebd.x86_64.rpm"]} + "libreoffice-flatpak-8050020220215203934.84f422e1": { + "Hidden-PRODUCT2": { + "x86_64": [ + "libfontenc-1.1.3-8.module+el8.5.0+12446+59af0ebd.x86_64.rpm" + ] + } } } ] @@ -476,13 +459,12 @@ def test_get_attached_build_nvrs( def test_errata_get_cve_affected_rpm_nvrs(self, errata_http_get, errata_rest_get): MockedErrataAPI(errata_rest_get, errata_http_get) ret = self.errata.get_cve_affected_rpm_nvrs(28484) - self.assertEqual(ret, ['libntirpc-1.4.3-4.el6rhs']) + self.assertEqual(ret, ["libntirpc-1.4.3-4.el6rhs"]) @patch.object(Errata, "_get_attached_builds") @patch.object(Errata, "_get_blocking_advisories") def test_get_blocking_advisories_builds(self, get_blocks, get_builds): - get_builds.return_value = {"product3": [{"nvr1": "some_info"}, - {"nvr2": "some_info"}]} + get_builds.return_value = {"product3": [{"nvr1": "some_info"}, {"nvr2": "some_info"}]} get_blocks.side_effect = [["28484"], []] builds = self.errata.get_blocking_advisories_builds("123") @@ -492,16 +474,15 @@ def test_get_blocking_advisories_builds(self, get_blocks, get_builds): @patch.object(Errata, "_get_attached_builds") @patch.object(Errata, "_get_blocking_advisories") - def test_get_recursive_blocking_advisories_builds(self, get_blocks, - get_builds): + def test_get_recursive_blocking_advisories_builds(self, get_blocks, get_builds): get_blocks.side_effect = [["12:34"], ["56:78"], []] - get_builds.side_effect = [{"product1": [{"nvr1": "some_info", - "nvr2": "some_info"}], - "product2": [{"nvr3": "some_info", - "nvr4": "some_info"}] - }, - {"product3": [{"nvr5": "some_info"}]} - ] + get_builds.side_effect = [ + { + "product1": [{"nvr1": "some_info", "nvr2": "some_info"}], + "product2": [{"nvr3": "some_info", "nvr4": "some_info"}], + }, + {"product3": [{"nvr5": "some_info"}]}, + ] builds = self.errata.get_blocking_advisories_builds("123") @@ -514,8 +495,7 @@ def setUp(self): super(TestErrataAuthorizedGet, self).setUp() self.errata = Errata("https://localhost/") - self.patcher = helpers.Patcher( - 'freshmaker.errata.') + self.patcher = helpers.Patcher("freshmaker.errata.") self.requests_get = self.patcher.patch("requests.get") self.response = MagicMock() self.response.json.return_value = {"foo": "bar"} @@ -545,7 +525,8 @@ def test_errata_authorized_get_kerberos_exception_401(self): error_response = MagicMock() error_response.status_code = 401 error_response.raise_for_status.side_effect = HTTPError( - "Expected exception", response=error_response) + "Expected exception", response=error_response + ) self.requests_get.side_effect = [error_response, self.response] data = self.errata._errata_authorized_get("http://localhost/test") diff --git a/tests/test_handler.py b/tests/test_handler.py index 
cdc4afc3..fc7d4ad7 100644 --- a/tests/test_handler.py +++ b/tests/test_handler.py @@ -33,8 +33,12 @@ from freshmaker.events import ErrataRPMAdvisoryShippedEvent, BotasErrataShippedEvent from freshmaker.handlers import ContainerBuildHandler, ODCSComposeNotReady from freshmaker.models import ( - ArtifactBuild, ArtifactBuildState, ArtifactBuildCompose, - Compose, Event, EVENT_TYPES + ArtifactBuild, + ArtifactBuildState, + ArtifactBuildCompose, + Compose, + Event, + EVENT_TYPES, ) from freshmaker.errors import UnprocessableEntity, ProgrammingError from freshmaker.types import ArtifactType, EventState @@ -60,7 +64,8 @@ class TestContext(helpers.ModelsTestCase): def test_context_event(self): db_event = Event.get_or_create( - db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent) + db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent + ) db.session.commit() handler = MyHandler() handler.set_context(db_event) @@ -70,9 +75,9 @@ def test_context_event(self): def test_context_artifact_build(self): db_event = Event.get_or_create( - db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent) - build = ArtifactBuild.create(db.session, db_event, "parent1-1-4", - "image") + db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent + ) + build = ArtifactBuild.create(db.session, db_event, "parent1-1-4", "image") db.session.commit() handler = MyHandler() handler.set_context(build) @@ -86,7 +91,6 @@ def test_context_unknown(self): class TestDryRun(helpers.FreshmakerTestCase): - def test_force_dry_run(self): handler = MyHandler() self.assertFalse(handler.dry_run) @@ -96,7 +100,6 @@ def test_force_dry_run(self): class TestGetRepoURLs(helpers.ModelsTestCase): - def setUp(self): super(TestGetRepoURLs, self).setUp() @@ -110,10 +113,13 @@ def setUp(self): db.session.add(self.compose_4) self.event = Event.create( - db.session, 'msg-1', 'search-key-1', + db.session, + "msg-1", + "search-key-1", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.BUILDING, - released=False) + released=False, + ) build_args = {} build_args["repository"] = "repo" @@ -125,15 +131,23 @@ def setUp(self): build_args["renewed_odcs_compose_ids"] = None self.build_1 = ArtifactBuild.create( - db.session, self.event, 'build-1', ArtifactType.IMAGE, + db.session, + self.event, + "build-1", + ArtifactType.IMAGE, state=ArtifactBuildState.PLANNED, - original_nvr="foo-1-2") + original_nvr="foo-1-2", + ) self.build_1.build_args = json.dumps(build_args) self.build_2 = ArtifactBuild.create( - db.session, self.event, 'build-2', ArtifactType.IMAGE, + db.session, + self.event, + "build-2", + ArtifactType.IMAGE, state=ArtifactBuildState.PLANNED, - original_nvr="foo-2-2") + original_nvr="foo-2-2", + ) self.build_2.build_args = json.dumps(build_args) db.session.commit() @@ -146,9 +160,7 @@ def setUp(self): ) for build_id, compose_id in rels: - db.session.add( - ArtifactBuildCompose( - build_id=build_id, compose_id=compose_id)) + db.session.add(ArtifactBuildCompose(build_id=build_id, compose_id=compose_id)) db.session.commit() @@ -161,7 +173,8 @@ def mocked_odcs_get_compose(compose_id): self.patch_odcs_get_compose = patch( "freshmaker.handlers.ContainerBuildHandler.odcs_get_compose", - side_effect=mocked_odcs_get_compose) + side_effect=mocked_odcs_get_compose, + ) self.odcs_get_compose = self.patch_odcs_get_compose.start() def tearDown(self): @@ -178,13 +191,18 @@ def test_get_repo_urls_only_odcs_composes(self): repos = handler.get_repo_urls(self.build_1) self.assertEqual(repos, []) - 
@patch.object(freshmaker.conf, 'image_extra_repo', new={ - 'build-3': "http://localhost/test.repo" - }) + @patch.object( + freshmaker.conf, "image_extra_repo", new={"build-3": "http://localhost/test.repo"} + ) def test_get_repo_urls_extra_image_repo(self): build_3 = ArtifactBuild.create( - db.session, self.event, 'build-3', ArtifactType.IMAGE, - state=ArtifactBuildState.PLANNED, original_nvr="build-3-1") + db.session, + self.event, + "build-3", + ArtifactType.IMAGE, + state=ArtifactBuildState.PLANNED, + original_nvr="build-3-1", + ) handler = MyHandler() repos = handler.get_repo_urls(build_3) @@ -192,21 +210,27 @@ def test_get_repo_urls_extra_image_repo(self): @patch("time.time") @patch("freshmaker.handlers.ContainerBuildHandler.build_container") - def test_build_image_artifact_build_only_odcs_composes( - self, build_container, time): + def test_build_image_artifact_build_only_odcs_composes(self, build_container, time): time.return_value = 1234567.1234 handler = MyHandler() handler.build_image_artifact_build(self.build_1) build_container.assert_called_once_with( - 'git://pkgs.devel.redhat.com/repo#hash', 'branch', 'target', - arch_override='x86_64', compose_ids=[5, 6, 7, 8], flatpak=False, - isolated=True, koji_parent_build=None, release='2.1234567', - repo_urls=None, operator_csv_modifications_url=None) + "git://pkgs.devel.redhat.com/repo#hash", + "branch", + "target", + arch_override="x86_64", + compose_ids=[5, 6, 7, 8], + flatpak=False, + isolated=True, + koji_parent_build=None, + release="2.1234567", + repo_urls=None, + operator_csv_modifications_url=None, + ) @patch("time.time") @patch("freshmaker.handlers.ContainerBuildHandler.build_container") - def test_build_image_artifact_build_renewed_odcs_composes( - self, build_container, time): + def test_build_image_artifact_build_renewed_odcs_composes(self, build_container, time): time.return_value = 1234567.1234 build_args = json.loads(self.build_1.build_args) build_args["renewed_odcs_compose_ids"] = [7300, 7301] @@ -216,68 +240,91 @@ def test_build_image_artifact_build_renewed_odcs_composes( handler = MyHandler() handler.build_image_artifact_build(self.build_1) build_container.assert_called_once_with( - 'git://pkgs.devel.redhat.com/repo#hash', 'branch', 'target', - arch_override='x86_64', compose_ids=[5, 6, 7, 8, 7300, 7301], flatpak=False, - isolated=True, koji_parent_build=None, release='2.1234567', repo_urls=None, - operator_csv_modifications_url=None) + "git://pkgs.devel.redhat.com/repo#hash", + "branch", + "target", + arch_override="x86_64", + compose_ids=[5, 6, 7, 8, 7300, 7301], + flatpak=False, + isolated=True, + koji_parent_build=None, + release="2.1234567", + repo_urls=None, + operator_csv_modifications_url=None, + ) @patch("time.time") @patch("freshmaker.handlers.ContainerBuildHandler.build_container") - def test_build_image_artifact_build_repo_urls( - self, build_container, time): + def test_build_image_artifact_build_repo_urls(self, build_container, time): time.return_value = 1234567.1234 handler = MyHandler() handler.build_image_artifact_build(self.build_1, ["http://localhost/x.repo"]) - repo_urls = ['http://localhost/x.repo'] + repo_urls = ["http://localhost/x.repo"] build_container.assert_called_once_with( - 'git://pkgs.devel.redhat.com/repo#hash', 'branch', 'target', - arch_override='x86_64', compose_ids=[5, 6, 7, 8], flatpak=False, - isolated=True, koji_parent_build=None, release='2.1234567', - repo_urls=repo_urls, operator_csv_modifications_url=None) + "git://pkgs.devel.redhat.com/repo#hash", + "branch", + "target", + 
arch_override="x86_64", + compose_ids=[5, 6, 7, 8], + flatpak=False, + isolated=True, + koji_parent_build=None, + release="2.1234567", + repo_urls=repo_urls, + operator_csv_modifications_url=None, + ) @patch("time.time") @patch("freshmaker.kojiservice.KojiService.session") - @patch.object(freshmaker.conf, 'koji_container_scratch_build', new=False) + @patch.object(freshmaker.conf, "koji_container_scratch_build", new=False) def test_build_image_artifact_build_flatpak(self, koji_session, time): time.return_value = 1234567.1234 handler = MyHandler() flatpak_build = ArtifactBuild.create( - db.session, self.event, 'flatpak-build-1', ArtifactType.IMAGE, + db.session, + self.event, + "flatpak-build-1", + ArtifactType.IMAGE, state=ArtifactBuildState.PLANNED, - original_nvr="foo-1-2") + original_nvr="foo-1-2", + ) build_args = json.loads(self.build_1.build_args) - build_args.update({ - "flatpak": True, - "renewed_odcs_compose_ids": [7300], - }) + build_args.update( + { + "flatpak": True, + "renewed_odcs_compose_ids": [7300], + } + ) flatpak_build.build_args = json.dumps(build_args) db.session.add( - ArtifactBuildCompose(build_id=flatpak_build.id, compose_id=self.compose_1.id)) + ArtifactBuildCompose(build_id=flatpak_build.id, compose_id=self.compose_1.id) + ) handler.build_image_artifact_build(flatpak_build) koji_session.buildContainer.assert_called_once_with( - 'git://pkgs.devel.redhat.com/repo#hash', 'target', { - 'scratch': False, - 'git_branch': 'branch', - 'compose_ids': [5, 7300], - 'flatpak': True, - 'isolated': True, - 'arch_override': 'x86_64', - 'release': '2.1234567' - } + "git://pkgs.devel.redhat.com/repo#hash", + "target", + { + "scratch": False, + "git_branch": "branch", + "compose_ids": [5, 7300], + "flatpak": True, + "isolated": True, + "arch_override": "x86_64", + "release": "2.1234567", + }, ) @patch("freshmaker.handlers.ContainerBuildHandler.build_container") - def test_build_image_artifact_build_repo_urls_compose_not_ready( - self, build_container): - + def test_build_image_artifact_build_repo_urls_compose_not_ready(self, build_container): def mocked_odcs_get_compose(compose_id): return { "id": compose_id, "result_repofile": "http://localhost/%d.repo" % compose_id, "state": COMPOSE_STATES["generating"], } + self.odcs_get_compose.side_effect = mocked_odcs_get_compose with self.assertRaises(ODCSComposeNotReady): @@ -290,16 +337,20 @@ def mocked_odcs_get_compose(compose_id): class TestAllowBuildBasedOnAllowlist(helpers.FreshmakerTestCase): """Test BaseHandler.allow_build""" - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'global': { - 'image': any_( - { - 'advisory_state': 'ON_QA', - 'advisory_name': 'RHBA-.*', - } - ) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "global": { + "image": any_( + { + "advisory_state": "ON_QA", + "advisory_name": "RHBA-.*", + } + ) + }, }, - }) + ) def test_allowlist_not_overwritten(self): """ Test that "global" config section is not overwritten by handler-specific @@ -307,275 +358,267 @@ def test_allowlist_not_overwritten(self): """ handler = MyHandler() handler.name = "foo" - allowed = handler.allow_build( - ArtifactType.IMAGE, advisory_state="SHIPPED_LIVE") + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_state="SHIPPED_LIVE") self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'name': 'test' - } - } - }) + @patch.object( + freshmaker.conf, "handler_build_allowlist", new={"MyHandler": {"image": {"name": "test"}}} + ) 
def test_allow_build_in_allowlist(self): - """ Test if artifact is in the handlers allowlist """ + """Test if artifact is in the handlers allowlist""" handler = MyHandler() container = {"name": "test", "branch": "branch"} - allow = handler.allow_build(ArtifactType.IMAGE, - name=container["name"], - branch=container["branch"]) + allow = handler.allow_build( + ArtifactType.IMAGE, name=container["name"], branch=container["branch"] + ) assert allow - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'name': 'test1' - } - } - }) + @patch.object( + freshmaker.conf, "handler_build_allowlist", new={"MyHandler": {"image": {"name": "test1"}}} + ) def test_allow_build_not_in_allowlist(self): - """ Test if artifact is not in the handlers allowlist """ + """Test if artifact is not in the handlers allowlist""" handler = MyHandler() container = {"name": "test", "branch": "branch"} - allow = handler.allow_build(ArtifactType.IMAGE, - name=container["name"], - branch=container["branch"]) + allow = handler.allow_build( + ArtifactType.IMAGE, name=container["name"], branch=container["branch"] + ) assert not allow - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'name': 'te(st' - } - } - }) + @patch.object( + freshmaker.conf, "handler_build_allowlist", new={"MyHandler": {"image": {"name": "te(st"}}} + ) def test_allow_build_regex_exception(self): - """ If there is a regex error, method will raise UnprocessableEntity error """ + """If there is a regex error, method will raise UnprocessableEntity error""" handler = MyHandler() container = {"name": "test", "branch": "branch"} with self.assertRaises(UnprocessableEntity): - handler.allow_build(ArtifactType.IMAGE, - name=container["name"], - branch=container["branch"]) - - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'advisory_state': ['REL_PREP', 'SHIPPED_LIVE'] - } - } - }) + handler.allow_build( + ArtifactType.IMAGE, name=container["name"], branch=container["branch"] + ) + + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"MyHandler": {"image": {"advisory_state": ["REL_PREP", "SHIPPED_LIVE"]}}}, + ) def test_rule_not_defined(self): handler = MyHandler() - allowed = handler.allow_build( - ArtifactType.IMAGE, advisory_state='SHIPPED_LIVE') + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_state="SHIPPED_LIVE") self.assertTrue(allowed) allowed = handler.allow_build( - ArtifactType.IMAGE, advisory_state='SHIPPED_LIVE', published=True) + ArtifactType.IMAGE, advisory_state="SHIPPED_LIVE", published=True + ) self.assertTrue(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'advisory_state': ['REL_PREP', 'SHIPPED_LIVE'], - 'published': False + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "MyHandler": { + "image": {"advisory_state": ["REL_PREP", "SHIPPED_LIVE"], "published": False} } - } - }) + }, + ) def test_boolean_rule(self): handler = MyHandler() allowed = handler.allow_build( - ArtifactType.IMAGE, advisory_state='SHIPPED_LIVE', published=True) + ArtifactType.IMAGE, advisory_state="SHIPPED_LIVE", published=True + ) self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'advisory_state': ['REL_PREP', 'SHIPPED_LIVE'] - } - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"MyHandler": {"image": {"advisory_state": 
["REL_PREP", "SHIPPED_LIVE"]}}}, + ) def test_not_allow_if_none_passed_rule_is_configured(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, state='SHIPPED_LIVE') + allowed = handler.allow_build(ArtifactType.IMAGE, state="SHIPPED_LIVE") self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={}) + @patch.object(freshmaker.conf, "handler_build_allowlist", new={}) def test_not_allow_if_allowlist_is_not_configured(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, state='SHIPPED_LIVE') + allowed = handler.allow_build(ArtifactType.IMAGE, state="SHIPPED_LIVE") self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'advisory_state': ['REL_PREP', 'SHIPPED_LIVE'] - } - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"MyHandler": {"image": {"advisory_state": ["REL_PREP", "SHIPPED_LIVE"]}}}, + ) def test_define_rule_values_as_list(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_state='SHIPPED_LIVE') + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_state="SHIPPED_LIVE") self.assertTrue(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'advisory_name': r'RHSA-\d+:\d+' - } - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={"MyHandler": {"image": {"advisory_name": r"RHSA-\d+:\d+"}}}, + ) def test_define_rule_value_as_single_regex_string(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:31861') + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_name="RHSA-2017:31861") self.assertTrue(allowed) - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHBA-2017:31861') + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_name="RHBA-2017:31861") self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': { - 'advisory_name': r'RHSA-\d+:\d+', - 'advisory_state': 'REL_PREP' - } - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "MyHandler": {"image": {"advisory_name": r"RHSA-\d+:\d+", "advisory_state": "REL_PREP"}} + }, + ) def test_AND_rule(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:1000', - advisory_state='REL_PREP') + allowed = handler.allow_build( + ArtifactType.IMAGE, advisory_name="RHSA-2017:1000", advisory_state="REL_PREP" + ) self.assertTrue(allowed) - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:1000', - advisory_state='SHIPPED_LIVE') + allowed = handler.allow_build( + ArtifactType.IMAGE, advisory_name="RHSA-2017:1000", advisory_state="SHIPPED_LIVE" + ) self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': any_( - {'advisory_name': r'RHSA-\d+:\d+'}, - {'advisory_state': 'REL_PREP'}, - ) - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "MyHandler": { + "image": any_( + {"advisory_name": r"RHSA-\d+:\d+"}, + {"advisory_state": "REL_PREP"}, + ) + } + }, + ) def test_OR_rule(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:1000', - advisory_state='SHIPPED_LIVE') + allowed = handler.allow_build( + ArtifactType.IMAGE, 
advisory_name="RHSA-2017:1000", advisory_state="SHIPPED_LIVE" + ) self.assertTrue(allowed) - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017', - advisory_state='REL_PREP') + allowed = handler.allow_build( + ArtifactType.IMAGE, advisory_name="RHSA-2017", advisory_state="REL_PREP" + ) self.assertTrue(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': all_( - {'advisory_name': r'RHSA-\d+:\d+'}, - any_( - {'has_hightouch_bugs': True}, - {'severity': ['critical', 'important']} - ), - ) - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "MyHandler": { + "image": all_( + {"advisory_name": r"RHSA-\d+:\d+"}, + any_({"has_hightouch_bugs": True}, {"severity": ["critical", "important"]}), + ) + } + }, + ) def test_OR_between_subrules(self): handler = MyHandler() - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:1000', - has_hightouch_bugs=True, - severity="low") + allowed = handler.allow_build( + ArtifactType.IMAGE, + advisory_name="RHSA-2017:1000", + has_hightouch_bugs=True, + severity="low", + ) self.assertTrue(allowed) - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:1000', - has_hightouch_bugs=False, - severity="critical") + allowed = handler.allow_build( + ArtifactType.IMAGE, + advisory_name="RHSA-2017:1000", + has_hightouch_bugs=False, + severity="critical", + ) self.assertTrue(allowed) - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHSA-2017:1000', - has_hightouch_bugs=False, - severity="low") + allowed = handler.allow_build( + ArtifactType.IMAGE, + advisory_name="RHSA-2017:1000", + has_hightouch_bugs=False, + severity="low", + ) self.assertFalse(allowed) - allowed = handler.allow_build(ArtifactType.IMAGE, - advisory_name='RHBA-2017:1000', - has_hightouch_bugs=False, - severity="critical") + allowed = handler.allow_build( + ArtifactType.IMAGE, + advisory_name="RHBA-2017:1000", + has_hightouch_bugs=False, + severity="critical", + ) self.assertFalse(allowed) - @patch.object(freshmaker.conf, 'handler_build_allowlist', new={ - 'MyHandler': { - 'image': {'advisory_name': r'RHSA-\d+:\d+'}, - } - }) - @patch.object(freshmaker.conf, 'handler_build_blocklist', new={ - 'MyHandler': { - 'image': {'advisory_name': r'RHSA-2016:\d+'}, - } - }) + @patch.object( + freshmaker.conf, + "handler_build_allowlist", + new={ + "MyHandler": { + "image": {"advisory_name": r"RHSA-\d+:\d+"}, + } + }, + ) + @patch.object( + freshmaker.conf, + "handler_build_blocklist", + new={ + "MyHandler": { + "image": {"advisory_name": r"RHSA-2016:\d+"}, + } + }, + ) def test_blocklist(self): handler = MyHandler() - allowed = handler.allow_build( - ArtifactType.IMAGE, advisory_name='RHSA-2017:1000') + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_name="RHSA-2017:1000") self.assertTrue(allowed) - allowed = handler.allow_build( - ArtifactType.IMAGE, advisory_name='RHSA-2016:1000') + allowed = handler.allow_build(ArtifactType.IMAGE, advisory_name="RHSA-2016:1000") self.assertFalse(allowed) class TestStartToBuildImages(helpers.ModelsTestCase): - - @patch('freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build') + @patch("freshmaker.handlers.ContainerBuildHandler.build_image_artifact_build") def test_start_to_build_images(self, build_artifact): - build_artifact.side_effect = [HTTPError('500 Server Error'), 1] + build_artifact.side_effect = [HTTPError("500 Server Error"), 1] db_event = Event.get_or_create( - db.session, 
'msg1', 'current_event', ErrataRPMAdvisoryShippedEvent) - build = ArtifactBuild.create(db.session, db_event, 'parent1-1-4', - 'image') - build2 = ArtifactBuild.create(db.session, db_event, 'parent1-1-5', - 'image') + db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent + ) + build = ArtifactBuild.create(db.session, db_event, "parent1-1-4", "image") + build2 = ArtifactBuild.create(db.session, db_event, "parent1-1-5", "image") db.session.commit() handler = MyHandler() - with self.assertLogs('freshmaker', 'ERROR'): + with self.assertLogs("freshmaker", "ERROR"): handler.start_to_build_images([build, build2]) self.assertEqual(build.state, ArtifactBuildState.FAILED.value) self.assertEqual(build2.state, ArtifactBuildState.BUILD.value) self.assertEqual(len(db.session.query(ArtifactBuild).all()), 2) - @patch('freshmaker.kojiservice.KojiService.get_ocp_versions_range') + @patch("freshmaker.kojiservice.KojiService.get_ocp_versions_range") def test_start_to_build_invalid_bundle_image(self, mock_get_ocp): - mock_get_ocp.return_value = 'v4.7,v4.8' + mock_get_ocp.return_value = "v4.7,v4.8" - db_event = Event.get_or_create( - db.session, 'msg1', 'current_event', BotasErrataShippedEvent) + db_event = Event.get_or_create(db.session, "msg1", "current_event", BotasErrataShippedEvent) build = ArtifactBuild.create( - db.session, db_event, 'foobar-2-123', - 'image', state=ArtifactBuildState.PLANNED.value + db.session, db_event, "foobar-2-123", "image", state=ArtifactBuildState.PLANNED.value ) - build.build_args = json.dumps({'repo': 'foobar'}) - build.original_nvr = 'foobar-2-123' + build.build_args = json.dumps({"repo": "foobar"}) + build.original_nvr = "foobar-2-123" db.session.commit() handler = MyHandler() handler.build_image_artifact_build(build) self.assertEqual(build.state, ArtifactBuildState.FAILED.value) - self.assertTrue('invalid openshift versions range' in build.state_reason) + self.assertTrue("invalid openshift versions range" in build.state_reason) diff --git a/tests/test_image.py b/tests/test_image.py index 6908c631..eaf122e1 100644 --- a/tests/test_image.py +++ b/tests/test_image.py @@ -407,9 +407,7 @@ def test_resolve_published(self): "brew": { "build": "package-name-1-4-12.10", }, - "repositories": [ - {"published": True} - ], + "repositories": [{"published": True}], } ) @@ -426,10 +424,8 @@ def test_resolve_published_unpublished(self): "brew": { "build": "package-name-1-4-12.10", }, - "repositories": [ - {"published": False} - ], - "edges": {"rpm_manifest": {"data": {"rpms": [{"name": "foobar"}]}}} + "repositories": [{"published": False}], + "edges": {"rpm_manifest": {"data": {"rpms": [{"name": "foobar"}]}}}, } ) @@ -438,7 +434,7 @@ def test_resolve_published_unpublished(self): image.resolve_published(pyxis) self.assertEqual(image["published"], False) pyxis.find_images_by_nvr.assert_called_once_with("package-name-1-4-12.10") - self.assertEqual(image["rpm_manifest"][0], {"rpms": [{'name': 'foobar'}]}) + self.assertEqual(image["rpm_manifest"][0], {"rpms": [{"name": "foobar"}]}) def test_resolve_published_not_image_in_pyxis(self): image = ContainerImage.create( @@ -2188,7 +2184,6 @@ def test_images_with_content_set_packages_leaf_container_images( def test_content_sets_of_multiarch_images_to_rebuild( self, koji_task_request, koji_get_build, gql_client ): - gql_client.return_value.execute.side_effect = [ self.fake_pyxis_find_repos, self.fake_pyxis_find_images_by_nvrs, diff --git a/tests/test_image_verifier.py b/tests/test_image_verifier.py index 57876fb1..8a0204d6 100644 --- 
a/tests/test_image_verifier.py +++ b/tests/test_image_verifier.py @@ -55,7 +55,7 @@ def test_get_verify_repository_multiple_repos(self): "auto_rebuild_tags": ["latest"], "published": True, "repository": "foo", - } + }, ] self.assertRaisesRegex( ValueError, diff --git a/tests/test_kojiservice.py b/tests/test_kojiservice.py index 4f3b21a7..8bd0aa6b 100644 --- a/tests/test_kojiservice.py +++ b/tests/test_kojiservice.py @@ -94,9 +94,7 @@ def test_get_ocp_versions_range(mock_koji): @mock.patch("freshmaker.kojiservice.ZipFile") @mock.patch("freshmaker.kojiservice.BytesIO") @mock.patch("freshmaker.kojiservice.yaml") -def test_get_bundle_csv_success( - mock_yaml, mock_bytesio, mock_zipfile, mock_get, mock_koji -): +def test_get_bundle_csv_success(mock_yaml, mock_bytesio, mock_zipfile, mock_get, mock_koji): mock_session = mock.Mock() mock_session.getBuild.return_value = { "id": 123, diff --git a/tests/test_messaging.py b/tests/test_messaging.py index 2cfa8947..1f74eb3f 100644 --- a/tests/test_messaging.py +++ b/tests/test_messaging.py @@ -37,7 +37,7 @@ class BaseMessagingTest(helpers.FreshmakerTestCase): - """ Base class for messaging related tests """ + """Base class for messaging related tests""" def setUp(self): super(BaseMessagingTest, self).setUp() @@ -51,106 +51,95 @@ def tearDown(self): class TestSelectMessagingBackend(BaseMessagingTest): """Test messaging backend is selected correctly in publish method""" - @patch('freshmaker.messaging._fedmsg_publish') - @patch('freshmaker.messaging._rhmsg_publish') - @patch('freshmaker.messaging._in_memory_publish') - def test_select_backend( - self, _in_memory_publish, _rhmsg_publish, _fedmsg_publish): - fake_msg = {'build': 'n-v-r'} + @patch("freshmaker.messaging._fedmsg_publish") + @patch("freshmaker.messaging._rhmsg_publish") + @patch("freshmaker.messaging._in_memory_publish") + def test_select_backend(self, _in_memory_publish, _rhmsg_publish, _fedmsg_publish): + fake_msg = {"build": "n-v-r"} mock_messaging_backends = { - 'fedmsg': {'publish': _fedmsg_publish}, - 'rhmsg': {'publish': _rhmsg_publish}, - 'in_memory': {'publish': _in_memory_publish}, + "fedmsg": {"publish": _fedmsg_publish}, + "rhmsg": {"publish": _rhmsg_publish}, + "in_memory": {"publish": _in_memory_publish}, } - with patch.dict('freshmaker.messaging._messaging_backends', - mock_messaging_backends): - with patch.object(conf, 'messaging_sender', new='fedmsg'): - publish('images.ready', fake_msg) - _fedmsg_publish.assert_called_once_with( - 'images.ready', fake_msg) - - with patch.object(conf, 'messaging_sender', new='rhmsg'): - publish('images.ready', fake_msg) - _rhmsg_publish.assert_called_once_with( - 'images.ready', fake_msg) - - with patch.object(conf, 'messaging_sender', new='in_memory'): - publish('images.ready', fake_msg) - _in_memory_publish.assert_called_once_with( - 'images.ready', fake_msg) + with patch.dict("freshmaker.messaging._messaging_backends", mock_messaging_backends): + with patch.object(conf, "messaging_sender", new="fedmsg"): + publish("images.ready", fake_msg) + _fedmsg_publish.assert_called_once_with("images.ready", fake_msg) + + with patch.object(conf, "messaging_sender", new="rhmsg"): + publish("images.ready", fake_msg) + _rhmsg_publish.assert_called_once_with("images.ready", fake_msg) + + with patch.object(conf, "messaging_sender", new="in_memory"): + publish("images.ready", fake_msg) + _in_memory_publish.assert_called_once_with("images.ready", fake_msg) def test_raise_error_if_backend_not_exists(self): - messaging_patcher = patch.object(conf, 
'messaging_sender', new='XXXX') - self.assertRaisesRegex( - ValueError, 'Unsupported messaging system', - messaging_patcher.start) + messaging_patcher = patch.object(conf, "messaging_sender", new="XXXX") + self.assertRaisesRegex(ValueError, "Unsupported messaging system", messaging_patcher.start) class TestPublishToFedmsg(BaseMessagingTest): """Test publish message to fedmsg using _fedmsg_publish backend""" - @patch.object(conf, 'messaging_sender', new='fedmsg') - @patch.object(conf, 'messaging_backends', - new={'fedmsg': {'SERVICE': 'freshmaker'}}) - @patch('fedmsg.publish') + @patch.object(conf, "messaging_sender", new="fedmsg") + @patch.object(conf, "messaging_backends", new={"fedmsg": {"SERVICE": "freshmaker"}}) + @patch("fedmsg.publish") def test_publish(self, fedmsg_publish): fake_msg = {} - publish('images.ready', fake_msg) + publish("images.ready", fake_msg) - fedmsg_publish.assert_called_once_with( - 'images.ready', msg=fake_msg, modname='freshmaker') + fedmsg_publish.assert_called_once_with("images.ready", msg=fake_msg, modname="freshmaker") -@unittest.skipUnless(rhmsg, 'rhmsg is not available in Fedora yet.') +@unittest.skipUnless(rhmsg, "rhmsg is not available in Fedora yet.") class TestPublishToRhmsg(BaseMessagingTest): """Test publish message to UMB using _rhmsg_publish backend""" - @patch.object(conf, 'messaging_sender', new='rhmsg') - @patch('rhmsg.activemq.producer.AMQProducer') - @patch('proton.Message') + @patch.object(conf, "messaging_sender", new="rhmsg") + @patch("rhmsg.activemq.producer.AMQProducer") + @patch("proton.Message") def test_publish(self, Message, AMQProducer): fake_msg = {} rhmsg_config = { - 'rhmsg': { - 'BROKER_URLS': ['amqps://localhost:5671'], - 'CERT_FILE': '/path/to/cert', - 'KEY_FILE': '/path/to/key', - 'CA_CERT': '/path/to/ca-cert', - 'TOPIC_PREFIX': 'VirtualTopic.eng.freshmaker', + "rhmsg": { + "BROKER_URLS": ["amqps://localhost:5671"], + "CERT_FILE": "/path/to/cert", + "KEY_FILE": "/path/to/key", + "CA_CERT": "/path/to/ca-cert", + "TOPIC_PREFIX": "VirtualTopic.eng.freshmaker", } } - with patch.object(conf, 'messaging_backends', new=rhmsg_config): - publish('images.ready', fake_msg) - - AMQProducer.assert_called_with(**{ - 'urls': ['amqps://localhost:5671'], - 'certificate': '/path/to/cert', - 'private_key': '/path/to/key', - 'trusted_certificates': '/path/to/ca-cert', - }) + with patch.object(conf, "messaging_backends", new=rhmsg_config): + publish("images.ready", fake_msg) + + AMQProducer.assert_called_with( + **{ + "urls": ["amqps://localhost:5671"], + "certificate": "/path/to/cert", + "private_key": "/path/to/key", + "trusted_certificates": "/path/to/ca-cert", + } + ) producer = AMQProducer.return_value.__enter__.return_value - producer.through_topic.assert_called_once_with( - 'VirtualTopic.eng.freshmaker.images.ready') - producer.send.assert_called_once_with( - Message.return_value) + producer.through_topic.assert_called_once_with("VirtualTopic.eng.freshmaker.images.ready") + producer.send.assert_called_once_with(Message.return_value) class TestInMemoryPublish(BaseMessagingTest): """Test publish message in memory using _in_memory_publish backend""" - @patch('freshmaker.consumer.work_queue_put') - @patch('freshmaker.events.BaseEvent.from_fedmsg') + @patch("freshmaker.consumer.work_queue_put") + @patch("freshmaker.events.BaseEvent.from_fedmsg") def test_publish(self, from_fedmsg, work_queue_put): fake_msg = {} - in_memory_config = { - 'in_memory': {'SERVICE': 'freshmaker'} - } + in_memory_config = {"in_memory": {"SERVICE": "freshmaker"}} - 
with patch.object(conf, 'messaging_backends', new=in_memory_config): - publish('images.ready', fake_msg) + with patch.object(conf, "messaging_backends", new=in_memory_config): + publish("images.ready", fake_msg) from_fedmsg.assert_called_once_with( - 'freshmaker.images.ready', - {'msg_id': '1', 'msg': fake_msg}) + "freshmaker.images.ready", {"msg_id": "1", "msg": fake_msg} + ) work_queue_put.assert_called_once_with(from_fedmsg.return_value) diff --git a/tests/test_models.py b/tests/test_models.py index 4a2de237..fecf2d9d 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -33,21 +33,33 @@ class TestModels(helpers.ModelsTestCase): - def test_get_or_create_from_event(self): - event = events.TestingEvent('msg-1') + event = events.TestingEvent("msg-1") # First call creates new event, second call returns the same one. for i in range(2): db_event = Event.get_or_create_from_event(db.session, event) self.assertEqual(db_event.id, 1) - self.assertEqual(db_event.message_id, 'msg-1') + self.assertEqual(db_event.message_id, "msg-1") def test_creating_event_and_builds(self): event = Event.create(db.session, "test_msg_id", "RHSA-2017-284", events.TestingEvent) - build = ArtifactBuild.create(db.session, event, "ed", "module", 1234, - rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value) - ArtifactBuild.create(db.session, event, "mksh", "module", 1235, build, - rebuild_reason=RebuildReason.DEPENDENCY.value) + build = ArtifactBuild.create( + db.session, + event, + "ed", + "module", + 1234, + rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value, + ) + ArtifactBuild.create( + db.session, + event, + "mksh", + "module", + 1235, + build, + rebuild_reason=RebuildReason.DEPENDENCY.value, + ) db.session.commit() db.session.expire_all() @@ -96,17 +108,21 @@ def test_depending_artifact_builds(self): self.assertEqual(deps, set([build2, build3])) def test_event_transition(self): - for i, state in enumerate([ - EventState.COMPLETE, EventState.COMPLETE.value, "complete"]): - event = Event.create(db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent) + for i, state in enumerate([EventState.COMPLETE, EventState.COMPLETE.value, "complete"]): + event = Event.create( + db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent + ) event.transition(state, "reason") self.assertEqual(event.state, EventState.COMPLETE.value) self.assertTrue(event.time_done is not None) def test_build_transition_recursion(self): - for i, state in enumerate([ArtifactBuildState.FAILED.value, - ArtifactBuildState.CANCELED.value]): - event = Event.create(db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent) + for i, state in enumerate( + [ArtifactBuildState.FAILED.value, ArtifactBuildState.CANCELED.value] + ): + event = Event.create( + db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent + ) build1 = ArtifactBuild.create(db.session, event, "ed", "module", 1234) build2 = ArtifactBuild.create(db.session, event, "mksh", "module", 1235, build1) build3 = ArtifactBuild.create(db.session, event, "runtime", "module", 1236, build2) @@ -120,16 +136,20 @@ def test_build_transition_recursion(self): for build in [build2, build3]: self.assertEqual(build.state, state) self.assertEqual( - build.state_reason, "Cannot build artifact, because its " - "dependency cannot be built.") + build.state_reason, + "Cannot build artifact, because its " "dependency cannot be built.", + ) self.assertEqual(build4.state, ArtifactBuildState.BUILD.value) self.assertEqual(build4.state_reason, None) def 
test_build_transition_recursion_not_done_for_ok_states(self): - for i, state in enumerate([ArtifactBuildState.DONE.value, - ArtifactBuildState.PLANNED.value]): - event = Event.create(db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent) + for i, state in enumerate( + [ArtifactBuildState.DONE.value, ArtifactBuildState.PLANNED.value] + ): + event = Event.create( + db.session, "test_msg_id_{}".format(i), "test", events.TestingEvent + ) build1 = ArtifactBuild.create(db.session, event, "ed", "module", 1234) build2 = ArtifactBuild.create(db.session, event, "mksh", "module", 1235, build1) build3 = ArtifactBuild.create(db.session, event, "runtime", "module", 1236, build2) @@ -171,17 +191,15 @@ def test_get_unreleased(self): self.assertEqual(ret, [event3]) def test_str(self): - event = Event.create(db.session, "test_msg_id1", "test", - events.TestingEvent) + event = Event.create(db.session, "test_msg_id1", "test", events.TestingEvent) self.assertEqual(str(event), "") def test_str_unknown_event_type(self): event = Event.create(db.session, "test_msg_id1", "test", 1024) - self.assertEqual( - str(event), "") + self.assertEqual(str(event), "") def test_event_json_min(self): - with patch('freshmaker.models.datetime') as datetime_patch: + with patch("freshmaker.models.datetime") as datetime_patch: datetime_patch.utcnow.return_value = datetime.datetime(2017, 8, 21, 13, 42, 20) event = Event.create(db.session, "test_msg_id5", "RHSA-2017-289", events.TestingEvent) @@ -189,40 +207,64 @@ def test_event_json_min(self): build.state = ArtifactBuildState.FAILED ArtifactBuild.create(db.session, event, "mksh", "module", 1235, build) db.session.commit() - self.assertEqual(event.json_min(), { - 'builds_summary': {'BUILD': 1, 'FAILED': 1, 'total': 2}, - 'dry_run': False, - 'event_type_id': 3, - 'id': 1, - 'message_id': 'test_msg_id5', - 'requester': None, - 'search_key': 'RHSA-2017-289', - 'state': 0, - 'state_name': 'INITIALIZED', - 'state_reason': None, - 'time_created': '2017-08-21T13:42:20Z', - 'time_done': None, - 'url': 'http://localhost:5001/api/1/events/1', - 'requested_rebuilds': [], - 'requester_metadata': {}, - 'depending_events': [], - 'depends_on_events': [], - }) + self.assertEqual( + event.json_min(), + { + "builds_summary": {"BUILD": 1, "FAILED": 1, "total": 2}, + "dry_run": False, + "event_type_id": 3, + "id": 1, + "message_id": "test_msg_id5", + "requester": None, + "search_key": "RHSA-2017-289", + "state": 0, + "state_name": "INITIALIZED", + "state_reason": None, + "time_created": "2017-08-21T13:42:20Z", + "time_done": None, + "url": "http://localhost:5001/api/1/events/1", + "requested_rebuilds": [], + "requester_metadata": {}, + "depending_events": [], + "depends_on_events": [], + }, + ) def test_get_rebuilt_original_nvrs_by_search_key(self): event = Event.create(db.session, "test_msg_id", "12345", events.TestingEvent) - ArtifactBuild.create(db.session, event, "foo", "image", 1001, - state="done", - original_nvr="foo-2-20", rebuilt_nvr="foo-2-20.1582020101", - rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value) - ArtifactBuild.create(db.session, event, "bar", "image", 1002, - state="done", - original_nvr="bar-3-30", rebuilt_nvr="bar-3-30.1582020135", - rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value) - ArtifactBuild.create(db.session, event, "qux", "image", 1003, - state="failed", - original_nvr="qux-1-11", rebuilt_nvr="qux-1-11.1582020218", - rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value) + ArtifactBuild.create( + db.session, + event, + "foo", + "image", + 1001, + 
state="done", + original_nvr="foo-2-20", + rebuilt_nvr="foo-2-20.1582020101", + rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value, + ) + ArtifactBuild.create( + db.session, + event, + "bar", + "image", + 1002, + state="done", + original_nvr="bar-3-30", + rebuilt_nvr="bar-3-30.1582020135", + rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value, + ) + ArtifactBuild.create( + db.session, + event, + "qux", + "image", + 1003, + state="failed", + original_nvr="qux-1-11", + rebuilt_nvr="qux-1-11.1582020218", + rebuild_reason=RebuildReason.DIRECTLY_AFFECTED.value, + ) db.session.commit() db.session.expire_all() nvrs = ArtifactBuild.get_rebuilt_original_nvrs_by_search_key(db.session, "12345") @@ -233,83 +275,89 @@ class TestFindDependentEvents(helpers.ModelsTestCase): """Test Event.find_dependent_events""" def setUp(self): - super(TestFindDependentEvents, self). setUp() + super(TestFindDependentEvents, self).setUp() self.event_1 = Event.create( - db.session, 'msg-1', 'search-key-1', + db.session, + "msg-1", + "search-key-1", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.INITIALIZED, - released=False) - ArtifactBuild.create( - db.session, self.event_1, 'build-1', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_1, 'build-2', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_1, 'build-3', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_1, 'build-4', ArtifactType.IMAGE) + released=False, + ) + ArtifactBuild.create(db.session, self.event_1, "build-1", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_1, "build-2", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_1, "build-3", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_1, "build-4", ArtifactType.IMAGE) self.event_2 = Event.create( - db.session, 'msg-2', 'search-key-2', + db.session, + "msg-2", + "search-key-2", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.BUILDING, - released=False) - ArtifactBuild.create( - db.session, self.event_2, 'build-2', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_2, 'build-5', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_2, 'build-6', ArtifactType.IMAGE) + released=False, + ) + ArtifactBuild.create(db.session, self.event_2, "build-2", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_2, "build-5", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_2, "build-6", ArtifactType.IMAGE) self.event_3 = Event.create( - db.session, 'msg-3', 'search-key-3', + db.session, + "msg-3", + "search-key-3", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.COMPLETE, - released=False) - ArtifactBuild.create( - db.session, self.event_3, 'build-2', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_3, 'build-4', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_3, 'build-7', ArtifactType.IMAGE) - ArtifactBuild.create( - db.session, self.event_3, 'build-8', ArtifactType.IMAGE) + released=False, + ) + ArtifactBuild.create(db.session, self.event_3, "build-2", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_3, "build-4", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_3, "build-7", ArtifactType.IMAGE) + ArtifactBuild.create(db.session, self.event_3, "build-8", ArtifactType.IMAGE) # Some noises # Failed events should not be included self.event_4 = Event.create( - db.session, 'msg-4', 'search-key-4', + db.session, + "msg-4", + "search-key-4", 
EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.FAILED, - released=False) - ArtifactBuild.create( - db.session, self.event_4, 'build-3', ArtifactType.IMAGE) + released=False, + ) + ArtifactBuild.create(db.session, self.event_4, "build-3", ArtifactType.IMAGE) # Manual triggered rebuild should not be included as well self.event_5 = Event.create( - db.session, 'msg-5', 'search-key-5', + db.session, + "msg-5", + "search-key-5", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.BUILDING, - released=False, manual=True) - ArtifactBuild.create( - db.session, self.event_5, 'build-4', ArtifactType.IMAGE) + released=False, + manual=True, + ) + ArtifactBuild.create(db.session, self.event_5, "build-4", ArtifactType.IMAGE) # Released event should not be included also self.event_6 = Event.create( - db.session, 'msg-6', 'search-key-6', + db.session, + "msg-6", + "search-key-6", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.COMPLETE, - released=True) - ArtifactBuild.create( - db.session, self.event_5, 'build-4', ArtifactType.IMAGE) + released=True, + ) + ArtifactBuild.create(db.session, self.event_5, "build-4", ArtifactType.IMAGE) db.session.commit() def test_find_dependent_events(self): dep_events = self.event_1.find_dependent_events() - self.assertEqual([self.event_2.id, self.event_3.id], - sorted([event.id for event in dep_events])) + self.assertEqual( + [self.event_2.id, self.event_3.id], sorted([event.id for event in dep_events]) + ) dep_rels = db.session.query(EventDependency).all() dep_rels = [(rel.event_id, rel.event_dependency_id) for rel in dep_rels] @@ -323,7 +371,7 @@ class TestArtifactBuildComposesRel(helpers.ModelsTestCase): """Test m2m relationship between ArtifactBuild and Compose""" def setUp(self): - super(TestArtifactBuildComposesRel, self). 
setUp() + super(TestArtifactBuildComposesRel, self).setUp() self.compose_1 = Compose(odcs_compose_id=-1) self.compose_2 = Compose(odcs_compose_id=2) @@ -335,18 +383,18 @@ def setUp(self): db.session.add(self.compose_4) self.event = Event.create( - db.session, 'msg-1', 'search-key-1', + db.session, + "msg-1", + "search-key-1", EVENT_TYPES[ErrataRPMAdvisoryShippedEvent], state=EventState.INITIALIZED, - released=False) - self.build_1 = ArtifactBuild.create( - db.session, self.event, 'build-1', ArtifactType.IMAGE) + released=False, + ) + self.build_1 = ArtifactBuild.create(db.session, self.event, "build-1", ArtifactType.IMAGE) self.build_1.build_id = 3 - self.build_2 = ArtifactBuild.create( - db.session, self.event, 'build-2', ArtifactType.IMAGE) + self.build_2 = ArtifactBuild.create(db.session, self.event, "build-2", ArtifactType.IMAGE) self.build_2.build_id = -2 - self.build_3 = ArtifactBuild.create( - db.session, self.event, 'build-3', ArtifactType.IMAGE) + self.build_3 = ArtifactBuild.create(db.session, self.event, "build-3", ArtifactType.IMAGE) self.build_3.build_id = None db.session.commit() @@ -360,9 +408,7 @@ def setUp(self): ) for build_id, compose_id in rels: - db.session.add( - ArtifactBuildCompose( - build_id=build_id, compose_id=compose_id)) + db.session.add(ArtifactBuildCompose(build_id=build_id, compose_id=compose_id)) db.session.commit() @@ -378,12 +424,14 @@ def test_build_composes(self): self.assertEqual(3, len(self.build_1.composes)) self.assertEqual( [self.compose_1.id, self.compose_2.id, self.compose_3.id], - sorted([rel.compose.id for rel in self.build_1.composes])) + sorted([rel.compose.id for rel in self.build_1.composes]), + ) self.assertEqual(2, len(self.build_2.composes)) self.assertEqual( [self.compose_2.id, self.compose_4.id], - sorted([rel.compose.id for rel in self.build_2.composes])) + sorted([rel.compose.id for rel in self.build_2.composes]), + ) self.assertEqual([], self.build_3.composes) @@ -397,9 +445,7 @@ def test_compose_builds(self): for compose, builds_count, builds in expected_rels: self.assertEqual(builds_count, len(compose.builds)) - self.assertEqual( - builds, - sorted([rel.build.id for rel in compose.builds])) + self.assertEqual(builds, sorted([rel.build.id for rel in compose.builds])) class TestEventDependency(helpers.ModelsTestCase): diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 65c8ece1..8e01bd72 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -45,47 +45,66 @@ def setUp(self): self.client = app.test_client() def _init_data(self): - event = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000001", "RHSA-2018-101", events.TestingEvent) + event = models.Event.create( + db.session, + "2017-00000000-0000-0000-0000-000000000001", + "RHSA-2018-101", + events.TestingEvent, + ) build = models.ArtifactBuild.create(db.session, event, "ed", "module", 1234) build.build_args = '{"key": "value"}' models.ArtifactBuild.create(db.session, event, "mksh", "module", 1235) models.ArtifactBuild.create(db.session, event, "bash", "module", 1236) - models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000002", "RHSA-2018-102", events.TestingEvent) + models.Event.create( + db.session, + "2017-00000000-0000-0000-0000-000000000002", + "RHSA-2018-102", + events.TestingEvent, + ) db.session.commit() db.session.expire_all() def test_monitor_api_structure(self): - resp = self.client.get('/api/1/monitor/metrics') + resp = self.client.get("/api/1/monitor/metrics") self.assertEqual( - len([line for line in 
resp.get_data(as_text=True).splitlines() - if line.startswith('# TYPE')]), num_of_metrics) + len( + [ + line + for line in resp.get_data(as_text=True).splitlines() + if line.startswith("# TYPE") + ] + ), + num_of_metrics, + ) class ConsumerTest(helpers.ConsumerBaseTest): def setUp(self): - super(ConsumerTest, self). setUp() + super(ConsumerTest, self).setUp() self.client = app.test_client() def tearDown(self): - super(ConsumerTest, self). tearDown() + super(ConsumerTest, self).tearDown() def _compose_state_change_msg(self, state=None): - msg = {'body': { - "msg_id": "2017-7afcb214-cf82-4130-92d2-22f45cf59cf7", - "topic": "org.fedoraproject.prod.odcs.state.change", - "signature": "qRZ6oXBpKD/q8BTjBNa4MREkAPxT+KzI8Oret+TSKazGq/6gk0uuprdFpkfBXLR5dd4XDoh3NQWp\nyC74VYTDVqJR7IsEaqHtrv01x1qoguU/IRWnzrkGwqXm+Es4W0QZjHisBIRRZ4ywYBG+DtWuskvy\n6/5Mc3dXaUBcm5TnT0c=\n", - "msg": { - "compose": { - "id": 1, - "state": 4, - } + msg = { + "body": { + "msg_id": "2017-7afcb214-cf82-4130-92d2-22f45cf59cf7", + "topic": "org.fedoraproject.prod.odcs.state.change", + "signature": "qRZ6oXBpKD/q8BTjBNa4MREkAPxT+KzI8Oret+TSKazGq/6gk0uuprdFpkfBXLR5dd4XDoh3NQWp\nyC74VYTDVqJR7IsEaqHtrv01x1qoguU/IRWnzrkGwqXm+Es4W0QZjHisBIRRZ4ywYBG+DtWuskvy\n6/5Mc3dXaUBcm5TnT0c=\n", + "msg": { + "compose": { + "id": 1, + "state": 4, + } + }, } - }} + } return msg def _get_monitor_value(self, key): - resp = self.client.get('/api/1/monitor/metrics') + resp = self.client.get("/api/1/monitor/metrics") for line in resp.get_data(as_text=True).splitlines(): k, v = line.split(" ")[:2] if k == key: @@ -121,14 +140,15 @@ def test_consumer_processing_message(self, global_consumer, handle, handler_can_ def test_standalone_metrics_server_disabled_by_default(): with pytest.raises(requests.exceptions.ConnectionError): - requests.get('http://127.0.0.1:10040/metrics') + requests.get("http://127.0.0.1:10040/metrics") def test_standalone_metrics_server(): - os.environ['MONITOR_STANDALONE_METRICS_SERVER_ENABLE'] = 'true' + os.environ["MONITOR_STANDALONE_METRICS_SERVER_ENABLE"] = "true" importlib.reload(freshmaker.monitor) - r = requests.get('http://127.0.0.1:10040/metrics') + r = requests.get("http://127.0.0.1:10040/metrics") - assert len([line for line in r.text.splitlines() - if line.startswith('# TYPE')]) == num_of_metrics + assert ( + len([line for line in r.text.splitlines() if line.startswith("# TYPE")]) == num_of_metrics + ) diff --git a/tests/test_odcsclient.py b/tests/test_odcsclient.py index 74b2392b..066587f7 100644 --- a/tests/test_odcsclient.py +++ b/tests/test_odcsclient.py @@ -49,26 +49,23 @@ def handle(self, event): class TestCreateODCSClient(helpers.FreshmakerTestCase): """Test odcsclient.create_odcs_client""" - @patch.object(conf, 'odcs_auth_mech', new='kerberos') - @patch('freshmaker.odcsclient.RetryingODCS') + @patch.object(conf, "odcs_auth_mech", new="kerberos") + @patch("freshmaker.odcsclient.RetryingODCS") def test_create_with_kerberos_auth(self, ODCS): odcs = create_odcs_client() self.assertEqual(ODCS.return_value, odcs) ODCS.assert_called_once_with( - conf.odcs_server_url, - auth_mech=AuthMech.Kerberos, - verify_ssl=conf.odcs_verify_ssl) + conf.odcs_server_url, auth_mech=AuthMech.Kerberos, verify_ssl=conf.odcs_verify_ssl + ) - @patch.object(conf, 'odcs_auth_mech', new='fas') + @patch.object(conf, "odcs_auth_mech", new="fas") def test_error_if_unsupported_auth_configured(self): - self.assertRaisesRegex( - ValueError, r'.*fas is not supported yet.', - create_odcs_client) + self.assertRaisesRegex(ValueError, r".*fas is not supported 
yet.", create_odcs_client) - @patch.object(conf, 'odcs_auth_mech', new='openidc') - @patch.object(conf, 'odcs_openidc_token', new='12345') - @patch('freshmaker.odcsclient.RetryingODCS') + @patch.object(conf, "odcs_auth_mech", new="openidc") + @patch.object(conf, "odcs_openidc_token", new="12345") + @patch("freshmaker.odcsclient.RetryingODCS") def test_create_with_openidc_auth(self, ODCS): odcs = create_odcs_client() @@ -76,14 +73,13 @@ def test_create_with_openidc_auth(self, ODCS): ODCS.assert_called_once_with( conf.odcs_server_url, auth_mech=AuthMech.OpenIDC, - openidc_token='12345', - verify_ssl=conf.odcs_verify_ssl) + openidc_token="12345", + verify_ssl=conf.odcs_verify_ssl, + ) - @patch.object(conf, 'odcs_auth_mech', new='openidc') + @patch.object(conf, "odcs_auth_mech", new="openidc") def test_error_if_missing_openidc_token(self): - self.assertRaisesRegex( - ValueError, r'Missing OpenIDC token.*', - create_odcs_client) + self.assertRaisesRegex(ValueError, r"Missing OpenIDC token.*", create_odcs_client) class TestGetPackagesForCompose(helpers.FreshmakerTestCase): @@ -91,17 +87,14 @@ class TestGetPackagesForCompose(helpers.FreshmakerTestCase): @helpers.mock_koji def test_get_packages(self, mocked_koji): - build_nvr = 'chkconfig-1.7.2-1.el7_3.1' + build_nvr = "chkconfig-1.7.2-1.el7_3.1" mocked_koji.add_build(build_nvr) - mocked_koji.add_build_rpms( - build_nvr, - [build_nvr, "chkconfig-debuginfo-1.7.2-1.el7_3.1"]) + mocked_koji.add_build_rpms(build_nvr, [build_nvr, "chkconfig-debuginfo-1.7.2-1.el7_3.1"]) handler = MyHandler() packages = handler.odcs._get_packages_for_compose(build_nvr) - self.assertEqual(set(['chkconfig', 'chkconfig-debuginfo']), - set(packages)) + self.assertEqual(set(["chkconfig", "chkconfig-debuginfo"]), set(packages)) class TestGetComposeSource(helpers.FreshmakerTestCase): @@ -111,34 +104,31 @@ class TestGetComposeSource(helpers.FreshmakerTestCase): def test_get_tag(self, mocked_koji): mocked_koji.add_build("rh-postgresql96-3.0-9.el6") handler = MyHandler() - tag = handler.odcs._get_compose_source('rh-postgresql96-3.0-9.el6') - self.assertEqual('tag-candidate', tag) + tag = handler.odcs._get_compose_source("rh-postgresql96-3.0-9.el6") + self.assertEqual("tag-candidate", tag) @helpers.mock_koji def test_get_None_if_tag_has_new_build(self, mocked_koji): mocked_koji.add_build("rh-postgresql96-3.0-9.el6") mocked_koji.add_build("rh-postgresql96-3.0-10.el6") handler = MyHandler() - tag = handler.odcs._get_compose_source('rh-postgresql96-3.0-9.el6') + tag = handler.odcs._get_compose_source("rh-postgresql96-3.0-9.el6") self.assertEqual(None, tag) @helpers.mock_koji def test_get_tag_prefer_final_over_candidate(self, mocked_koji): - mocked_koji.add_build("rh-postgresql96-3.0-9.el6", - ["tag-candidate", "tag"]) + mocked_koji.add_build("rh-postgresql96-3.0-9.el6", ["tag-candidate", "tag"]) handler = MyHandler() - tag = handler.odcs._get_compose_source('rh-postgresql96-3.0-9.el6') - self.assertEqual('tag', tag) + tag = handler.odcs._get_compose_source("rh-postgresql96-3.0-9.el6") + self.assertEqual("tag", tag) @helpers.mock_koji def test_get_tag_fallback_to_second_tag(self, mocked_koji): - mocked_koji.add_build("rh-postgresql96-3.0-10.el6", - ["tag"]) - mocked_koji.add_build("rh-postgresql96-3.0-9.el6", - ["tag", "tag-candidate"]) + mocked_koji.add_build("rh-postgresql96-3.0-10.el6", ["tag"]) + mocked_koji.add_build("rh-postgresql96-3.0-9.el6", ["tag", "tag-candidate"]) handler = MyHandler() - tag = handler.odcs._get_compose_source('rh-postgresql96-3.0-9.el6') - 
self.assertEqual('tag-candidate', tag) + tag = handler.odcs._get_compose_source("rh-postgresql96-3.0-9.el6") + self.assertEqual("tag-candidate", tag) class TestPrepareYumRepo(helpers.ModelsTestCase): @@ -147,23 +137,23 @@ class TestPrepareYumRepo(helpers.ModelsTestCase): def setUp(self): super(TestPrepareYumRepo, self).setUp() - self.ev = Event.create(db.session, 'msg-id', '123', 100) + self.ev = Event.create(db.session, "msg-id", "123", 100) ArtifactBuild.create( - db.session, self.ev, "parent", "image", - state=ArtifactBuildState.PLANNED) + db.session, self.ev, "parent", "image", state=ArtifactBuildState.PLANNED + ) db.session.commit() - @patch('freshmaker.odcsclient.create_odcs_client') - @patch('freshmaker.odcsclient.FreshmakerODCSClient._get_packages_for_compose') - @patch('freshmaker.odcsclient.FreshmakerODCSClient._get_compose_source') - @patch('time.sleep') - @patch('freshmaker.odcsclient.Errata') + @patch("freshmaker.odcsclient.create_odcs_client") + @patch("freshmaker.odcsclient.FreshmakerODCSClient._get_packages_for_compose") + @patch("freshmaker.odcsclient.FreshmakerODCSClient._get_compose_source") + @patch("time.sleep") + @patch("freshmaker.odcsclient.Errata") def test_get_repo_url_when_succeed_to_generate_compose( - self, errata, sleep, _get_compose_source, - _get_packages_for_compose, create_odcs_client): + self, errata, sleep, _get_compose_source, _get_packages_for_compose, create_odcs_client + ): odcs = create_odcs_client.return_value - _get_packages_for_compose.return_value = ['httpd', 'httpd-debuginfo'] - _get_compose_source.return_value = 'rhel-7.2-candidate' + _get_packages_for_compose.return_value = ["httpd", "httpd-debuginfo"] + _get_compose_source.return_value = "rhel-7.2-candidate" odcs.new_compose.return_value = { "id": 3, "result_repo": "http://localhost/composes/latest-odcs-3-1/compose/Temporary", @@ -180,35 +170,41 @@ def test_get_repo_url_when_succeed_to_generate_compose( compose = handler.odcs.prepare_yum_repo(self.ev) db.session.refresh(self.ev) - self.assertEqual(3, compose['id']) + self.assertEqual(3, compose["id"]) _get_compose_source.assert_called_once_with("httpd-2.4.15-1.f27") _get_packages_for_compose.assert_called_once_with("httpd-2.4.15-1.f27") # Ensure new_compose is called to request a new compose odcs.new_compose.assert_called_once_with( - 'rhel-7.2-candidate', 'tag', packages=['httpd', 'httpd-debuginfo'], - sigkeys=[], flags=["no_deps"]) + "rhel-7.2-candidate", + "tag", + packages=["httpd", "httpd-debuginfo"], + sigkeys=[], + flags=["no_deps"], + ) # We should get the right repo URL eventually self.assertEqual( "http://localhost/composes/latest-odcs-3-1/compose/Temporary/odcs-3.repo", - compose['result_repofile']) - - @patch('freshmaker.odcsclient.create_odcs_client') - @patch('freshmaker.odcsclient.FreshmakerODCSClient._get_packages_for_compose') - @patch('freshmaker.odcsclient.FreshmakerODCSClient._get_compose_source') - @patch('time.sleep') - @patch('freshmaker.odcsclient.Errata') + compose["result_repofile"], + ) + + @patch("freshmaker.odcsclient.create_odcs_client") + @patch("freshmaker.odcsclient.FreshmakerODCSClient._get_packages_for_compose") + @patch("freshmaker.odcsclient.FreshmakerODCSClient._get_compose_source") + @patch("time.sleep") + @patch("freshmaker.odcsclient.Errata") def test_get_repo_url_packages_in_multiple_tags( - self, errata, sleep, _get_compose_source, - _get_packages_for_compose, create_odcs_client): - _get_packages_for_compose.return_value = ['httpd', 'httpd-debuginfo'] - _get_compose_source.side_effect = [ - 
'rhel-7.2-candidate', 'rhel-7.7-candidate'] + self, errata, sleep, _get_compose_source, _get_packages_for_compose, create_odcs_client + ): + _get_packages_for_compose.return_value = ["httpd", "httpd-debuginfo"] + _get_compose_source.side_effect = ["rhel-7.2-candidate", "rhel-7.7-candidate"] errata.return_value.get_srpm_nvrs.return_value = [ - set(["httpd-2.4.15-1.f27"]), set(["foo-2.4.15-1.f27"])] + set(["httpd-2.4.15-1.f27"]), + set(["foo-2.4.15-1.f27"]), + ] handler = MyHandler() repo_url = handler.odcs.prepare_yum_repo(self.ev) @@ -219,22 +215,26 @@ def test_get_repo_url_packages_in_multiple_tags( db.session.refresh(self.ev) for build in self.ev.builds: self.assertEqual(build.state, ArtifactBuildState.FAILED.value) - self.assertEqual(build.state_reason, "Packages for errata " - "advisory 123 found in multiple different tags.") - - @patch('freshmaker.odcsclient.create_odcs_client') - @patch('freshmaker.odcsclient.FreshmakerODCSClient._get_packages_for_compose') - @patch('freshmaker.odcsclient.FreshmakerODCSClient._get_compose_source') - @patch('time.sleep') - @patch('freshmaker.odcsclient.Errata') + self.assertEqual( + build.state_reason, + "Packages for errata " "advisory 123 found in multiple different tags.", + ) + + @patch("freshmaker.odcsclient.create_odcs_client") + @patch("freshmaker.odcsclient.FreshmakerODCSClient._get_packages_for_compose") + @patch("freshmaker.odcsclient.FreshmakerODCSClient._get_compose_source") + @patch("time.sleep") + @patch("freshmaker.odcsclient.Errata") def test_get_repo_url_packages_not_found_in_tag( - self, errata, sleep, _get_compose_source, - _get_packages_for_compose, create_odcs_client): - _get_packages_for_compose.return_value = ['httpd', 'httpd-debuginfo'] + self, errata, sleep, _get_compose_source, _get_packages_for_compose, create_odcs_client + ): + _get_packages_for_compose.return_value = ["httpd", "httpd-debuginfo"] _get_compose_source.return_value = None errata.return_value.get_srpm_nvrs.return_value = [ - set(["httpd-2.4.15-1.f27"]), set(["foo-2.4.15-1.f27"])] + set(["httpd-2.4.15-1.f27"]), + set(["foo-2.4.15-1.f27"]), + ] handler = MyHandler() repo_url = handler.odcs.prepare_yum_repo(self.ev) @@ -245,41 +245,52 @@ def test_get_repo_url_packages_not_found_in_tag( db.session.refresh(self.ev) for build in self.ev.builds: self.assertEqual(build.state, ArtifactBuildState.FAILED.value) - self.assertTrue(build.state_reason.endswith( - "of advisory 123 is the latest build in its candidate tag.")) - - def _get_fake_container_image(self, architecture='amd64', arches='x86_64'): - rpm_manifest = [{u'rpms': [{ - u'architecture': architecture, - u'gpg': u'199e2f91fd431d51', - u'name': u'apache-commons-lang', - u'nvra': u'apache-commons-lang-2.6-15.el7.noarch', - u'release': u'15.el7', - u'srpm_name': u'apache-commons-lang', - u'srpm_nevra': u'apache-commons-lang-0:2.6-15.el7.src', - u'summary': u'Provides a host of helper utilities for the java.lang API', - u'version': u'2.6' - }, { - u'architecture': architecture, - u'gpg': u'199e2f91fd431d51', - u'name': u'avalon-logkit', - u'nvra': u'avalon-logkit-2.1-14.el7.noarch', - u'release': u'14.el7', - u'srpm_name': u'avalon-logkit', - u'srpm_nevra': u'avalon-logkit-0:2.1-14.el7.src', - u'summary': u'Java logging toolkit', - u'version': u'2.1' - }]}] - return ContainerImage.create({ - u'arches': arches, # Populated based on Brew build - u'architecture': architecture, # Populated from Lightblue data - u'rpm_manifest': rpm_manifest, - }) - - @patch('freshmaker.odcsclient.create_odcs_client') - 
@patch('time.sleep') - def test_prepare_odcs_compose_with_image_rpms( - self, sleep, create_odcs_client): + self.assertTrue( + build.state_reason.endswith( + "of advisory 123 is the latest build in its candidate tag." + ) + ) + + def _get_fake_container_image(self, architecture="amd64", arches="x86_64"): + rpm_manifest = [ + { + "rpms": [ + { + "architecture": architecture, + "gpg": "199e2f91fd431d51", + "name": "apache-commons-lang", + "nvra": "apache-commons-lang-2.6-15.el7.noarch", + "release": "15.el7", + "srpm_name": "apache-commons-lang", + "srpm_nevra": "apache-commons-lang-0:2.6-15.el7.src", + "summary": "Provides a host of helper utilities for the java.lang API", + "version": "2.6", + }, + { + "architecture": architecture, + "gpg": "199e2f91fd431d51", + "name": "avalon-logkit", + "nvra": "avalon-logkit-2.1-14.el7.noarch", + "release": "14.el7", + "srpm_name": "avalon-logkit", + "srpm_nevra": "avalon-logkit-0:2.1-14.el7.src", + "summary": "Java logging toolkit", + "version": "2.1", + }, + ] + } + ] + return ContainerImage.create( + { + "arches": arches, # Populated based on Brew build + "architecture": architecture, # Populated from Lightblue data + "rpm_manifest": rpm_manifest, + } + ) + + @patch("freshmaker.odcsclient.create_odcs_client") + @patch("time.sleep") + def test_prepare_odcs_compose_with_image_rpms(self, sleep, create_odcs_client): odcs = create_odcs_client.return_value odcs.new_compose.return_value = { "id": 3, @@ -297,18 +308,22 @@ def test_prepare_odcs_compose_with_image_rpms( compose = handler.odcs.prepare_odcs_compose_with_image_rpms(image) db.session.refresh(self.ev) - self.assertEqual(3, compose['id']) + self.assertEqual(3, compose["id"]) # Ensure new_compose is called to request a new compose odcs.new_compose.assert_called_once_with( - '', 'build', builds=['apache-commons-lang-2.6-15.el7', 'avalon-logkit-2.1-14.el7'], - flags=['no_deps'], packages=[u'apache-commons-lang', u'avalon-logkit'], sigkeys=[], - arches=['x86_64']) - - @patch('freshmaker.odcsclient.create_odcs_client') - @patch('time.sleep') - def test_prepare_odcs_compose_with_multi_arch_image_rpms( - self, sleep, create_odcs_client): + "", + "build", + builds=["apache-commons-lang-2.6-15.el7", "avalon-logkit-2.1-14.el7"], + flags=["no_deps"], + packages=["apache-commons-lang", "avalon-logkit"], + sigkeys=[], + arches=["x86_64"], + ) + + @patch("freshmaker.odcsclient.create_odcs_client") + @patch("time.sleep") + def test_prepare_odcs_compose_with_multi_arch_image_rpms(self, sleep, create_odcs_client): odcs = create_odcs_client.return_value odcs.new_compose.return_value = { "id": 3, @@ -320,23 +335,28 @@ def test_prepare_odcs_compose_with_multi_arch_image_rpms( "state_name": "wait", } - arches = 's390x x86_64' - image_x86_64 = self._get_fake_container_image(architecture='amd64', arches=arches) - image_s390x = self._get_fake_container_image(architecture='s390x', arches=arches) + arches = "s390x x86_64" + image_x86_64 = self._get_fake_container_image(architecture="amd64", arches=arches) + image_s390x = self._get_fake_container_image(architecture="s390x", arches=arches) for image in (image_x86_64, image_s390x): handler = MyHandler() compose = handler.odcs.prepare_odcs_compose_with_image_rpms(image) db.session.refresh(self.ev) - self.assertEqual(3, compose['id']) + self.assertEqual(3, compose["id"]) # Ensure new_compose is called to request a new multi-arch # compose regardless of which image is used. 
odcs.new_compose.assert_called_once_with( - '', 'build', builds=['apache-commons-lang-2.6-15.el7', 'avalon-logkit-2.1-14.el7'], - flags=['no_deps'], packages=[u'apache-commons-lang', u'avalon-logkit'], sigkeys=[], - arches=['s390x', 'x86_64']) + "", + "build", + builds=["apache-commons-lang-2.6-15.el7", "avalon-logkit-2.1-14.el7"], + flags=["no_deps"], + packages=["apache-commons-lang", "avalon-logkit"], + sigkeys=[], + arches=["s390x", "x86_64"], + ) odcs.reset_mock() @@ -352,11 +372,11 @@ def test_prepare_odcs_compose_with_image_rpms_dry_run(self, global_consumer): handler = MyHandler() handler.force_dry_run() compose = handler.odcs.prepare_odcs_compose_with_image_rpms(image) - db_compose = Compose(odcs_compose_id=compose['id']) + db_compose = Compose(odcs_compose_id=compose["id"]) db.session.add(db_compose) db.session.commit() - self.assertEqual(-i, compose['id']) + self.assertEqual(-i, compose["id"]) event = consumer.incoming.get() self.assertEqual(event.msg_id, "fake_compose_msg") @@ -366,20 +386,25 @@ def test_prepare_odcs_compose_with_image_rpms_no_rpm_manifest(self): compose = handler.odcs.prepare_odcs_compose_with_image_rpms({}) self.assertEqual(compose, None) - compose = handler.odcs.prepare_odcs_compose_with_image_rpms( - {"multi_arch_rpm_manifest": {}}) + compose = handler.odcs.prepare_odcs_compose_with_image_rpms({"multi_arch_rpm_manifest": {}}) self.assertEqual(compose, None) compose = handler.odcs.prepare_odcs_compose_with_image_rpms( - {"multi_arch_rpm_manifest": { - "amd64": [], - }}) + { + "multi_arch_rpm_manifest": { + "amd64": [], + } + } + ) self.assertEqual(compose, None) compose = handler.odcs.prepare_odcs_compose_with_image_rpms( - {"multi_arch_rpm_manifest": { - "amd64": [{"rpms": []}], - }}) + { + "multi_arch_rpm_manifest": { + "amd64": [{"rpms": []}], + } + } + ) self.assertEqual(compose, None) @@ -392,25 +417,28 @@ def setUp(self): self.patcher = helpers.Patcher() self.mock_prepare_yum_repo = self.patcher.patch( - 'freshmaker.odcsclient.FreshmakerODCSClient.prepare_yum_repo', + "freshmaker.odcsclient.FreshmakerODCSClient.prepare_yum_repo", side_effect=[ - {'id': 1, 'result_repofile': 'http://localhost/repo/1'}, - {'id': 2, 'result_repofile': 'http://localhost/repo/2'}, - {'id': 3, 'result_repofile': 'http://localhost/repo/3'}, - {'id': 4, 'result_repofile': 'http://localhost/repo/4'}, - ]) + {"id": 1, "result_repofile": "http://localhost/repo/1"}, + {"id": 2, "result_repofile": "http://localhost/repo/2"}, + {"id": 3, "result_repofile": "http://localhost/repo/3"}, + {"id": 4, "result_repofile": "http://localhost/repo/4"}, + ], + ) self.mock_find_dependent_event = self.patcher.patch( - 'freshmaker.models.Event.find_dependent_events') + "freshmaker.models.Event.find_dependent_events" + ) self.db_event = Event.create( - db.session, 'msg-1', 'search-key-1', 1, - state=EventState.INITIALIZED, - released=False) + db.session, "msg-1", "search-key-1", 1, state=EventState.INITIALIZED, released=False + ) self.build_1 = ArtifactBuild.create( - db.session, self.db_event, 'build-1', ArtifactType.IMAGE) + db.session, self.db_event, "build-1", ArtifactType.IMAGE + ) self.build_2 = ArtifactBuild.create( - db.session, self.db_event, 'build-2', ArtifactType.IMAGE) + db.session, self.db_event, "build-2", ArtifactType.IMAGE + ) db.session.commit() @@ -426,12 +454,10 @@ def test_prepare_without_dependent_events(self): self.assertEqual(1, self.build_1.composes[0].compose.id) self.assertEqual(1, self.build_2.composes[0].compose.id) - self.assertEqual(['http://localhost/repo/1'], urls) 
+ self.assertEqual(["http://localhost/repo/1"], urls) def test_prepare_with_dependent_events(self): - self.mock_find_dependent_event.return_value = [ - Mock(), Mock(), Mock() - ] + self.mock_find_dependent_event.return_value = [Mock(), Mock(), Mock()] handler = MyHandler() urls = handler.odcs.prepare_yum_repos_for_rebuilds(self.db_event) @@ -442,9 +468,12 @@ def test_prepare_with_dependent_events(self): odcs_compose_ids = [rel.compose.id for rel in self.build_2.composes] self.assertEqual([1, 2, 3, 4], sorted(odcs_compose_ids)) - self.assertEqual([ - 'http://localhost/repo/1', - 'http://localhost/repo/2', - 'http://localhost/repo/3', - 'http://localhost/repo/4', - ], sorted(urls)) + self.assertEqual( + [ + "http://localhost/repo/1", + "http://localhost/repo/2", + "http://localhost/repo/3", + "http://localhost/repo/4", + ], + sorted(urls), + ) diff --git a/tests/test_producer.py b/tests/test_producer.py index 418b77cb..6c181fe5 100644 --- a/tests/test_producer.py +++ b/tests/test_producer.py @@ -36,19 +36,19 @@ class TestCheckUnfinishedKojiTasks(helpers.ModelsTestCase): - def setUp(self): super(TestCheckUnfinishedKojiTasks, self).setUp() self.koji_read_config_patcher = patch( - 'koji.read_config', return_value={'server': 'http://localhost/'}) + "koji.read_config", return_value={"server": "http://localhost/"} + ) self.koji_read_config_patcher.start() db_event = Event.get_or_create( - db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent) + db.session, "msg1", "current_event", ErrataRPMAdvisoryShippedEvent + ) db_event.state = EventState.BUILDING - self.build = ArtifactBuild.create(db.session, db_event, "parent1-1-4", - "image") + self.build = ArtifactBuild.create(db.session, db_event, "parent1-1-4", "image") self.build.state = ArtifactBuildState.BUILD self.build.build_id = 10 db.session.commit() @@ -56,13 +56,13 @@ def setUp(self): def tearDown(self): self.koji_read_config_patcher.stop() - @patch('freshmaker.kojiservice.KojiService.get_task_info') - @patch('freshmaker.consumer.get_global_consumer') + @patch("freshmaker.kojiservice.KojiService.get_task_info") + @patch("freshmaker.consumer.get_global_consumer") def test_koji_task_failed(self, global_consumer, get_task_info): consumer = self.create_consumer() global_consumer.return_value = consumer - get_task_info.return_value = {'state': koji.TASK_STATES['FAILED']} + get_task_info.return_value = {"state": koji.TASK_STATES["FAILED"]} hub = MagicMock() producer = FreshmakerProducer(hub) @@ -71,13 +71,13 @@ def test_koji_task_failed(self, global_consumer, get_task_info): self.assertEqual(event.task_id, 10) self.assertEqual(event.new_state, "FAILED") - @patch('freshmaker.kojiservice.KojiService.get_task_info') - @patch('freshmaker.consumer.get_global_consumer') + @patch("freshmaker.kojiservice.KojiService.get_task_info") + @patch("freshmaker.consumer.get_global_consumer") def test_koji_task_closed(self, global_consumer, get_task_info): consumer = self.create_consumer() global_consumer.return_value = consumer - get_task_info.return_value = {'state': koji.TASK_STATES['CLOSED']} + get_task_info.return_value = {"state": koji.TASK_STATES["CLOSED"]} hub = MagicMock() producer = FreshmakerProducer(hub) @@ -86,43 +86,44 @@ def test_koji_task_closed(self, global_consumer, get_task_info): self.assertEqual(event.task_id, 10) self.assertEqual(event.new_state, "CLOSED") - @patch('freshmaker.kojiservice.KojiService.get_task_info') - @patch('freshmaker.consumer.get_global_consumer') + @patch("freshmaker.kojiservice.KojiService.get_task_info") + 
@patch("freshmaker.consumer.get_global_consumer") def test_koji_task_dry_run(self, global_consumer, get_task_info): self.build.build_id = -10 consumer = self.create_consumer() global_consumer.return_value = consumer - get_task_info.return_value = {'state': koji.TASK_STATES['CLOSED']} + get_task_info.return_value = {"state": koji.TASK_STATES["CLOSED"]} hub = MagicMock() producer = FreshmakerProducer(hub) producer.check_unfinished_koji_tasks(db.session) self.assertRaises(queue.Empty, consumer.incoming.get, block=False) - @patch('freshmaker.kojiservice.KojiService.get_task_info') - @patch('freshmaker.consumer.get_global_consumer') + @patch("freshmaker.kojiservice.KojiService.get_task_info") + @patch("freshmaker.consumer.get_global_consumer") def test_koji_task_open(self, global_consumer, get_task_info): self.build.build_id = -10 consumer = self.create_consumer() global_consumer.return_value = consumer - get_task_info.return_value = {'state': koji.TASK_STATES['OPEN']} + get_task_info.return_value = {"state": koji.TASK_STATES["OPEN"]} hub = MagicMock() producer = FreshmakerProducer(hub) producer.check_unfinished_koji_tasks(db.session) self.assertRaises(queue.Empty, consumer.incoming.get, block=False) - @patch('freshmaker.kojiservice.KojiService.get_task_info') - @patch('freshmaker.consumer.get_global_consumer') + @patch("freshmaker.kojiservice.KojiService.get_task_info") + @patch("freshmaker.consumer.get_global_consumer") def test_koji_invalid_request(self, global_consumer, get_task_info): from sqlalchemy import select + self.build.build_id = -10 consumer = self.create_consumer() global_consumer.return_value = consumer - get_task_info.return_value = {'state': koji.TASK_STATES['OPEN']} + get_task_info.return_value = {"state": koji.TASK_STATES["OPEN"]} hub = MagicMock() producer = FreshmakerProducer(hub) diff --git a/tests/test_pulp.py b/tests/test_pulp.py index 1d270f97..531d6280 100644 --- a/tests/test_pulp.py +++ b/tests/test_pulp.py @@ -36,134 +36,127 @@ class TestPulp(helpers.FreshmakerTestCase): def setUp(self): super(TestPulp, self).setUp() - self.server_url = 'http://localhost/' + self.server_url = "http://localhost/" self.cert = ("path/to/crt", "path/to/key") - @patch('freshmaker.pulp.requests.post') + @patch("freshmaker.pulp.requests.post") def test_query_content_set_by_repo_ids(self, post): post.return_value.json.return_value = [ { - '_href': '/pulp/api/v2/repositories/rhel-7-workstation-rpms__7Workstation__x86_64/', - '_id': - { - '$oid': '53853a247bc9f61b85909cfe' - }, - 'id': 'rhel-7-workstation-rpms__7Workstation__x86_64', - 'notes': - { - 'content_set': 'rhel-7-workstation-rpms', + "_href": "/pulp/api/v2/repositories/rhel-7-workstation-rpms__7Workstation__x86_64/", + "_id": {"$oid": "53853a247bc9f61b85909cfe"}, + "id": "rhel-7-workstation-rpms__7Workstation__x86_64", + "notes": { + "content_set": "rhel-7-workstation-rpms", }, }, { - '_href': '/pulp/api/v2/repositories/rhel-7-hpc-node-rpms__7ComputeNode__x86_64/', - '_id': { - '$oid': '5384ee7c7bc9f619942a8f89', - }, - 'id': 'rhel-7-hpc-node-rpms__7ComputeNode__x86_64', - 'notes': { - 'content_set': 'rhel-7-hpc-node-rpms' + "_href": "/pulp/api/v2/repositories/rhel-7-hpc-node-rpms__7ComputeNode__x86_64/", + "_id": { + "$oid": "5384ee7c7bc9f619942a8f89", }, + "id": "rhel-7-hpc-node-rpms__7ComputeNode__x86_64", + "notes": {"content_set": "rhel-7-hpc-node-rpms"}, }, { - '_href': '/pulp/api/v2/repositories/rhel-7-desktop-rpms__7Client__x86_64/', - '_id': { - '$oid': '5384ee6a7bc9f619942a8bca', + "_href": 
"/pulp/api/v2/repositories/rhel-7-desktop-rpms__7Client__x86_64/", + "_id": { + "$oid": "5384ee6a7bc9f619942a8bca", }, - 'id': 'rhel-7-desktop-rpms__7Client__x86_64', - 'notes': { - 'content_set': 'rhel-7-desktop-rpms', - } - } + "id": "rhel-7-desktop-rpms__7Client__x86_64", + "notes": { + "content_set": "rhel-7-desktop-rpms", + }, + }, ] pulp = Pulp(self.server_url, cert=self.cert) repo_ids = [ - 'rhel-7-hpc-node-rpms__7ComputeNode__x86_64', - 'rhel-7-workstation-rpms__7Workstation__x86_64', - 'rhel-7-desktop-rpms__7Client__x86_64', + "rhel-7-hpc-node-rpms__7ComputeNode__x86_64", + "rhel-7-workstation-rpms__7Workstation__x86_64", + "rhel-7-desktop-rpms__7Client__x86_64", ] content_sets = pulp.get_content_set_by_repo_ids(repo_ids) post.assert_called_once_with( - '{}pulp/api/v2/repositories/search/'.format(self.server_url), - json.dumps({ - 'criteria': { - 'filters': { - 'id': {'$in': repo_ids}, - }, - 'fields': ['notes'], + "{}pulp/api/v2/repositories/search/".format(self.server_url), + json.dumps( + { + "criteria": { + "filters": { + "id": {"$in": repo_ids}, + }, + "fields": ["notes"], + } } - }), + ), cert=self.cert, - timeout=conf.requests_timeout) + timeout=conf.requests_timeout, + ) self.assertEqual( - ['rhel-7-workstation-rpms', - 'rhel-7-hpc-node-rpms', - 'rhel-7-desktop-rpms'], - content_sets) + ["rhel-7-workstation-rpms", "rhel-7-hpc-node-rpms", "rhel-7-desktop-rpms"], content_sets + ) - @patch('freshmaker.pulp.requests.post') + @patch("freshmaker.pulp.requests.post") def test_get_content_sets_by_ignoring_nonexisting_ones(self, post): post.return_value.json.return_value = [ { - '_href': '/pulp/api/v2/repositories/rhel-7-workstation-rpms__7Workstation__x86_64/', - '_id': - { - '$oid': '53853a247bc9f61b85909cfe' - }, - 'id': 'rhel-7-workstation-rpms__7Workstation__x86_64', - 'notes': - { - 'content_set': 'rhel-7-workstation-rpms', + "_href": "/pulp/api/v2/repositories/rhel-7-workstation-rpms__7Workstation__x86_64/", + "_id": {"$oid": "53853a247bc9f61b85909cfe"}, + "id": "rhel-7-workstation-rpms__7Workstation__x86_64", + "notes": { + "content_set": "rhel-7-workstation-rpms", }, }, { - '_href': '/pulp/api/v2/repositories/rhel-7-hpc-node-rpms__7ComputeNode__x86_64/', - '_id': { - '$oid': '5384ee7c7bc9f619942a8f89', + "_href": "/pulp/api/v2/repositories/rhel-7-hpc-node-rpms__7ComputeNode__x86_64/", + "_id": { + "$oid": "5384ee7c7bc9f619942a8f89", }, - 'id': 'rhel-7-hpc-node-rpms__7ComputeNode__x86_64', - 'notes': {}, + "id": "rhel-7-hpc-node-rpms__7ComputeNode__x86_64", + "notes": {}, }, { - '_href': '/pulp/api/v2/repositories/rhel-7-desktop-rpms__7Client__x86_64/', - '_id': { - '$oid': '5384ee6a7bc9f619942a8bca', + "_href": "/pulp/api/v2/repositories/rhel-7-desktop-rpms__7Client__x86_64/", + "_id": { + "$oid": "5384ee6a7bc9f619942a8bca", }, - 'id': 'rhel-7-desktop-rpms__7Client__x86_64', - 'notes': { - 'content_set': 'rhel-7-desktop-rpms', - } - } + "id": "rhel-7-desktop-rpms__7Client__x86_64", + "notes": { + "content_set": "rhel-7-desktop-rpms", + }, + }, ] pulp = Pulp(self.server_url, cert=self.cert) repo_ids = [ - 'rhel-7-hpc-node-rpms__7ComputeNode__x86_64', - 'rhel-7-workstation-rpms__7Workstation__x86_64', - 'rhel-7-desktop-rpms__7Client__x86_64', + "rhel-7-hpc-node-rpms__7ComputeNode__x86_64", + "rhel-7-workstation-rpms__7Workstation__x86_64", + "rhel-7-desktop-rpms__7Client__x86_64", ] content_sets = pulp.get_content_set_by_repo_ids(repo_ids) post.assert_called_once_with( - '{}pulp/api/v2/repositories/search/'.format(self.server_url), - json.dumps({ - 'criteria': { - 'filters': 
{ - 'id': {'$in': repo_ids}, - }, - 'fields': ['notes'], + "{}pulp/api/v2/repositories/search/".format(self.server_url), + json.dumps( + { + "criteria": { + "filters": { + "id": {"$in": repo_ids}, + }, + "fields": ["notes"], + } } - }), + ), cert=self.cert, - timeout=conf.requests_timeout) + timeout=conf.requests_timeout, + ) - self.assertEqual(['rhel-7-workstation-rpms', 'rhel-7-desktop-rpms'], - content_sets) + self.assertEqual(["rhel-7-workstation-rpms", "rhel-7-desktop-rpms"], content_sets) - @patch('freshmaker.pulp.requests.post') - @patch('freshmaker.pulp.requests.get') + @patch("freshmaker.pulp.requests.post") + @patch("freshmaker.pulp.requests.get") def test_retrying_calls(self, get, post): get.side_effect = exceptions.HTTPError("Connection error: get") post.side_effect = exceptions.HTTPError("Connection error: post") @@ -171,5 +164,5 @@ def test_retrying_calls(self, get, post): pulp = Pulp(self.server_url, cert=self.cert) with self.assertRaises(exceptions.HTTPError): - pulp.get_content_set_by_repo_ids(['test1', 'test2']) + pulp.get_content_set_by_repo_ids(["test1", "test2"]) self.assertGreater(post.call_count, 1) diff --git a/tests/test_pyxis.py b/tests/test_pyxis.py index ad93a7d7..e720e01d 100644 --- a/tests/test_pyxis.py +++ b/tests/test_pyxis.py @@ -38,10 +38,9 @@ class TestQueryPyxis(helpers.FreshmakerTestCase): def setUp(self): super().setUp() - self.patcher = helpers.Patcher( - 'freshmaker.pyxis.') + self.patcher = helpers.Patcher("freshmaker.pyxis.") - self.fake_server_url = 'https://pyxis.localhost/' + self.fake_server_url = "https://pyxis.localhost/" self.px = Pyxis(self.fake_server_url) self.response = create_autospec(requests.Response) self.response.status_code = HTTPStatus.OK @@ -50,19 +49,14 @@ def setUp(self): "Unable to parse the filter from URL.", "Please verify the 'Field Name' in the RSQL Expression.", "Please visit the following end-point for more details:", - " /v1/docs/filtering-language" + " /v1/docs/filtering-language", ], "status": 400, "title": "Bad Request", - "type": "about:blank" + "type": "about:blank", } - self.empty_response_page = { - "data": [], - "page": 0, - "page_size": 100, - "total": 0 - } + self.empty_response_page = {"data": [], "page": 0, "page_size": 100, "total": 0} self.indices = [ { @@ -73,7 +67,7 @@ def setUp(self): "last_updated_by": "meteor", "ocp_version": "4.5", "organization": "org", - "path": "path/to/registry:v4.5" + "path": "path/to/registry:v4.5", }, { "_id": "2", @@ -83,7 +77,7 @@ def setUp(self): "last_updated_by": "meteor", "ocp_version": "4.6", "organization": "org", - "path": "path/to/registry:v4.6" + "path": "path/to/registry:v4.6", }, { "_id": "2", @@ -93,8 +87,8 @@ def setUp(self): "last_updated_by": "meteor", "ocp_version": "4.6", "organization": "org", - "path": "" - } + "path": "", + }, ] self.bundles = [ @@ -105,25 +99,25 @@ def setUp(self): { "image": "registry/amq7/amq-streams-r-operator@sha256:111", "name": "strimzi-cluster-operator", - "digest": "sha256:111" + "digest": "sha256:111", }, { "image": "registry/amq7/amq-streams-kafka-24-r@sha256:222", "name": "strimzi-kafka-24", - "digest": "sha256:222" + "digest": "sha256:222", }, { "image": "registry/amq7/amq-streams-kafka-25-r@sha256:333", "name": "strimzi-kafka-25", - "digest": "sha256:333" + "digest": "sha256:333", }, { "image": "registry/amq7/amq-streams-bridge-r@sha256:444", "name": "strimzi-bridge", - "digest": "sha256:444" - } + "digest": "sha256:444", + }, ], - "version_original": "1.5.3" + "version_original": "1.5.3", }, { "channel_name": 
"streams-1.5.x", @@ -132,25 +126,25 @@ def setUp(self): { "image": "registry/amq7/amq-streams-r-operator@sha256:555", "name": "strimzi-cluster-operator", - "digest": "sha256:555" + "digest": "sha256:555", }, { "image": "registry/amq7/amq-streams-kafka-24-r@sha256:666", "name": "strimzi-kafka-24", - "digest": "sha256:666" + "digest": "sha256:666", }, { "image": "registry/amq7/amq-streams-kafka-25-r@sha256:777", "name": "strimzi-kafka-25", - "digest": "sha256:777" + "digest": "sha256:777", }, { "image": "registry/amq7/amq-streams-bridge-r@sha256:888", "name": "strimzi-bridge", - "digest": "sha256:888" - } + "digest": "sha256:888", + }, ], - "version_original": "1.5.4" + "version_original": "1.5.4", }, { "channel_name": "stable", @@ -159,25 +153,25 @@ def setUp(self): { "image": "registry/amq7/amq--operator@sha256:999", "name": "strimzi-cluster-operator", - "digest": "sha256:999" + "digest": "sha256:999", }, { "image": "registry/amq7/kafka-24-r@sha256:aaa", "name": "strimzi-kafka-24", - "digest": "sha256:aaa" + "digest": "sha256:aaa", }, { "image": "registry/amq7/kafka-25-r@sha256:bbb", "name": "strimzi-kafka-25", - "digest": "sha256:bbb" + "digest": "sha256:bbb", }, { "image": "registry/amq7/amq-streams-bridge-r@sha256:ccc", "name": "strimzi-bridge", - "digest": "sha256:ccc" - } + "digest": "sha256:ccc", + }, ], - "version_original": "1.5.3" + "version_original": "1.5.3", }, { "channel_name": "stable", @@ -186,15 +180,15 @@ def setUp(self): { "image": "registry/tracing/j-operator:1.13.2", "name": "j-1.13.2-annotation", - "digest": "sha256:fff" + "digest": "sha256:fff", }, { "image": "registry/tracing/j-operator:1.13.2", "name": "j-operator", - "digest": "sha256:ffff" - } + "digest": "sha256:ffff", + }, ], - "version": "1.5.2" + "version": "1.5.2", }, { "channel_name": "quay-v3.3", @@ -203,15 +197,15 @@ def setUp(self): { "image": "registry/quay/quay-operator@sha256:ddd", "name": "quay-operator-annotation", - "digest": "sha256:ddd" + "digest": "sha256:ddd", }, { "image": "registry/quay/quay-security-r-operator@sha256:eee", "name": "container-security-operator", - "digest": "sha256:eee" - } + "digest": "sha256:eee", + }, ], - "version": "3.3.1" + "version": "3.3.1", }, ] @@ -221,7 +215,7 @@ def setUp(self): "build": "s2i-1-2", "completion_date": "2020-08-12T11:31:39+00:00", "nvra": "s2i-1-2.ppc64le", - "package": "s2i-core-container" + "package": "s2i-core-container", }, "repositories": [ { @@ -230,7 +224,7 @@ def setUp(self): "published": False, "registry": "reg1", "repository": "repo1", - "tags": [{"name": "tag0"}] + "tags": [{"name": "tag0"}], }, { "manifest_list_digest": "sha256:1112", @@ -238,16 +232,16 @@ def setUp(self): "published": True, "registry": "reg2", "repository": "repo2", - "tags": [{"name": "tag1"}, {"name": "tag2"}] - } - ] + "tags": [{"name": "tag1"}, {"name": "tag2"}], + }, + ], }, { "brew": { "build": "s2i-1-2", "completion_date": "2020-08-12T11:31:39+00:00", "nvra": "s2i-1-2.s390x", - "package": "s2i-core-container" + "package": "s2i-core-container", }, "repositories": [ { @@ -256,16 +250,16 @@ def setUp(self): "published": True, "registry": "reg2", "repository": "repo2", - "tags": [{"name": "tag2"}] + "tags": [{"name": "tag2"}], } - ] + ], }, { "brew": { "build": "s2i-1-2", "completion_date": "2020-08-12T11:31:39+00:00", "nvra": "s2i-1-2.amd64", - "package": "s2i-core-container" + "package": "s2i-core-container", }, "repositories": [ { @@ -274,16 +268,16 @@ def setUp(self): "published": True, "registry": "reg3", "repository": "repo3", - "tags": [{"name": "latest"}] + 
"tags": [{"name": "latest"}], } - ] + ], }, { "brew": { "build": "s2i-1-2", "completion_date": "2020-08-12T11:31:39+00:00", "nvra": "s2i-1-2.arm64", - "package": "s2i-core-container" + "package": "s2i-core-container", }, "repositories": [ { @@ -292,10 +286,10 @@ def setUp(self): "published": True, "registry": "reg4", "repository": "repo4", - "tags": [{"name": "tag1"}] + "tags": [{"name": "tag1"}], } - ] - } + ], + }, ] def tearDown(self): @@ -314,82 +308,87 @@ def side_effect(*args, **kwargs): args = deepcopy(args) kwargs = deepcopy(kwargs) return new_mock(*args, **kwargs) + mock.side_effect = side_effect return new_mock - @patch('freshmaker.pyxis.HTTPKerberosAuth') - @patch('freshmaker.pyxis.requests.get') + @patch("freshmaker.pyxis.HTTPKerberosAuth") + @patch("freshmaker.pyxis.requests.get") def test_make_request(self, get, auth): get.return_value = self.response - test_params = {'key1': 'val1'} - self.px._make_request('test', test_params) + test_params = {"key1": "val1"} + self.px._make_request("test", test_params) - get_url = self.fake_server_url + 'v1/test' + get_url = self.fake_server_url + "v1/test" self.response.json.assert_called_once() - test_params['page_size'] = "100" - get.assert_called_once_with(get_url, params=test_params, auth=auth(), - timeout=conf.net_timeout) + test_params["page_size"] = "100" + get.assert_called_once_with( + get_url, params=test_params, auth=auth(), timeout=conf.net_timeout + ) - @patch('freshmaker.pyxis.HTTPKerberosAuth') - @patch('freshmaker.pyxis.requests.get') + @patch("freshmaker.pyxis.HTTPKerberosAuth") + @patch("freshmaker.pyxis.requests.get") def test_make_request_error(self, get, auth): get.return_value = self.response self.response.ok = False self.response.json.side_effect = ValueError - self.response.json.text = 'test message' + self.response.json.text = "test message" self.response.request = Mock() - self.response.request.url = 'test/url' + self.response.request.url = "test/url" self.response.headers = {"trace_id": "123"} - with self.assertRaises(PyxisRequestError, msg='test message') as cm: - self.px._make_request('test', {}) + with self.assertRaises(PyxisRequestError, msg="test message") as cm: + self.px._make_request("test", {}) pyxis_exception = cm.exception self.assertEqual(pyxis_exception.trace_id, "123") - @patch('freshmaker.pyxis.HTTPKerberosAuth') - @patch('freshmaker.pyxis.Pyxis._make_request') + @patch("freshmaker.pyxis.HTTPKerberosAuth") + @patch("freshmaker.pyxis.Pyxis._make_request") def test_pagination(self, request, auth): my_request = self.copy_call_args(request) my_request.side_effect = [ {"page": 0, "data": ["fake_data1"]}, {"page": 1, "data": ["fake_data2"]}, - {"page": 2, "data": []} + {"page": 2, "data": []}, ] - test_params = {'include': ['total', 'field1']} - entity = 'test' + test_params = {"include": ["total", "field1"]} + entity = "test" auth.return_value = 1 self.px._pagination(entity, test_params) self.assertEqual(request.call_count, 3) - default_params = {'page_size': '100', 'include': ['total', 'field1']} - calls = [call('test', params={**default_params, 'page': 0}), - call('test', params={**default_params, 'page': 1}), - call('test', params={**default_params, 'page': 2}) - ] + default_params = {"page_size": "100", "include": ["total", "field1"]} + calls = [ + call("test", params={**default_params, "page": 0}), + call("test", params={**default_params, "page": 1}), + call("test", params={**default_params, "page": 2}), + ] my_request.assert_has_calls(calls) - @patch.object(conf, 'pyxis_index_image_organizations', 
new=['org1', 'org2']) - @patch('freshmaker.pyxis.Pyxis.ocp_is_released', return_value=True) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch.object(conf, "pyxis_index_image_organizations", new=["org1", "org2"]) + @patch("freshmaker.pyxis.Pyxis.ocp_is_released", return_value=True) + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_operator_indices(self, page, is_released): page.return_value = self.indices indices = self.px.get_operator_indices() page.assert_called_once_with( - 'operators/indices', {'filter': 'organization==org1 or organization==org2'}) + "operators/indices", {"filter": "organization==org1 or organization==org2"} + ) self.assertEqual(len(indices), 3) - @patch.object(conf, 'pyxis_index_image_organizations', new=['org1', 'org2']) - @patch('freshmaker.pyxis.Pyxis.ocp_is_released', return_value=True) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch.object(conf, "pyxis_index_image_organizations", new=["org1", "org2"]) + @patch("freshmaker.pyxis.Pyxis.ocp_is_released", return_value=True) + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_index_paths(self, page, is_released): page.return_value = self.indices paths = self.px.get_index_paths() page.assert_called_once_with( - 'operators/indices', {'filter': 'organization==org1 or organization==org2'}) + "operators/indices", {"filter": "organization==org1 or organization==org2"} + ) self.assertEqual(len(paths), 2) - self.assertTrue('path/to/registry:v4.5' in paths) - self.assertTrue('path/to/registry:v4.6' in paths) + self.assertTrue("path/to/registry:v4.5" in paths) + self.assertTrue("path/to/registry:v4.6" in paths) @patch.object(conf, "product_pages_api_url", new="http://pp.example.com/api") @patch("freshmaker.pyxis.Pyxis._pagination") @@ -397,20 +396,17 @@ def test_get_operator_indices_with_unreleased_filtered_out(self, page): pp_mock_data = [ { "url": "http://pp.example.com/api/releases/openshift-4.5/schedule-tasks/", - "json": [{"name": "GA", "date_finish": "2020-02-05"}] + "json": [{"name": "GA", "date_finish": "2020-02-05"}], }, { "url": "http://pp.example.com/api/releases/openshift-4.6/schedule-tasks/", - "json": [{"name": "GA", "date_finish": "2020-05-23"}] + "json": [{"name": "GA", "date_finish": "2020-05-23"}], }, { "url": "http://pp.example.com/api/releases/openshift-4.8/schedule-tasks/", - "json": [{"name": "GA", "date_finish": "2021-08-12"}] + "json": [{"name": "GA", "date_finish": "2021-08-12"}], }, - { - "url": "http://pp.example.com/api/releases/openshift-4.9/schedule-tasks/", - "json": [] - } + {"url": "http://pp.example.com/api/releases/openshift-4.9/schedule-tasks/", "json": []}, ] page.return_value = self.indices + [ { @@ -421,7 +417,7 @@ def test_get_operator_indices_with_unreleased_filtered_out(self, page): "last_updated_by": "meteor", "ocp_version": "4.8", "organization": "org", - "path": "" + "path": "", } ] now = datetime(year=2020, month=12, day=15, hour=0, minute=0, second=0) @@ -437,10 +433,10 @@ def test_get_operator_indices_with_unreleased_filtered_out(self, page): assert "4.8" not in [i["ocp_version"] for i in indices] assert "4.9" not in [i["ocp_version"] for i in indices] - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_bundles_by_related_image_digest(self, page): self.px.get_bundles_by_related_image_digest( - 'sha256:111', ["path/to/registry:v4.5", "path/to/registry:v4.6"] + "sha256:111", ["path/to/registry:v4.5", "path/to/registry:v4.6"] ) request_params = { "include": 
"data.channel_name,data.version_original,data.related_images," @@ -448,21 +444,20 @@ def test_get_bundles_by_related_image_digest(self, page): "filter": "related_images.digest==sha256:111 and latest_in_channel==true and " "source_index_container_path=in=(path/to/registry:v4.5,path/to/registry:v4.6)", } - page.assert_called_once_with('operators/bundles', request_params) + page.assert_called_once_with("operators/bundles", request_params) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_manifest_list_digest_by_nvr(self, page): page.return_value = self.images - digest = self.px.get_manifest_list_digest_by_nvr('s2i-1-2') + digest = self.px.get_manifest_list_digest_by_nvr("s2i-1-2") - expected_digest = 'sha256:1112' + expected_digest = "sha256:1112" self.assertEqual(digest, expected_digest) page.assert_called_once_with( - 'images/nvr/s2i-1-2', - {'include': 'data.brew,data.repositories'} + "images/nvr/s2i-1-2", {"include": "data.brew,data.repositories"} ) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_manifest_list_digest_by_nvr_unpublished(self, page): page.return_value = [ { @@ -470,7 +465,7 @@ def test_get_manifest_list_digest_by_nvr_unpublished(self, page): "build": "s2i-1-2", "completion_date": "2020-08-12T11:31:39+00:00", "nvra": "s2i-1-2.arm64", - "package": "s2i-core-container" + "package": "s2i-core-container", }, "repositories": [ { @@ -478,197 +473,221 @@ def test_get_manifest_list_digest_by_nvr_unpublished(self, page): "published": False, "registry": "reg4", "repository": "repo4", - "tags": [{"name": "tag1"}] + "tags": [{"name": "tag1"}], } - ] + ], } ] - digest = self.px.get_manifest_list_digest_by_nvr('s2i-1-2', False) + digest = self.px.get_manifest_list_digest_by_nvr("s2i-1-2", False) - expected_digest = 'sha256:4444' + expected_digest = "sha256:4444" self.assertEqual(digest, expected_digest) page.assert_called_once_with( - 'images/nvr/s2i-1-2', - {'include': 'data.brew,data.repositories'} + "images/nvr/s2i-1-2", {"include": "data.brew,data.repositories"} ) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_manifest_schema2_digest_by_nvr(self, page): page.return_value = self.images - digests = self.px.get_manifest_schema2_digests_by_nvr('s2i-1-2') + digests = self.px.get_manifest_schema2_digests_by_nvr("s2i-1-2") expected_digests = [ - 'sha256:22224444', 'sha256:33336666', 'sha256:11112222', 'sha256:44448888' + "sha256:22224444", + "sha256:33336666", + "sha256:11112222", + "sha256:44448888", ] self.assertEqual(set(digests), set(expected_digests)) page.assert_called_once_with( - 'images/nvr/s2i-1-2', - {'include': 'data.brew,data.repositories'} + "images/nvr/s2i-1-2", {"include": "data.brew,data.repositories"} ) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_bundles_by_digests(self, page): page.return_value = {"some_bundle"} digests = ["digest1", "digest2"] self.px.get_bundles_by_digests(digests) - page.assert_called_once_with("operators/bundles", { - "include": "data.version_original,data.csv_name", - "filter": "bundle_path_digest==digest1 or bundle_path_digest==digest2" - }) + page.assert_called_once_with( + "operators/bundles", + { + "include": "data.version_original,data.csv_name", + "filter": "bundle_path_digest==digest1 or bundle_path_digest==digest2", + }, + ) - @patch('freshmaker.pyxis.Pyxis.get_manifest_schema2_digests_by_nvr') - 
@patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis.get_manifest_schema2_digests_by_nvr") + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_bundles_by_nvr(self, page, get_digests): get_digests.return_value = ["some_digest"] self.px.get_bundles_by_nvr("some-nvr") - page.assert_called_once_with("operators/bundles", { - "include": "data.version_original,data.csv_name", - "filter": "bundle_path_digest==some_digest" - }) + page.assert_called_once_with( + "operators/bundles", + { + "include": "data.version_original,data.csv_name", + "filter": "bundle_path_digest==some_digest", + }, + ) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_get_images_by_nvr(self, page): self.px.get_images_by_nvr("some-nvr") - page.assert_called_once_with("images/nvr/some-nvr", { - "include": "data.architecture,data.brew,data.repositories" - }) + page.assert_called_once_with( + "images/nvr/some-nvr", {"include": "data.architecture,data.brew,data.repositories"} + ) - @patch('freshmaker.pyxis.requests.get') + @patch("freshmaker.pyxis.requests.get") def test_get_images_by_digest(self, mock_get): image_1 = { - 'brew': { - 'build': 'foo-operator-2.1-2', - 'nvra': 'foo-operator-2.1-2.amd64', - 'package': 'foo', + "brew": { + "build": "foo-operator-2.1-2", + "nvra": "foo-operator-2.1-2.amd64", + "package": "foo", }, - 'repositories': [ + "repositories": [ { - 'content_advisory_ids': [], - 'manifest_list_digest': 'sha256:12345', - 'manifest_schema2_digest': 'sha256:23456', - 'published': True, - 'registry': 'registry.example.com', - 'repository': 'foo/foo-operator-bundle', - 'tags': [{'name': '2'}, {'name': '2.1'}], + "content_advisory_ids": [], + "manifest_list_digest": "sha256:12345", + "manifest_schema2_digest": "sha256:23456", + "published": True, + "registry": "registry.example.com", + "repository": "foo/foo-operator-bundle", + "tags": [{"name": "2"}, {"name": "2.1"}], } ], } fake_responses = [Mock(ok=True), Mock(ok=True)] - fake_responses[0].json.return_value = {'data': [image_1]} - fake_responses[1].json.return_value = {'data': []} + fake_responses[0].json.return_value = {"data": [image_1]} + fake_responses[1].json.return_value = {"data": []} mock_get.side_effect = fake_responses - digest = 'sha256:23456' + digest = "sha256:23456" images = self.px.get_images_by_digest(digest) self.assertListEqual(images, [image_1]) - @patch('freshmaker.pyxis.requests.get') + @patch("freshmaker.pyxis.requests.get") def test_get_auto_rebuild_tags(self, mock_get): mock_get.return_value = Mock(ok=True) mock_get.return_value.json.return_value = { - '_links': {}, - 'auto_rebuild_tags': [ - '2.3', - 'latest' - ] + "_links": {}, + "auto_rebuild_tags": ["2.3", "latest"], } - tags = self.px.get_auto_rebuild_tags('registry.example.com', 'foo/foo-operator-bundle') - self.assertListEqual(tags, ['2.3', 'latest']) + tags = self.px.get_auto_rebuild_tags("registry.example.com", "foo/foo-operator-bundle") + self.assertListEqual(tags, ["2.3", "latest"]) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_is_bundle_true(self, page): - page.return_value = [{ - "parsed_data": { - "labels": [ - {"name": "architecture", "value": "x86_64"}, - {"name": "com.redhat.delivery.operator.bundle", "value": "true"}, - {"name": "com.redhat.openshift.versions", "value": "v4.6"}, - {"name": "version", "value": "2.11.0"}, - ] - }, - }] + page.return_value = [ + { + "parsed_data": { + "labels": [ + {"name": "architecture", "value": 
"x86_64"}, + {"name": "com.redhat.delivery.operator.bundle", "value": "true"}, + {"name": "com.redhat.openshift.versions", "value": "v4.6"}, + {"name": "version", "value": "2.11.0"}, + ] + }, + } + ] is_bundle = self.px.is_bundle("foobar-bundle-1-234") self.assertTrue(is_bundle) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_is_bundle_false(self, page): - page.return_value = [{ - "parsed_data": { - "labels": [ - {"name": "architecture", "value": "x86_64"}, - {"name": "com.redhat.openshift.versions", "value": "v4.6"}, - {"name": "version", "value": "2.11.0"}, - ] - }, - }] + page.return_value = [ + { + "parsed_data": { + "labels": [ + {"name": "architecture", "value": "x86_64"}, + {"name": "com.redhat.openshift.versions", "value": "v4.6"}, + {"name": "version", "value": "2.11.0"}, + ] + }, + } + ] is_bundle = self.px.is_bundle("foobar-bundle-1-234") self.assertFalse(is_bundle) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_is_hotfix_image_true(self, page): - page.return_value = [{ - "parsed_data": { - "labels": [ - {"name": "com.redhat.hotfix", "value": "bz1234567"}, - {"name": "architecture", "value": "ppc64le"}, - {"name": "brew", "value": "brew_value_1"}, - {"name": "repositories", "value": "repositories_value_1"}, - ] - }, - }] + page.return_value = [ + { + "parsed_data": { + "labels": [ + {"name": "com.redhat.hotfix", "value": "bz1234567"}, + {"name": "architecture", "value": "ppc64le"}, + {"name": "brew", "value": "brew_value_1"}, + {"name": "repositories", "value": "repositories_value_1"}, + ] + }, + } + ] is_hotfix_image = self.px.is_hotfix_image("foobar-bundle-1-234") self.assertTrue(is_hotfix_image) - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis._pagination") def test_is_hotfix_image_false(self, page): - page.return_value = [{ - "parsed_data": { - "labels": [ - {"name": "not_com.redhat.hotfix", "value": "bz1234567"}, - {"name": "architecture", "value": "ppc64le"}, - {"name": "brew", "value": "brew_value_2"}, - {"name": "repositories", "value": "repositories_value_2"}, - ] - }, - }] + page.return_value = [ + { + "parsed_data": { + "labels": [ + {"name": "not_com.redhat.hotfix", "value": "bz1234567"}, + {"name": "architecture", "value": "ppc64le"}, + {"name": "brew", "value": "brew_value_2"}, + {"name": "repositories", "value": "repositories_value_2"}, + ] + }, + } + ] is_hotfix_image = self.px.is_hotfix_image("foobar-bundle-1-234") self.assertFalse(is_hotfix_image) - @patch('freshmaker.pyxis.Pyxis.get_auto_rebuild_tags') - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis.get_auto_rebuild_tags") + @patch("freshmaker.pyxis.Pyxis._pagination") def test_image_is_tagged_auto_rebuild(self, page, get_auto_rebuild_tags): page.return_value = [ - {'_links': {}, - 'repositories': [{'_links': {}, - 'published': True, - 'registry': 'some.registry.com', - 'repository': 'product1/thunderbird-flatpak', - 'tags': [{'_links': {}, 'name': 'flatpak-8040020210719090808.1'}], - }] - }] - get_auto_rebuild_tags.return_value = ['flatpak-8040020210719090808.1', - 'latest' - ] - is_tagged_auto_rebuild = self.px.image_is_tagged_auto_rebuild('thunderbird-flatpak-container-flatpak-8040020210719090808.1') + { + "_links": {}, + "repositories": [ + { + "_links": {}, + "published": True, + "registry": "some.registry.com", + "repository": "product1/thunderbird-flatpak", + "tags": [{"_links": {}, "name": "flatpak-8040020210719090808.1"}], + } + 
], + } + ] + get_auto_rebuild_tags.return_value = ["flatpak-8040020210719090808.1", "latest"] + is_tagged_auto_rebuild = self.px.image_is_tagged_auto_rebuild( + "thunderbird-flatpak-container-flatpak-8040020210719090808.1" + ) self.assertEqual(is_tagged_auto_rebuild, True) - @patch('freshmaker.pyxis.Pyxis.get_auto_rebuild_tags') - @patch('freshmaker.pyxis.Pyxis._pagination') + @patch("freshmaker.pyxis.Pyxis.get_auto_rebuild_tags") + @patch("freshmaker.pyxis.Pyxis._pagination") def test_image_is_not_tagged_auto_rebuild(self, page, get_auto_rebuild_tags): page.return_value = [ - {'_links': {}, - 'repositories': [{'_links': {}, - 'published': True, - 'registry': 'some.registry.com', - 'repository': 'product1/thunderbird-flatpak', - 'tags': [{'_links': {}, 'name': 'flatpak-8040020210719090808.1'}], - }] - }] - get_auto_rebuild_tags.return_value = ['latest'] - is_tagged_auto_rebuild = self.px.image_is_tagged_auto_rebuild('thunderbird-flatpak-container-flatpak-8040020210719090808.1') + { + "_links": {}, + "repositories": [ + { + "_links": {}, + "published": True, + "registry": "some.registry.com", + "repository": "product1/thunderbird-flatpak", + "tags": [{"_links": {}, "name": "flatpak-8040020210719090808.1"}], + } + ], + } + ] + get_auto_rebuild_tags.return_value = ["latest"] + is_tagged_auto_rebuild = self.px.image_is_tagged_auto_rebuild( + "thunderbird-flatpak-container-flatpak-8040020210719090808.1" + ) self.assertEqual(is_tagged_auto_rebuild, False) diff --git a/tests/test_utils.py b/tests/test_utils.py index 67ccfa0c..71f745ef 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -51,7 +51,6 @@ def test_is_valid_ocp_versions_range(): class TestSortedByNVR(helpers.FreshmakerTestCase): - def test_simple_list(self): lst = ["foo-1-10", "foo-1-2", "foo-1-1"] expected = ["foo-1-1", "foo-1-2", "foo-1-10"] diff --git a/tests/test_views.py b/tests/test_views.py index bab268c8..69f2d0a4 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -46,16 +46,17 @@ def user_loader(username): class ViewBaseTest(helpers.ModelsTestCase): def setUp(self): super(ViewBaseTest, self).setUp() - patched_permissions = defaultdict(lambda: {'groups': [], 'users': []}) - patched_permissions['admin'] = {'groups': ['admin'], 'users': ['root']} - patched_permissions['manual_rebuilder'] = {'groups': [], 'users': ['tom_hanks']} + patched_permissions = defaultdict(lambda: {"groups": [], "users": []}) + patched_permissions["admin"] = {"groups": ["admin"], "users": ["root"]} + patched_permissions["manual_rebuilder"] = {"groups": [], "users": ["tom_hanks"]} self.patched_permissions = patch.object( - freshmaker.auth.conf, 'permissions', new=patched_permissions) + freshmaker.auth.conf, "permissions", new=patched_permissions + ) self.patched_permissions.start() self.patch_oidc_base_namespace = patch.object( - freshmaker.auth.conf, 'oidc_base_namespace', - new='http://example.com/') + freshmaker.auth.conf, "oidc_base_namespace", new="http://example.com/" + ) self.patch_oidc_base_namespace.start() self.client = app.test_client() @@ -69,15 +70,18 @@ def tearDown(self): self.patch_oidc_base_namespace.stop() @contextlib.contextmanager - def test_request_context(self, user=None, groups=None, auth_backend=None, - oidc_scopes=None, **kwargs): + def test_request_context( + self, user=None, groups=None, auth_backend=None, oidc_scopes=None, **kwargs + ): with app.test_request_context(**kwargs): patch_auth_backend = None if user is not None: # authentication is disabled with auth_backend=noauth patch_auth_backend = 
patch.object( - freshmaker.auth.conf, 'auth_backend', - new=auth_backend if auth_backend else "kerberos") + freshmaker.auth.conf, + "auth_backend", + new=auth_backend if auth_backend else "kerberos", + ) patch_auth_backend.start() if not models.User.find_user_by_name(user): models.User.create_user(username=user) @@ -98,8 +102,8 @@ def test_request_context(self, user=None, groups=None, auth_backend=None, oidc_scopes = oidc_scopes if oidc_scopes else [] oidc_namespace = freshmaker.auth.conf.oidc_base_namespace flask.g.oidc_scopes = [ - '{0}{1}'.format(oidc_namespace, scope) for scope in - oidc_scopes] + "{0}{1}".format(oidc_namespace, scope) for scope in oidc_scopes + ] try: yield finally: @@ -119,38 +123,44 @@ def setUp(self): self.client = app.test_client() def _init_data(self): - event = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000001", "101", events.TestingEvent) + event = models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000001", "101", events.TestingEvent + ) build = models.ArtifactBuild.create(db.session, event, "ed", "module", 1234) build.build_args = '{"key": "value"}' models.ArtifactBuild.create(db.session, event, "mksh", "module", 1235) models.ArtifactBuild.create(db.session, event, "bash", "module", 1236) - models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000002", "102", events.TestingEvent) + models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000002", "102", events.TestingEvent + ) db.session.commit() db.session.expire_all() def test_query_build(self): - resp = self.client.get('/api/1/builds/1') + resp = self.client.get("/api/1/builds/1") data = resp.json - self.assertEqual(data['id'], 1) - self.assertEqual(data['name'], 'ed') - self.assertEqual(data['type'], ArtifactType.MODULE.value) - self.assertEqual(data['state'], ArtifactBuildState.BUILD.value) - self.assertEqual(data['event_id'], 1) - self.assertEqual(data['build_id'], 1234) - self.assertEqual(data['build_args'], {"key": "value"}) - self.assertEqual(data['rebuild_reason'], "unknown") + self.assertEqual(data["id"], 1) + self.assertEqual(data["name"], "ed") + self.assertEqual(data["type"], ArtifactType.MODULE.value) + self.assertEqual(data["state"], ArtifactBuildState.BUILD.value) + self.assertEqual(data["event_id"], 1) + self.assertEqual(data["build_id"], 1234) + self.assertEqual(data["build_args"], {"key": "value"}) + self.assertEqual(data["rebuild_reason"], "unknown") def test_query_builds(self): - resp = self.client.get('/api/1/builds/') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/") + builds = resp.json["items"] self.assertEqual(len(builds), 3) - for name in ['ed', 'mksh', 'bash']: - self.assertIn(name, [b['name'] for b in builds]) + for name in ["ed", "mksh", "bash"]: + self.assertIn(name, [b["name"] for b in builds]) for build_id in [1234, 1235, 1236]: - self.assertIn(build_id, [b['build_id'] for b in builds]) + self.assertIn(build_id, [b["build_id"] for b in builds]) def test_query_builds_order_by_default(self): - event = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent) + event = models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent + ) build9 = models.ArtifactBuild.create(db.session, event, "make", "module", 1237) build9.id = 9 db.session.commit() @@ -158,14 +168,16 @@ def test_query_builds_order_by_default(self): build8.id = 8 db.session.commit() db.session.expire_all() - resp = 
self.client.get('/api/1/builds/') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/") + builds = resp.json["items"] self.assertEqual(len(builds), 5) for id, build in zip([9, 8, 3, 2, 1], builds): - self.assertEqual(id, build['id']) + self.assertEqual(id, build["id"]) def test_query_builds_order_by_id_asc(self): - event = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent) + event = models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent + ) build9 = models.ArtifactBuild.create(db.session, event, "make", "module", 1237) build9.id = 9 db.session.commit() @@ -173,14 +185,16 @@ def test_query_builds_order_by_id_asc(self): build8.id = 8 db.session.commit() db.session.expire_all() - resp = self.client.get('/api/1/builds/?order_by=id') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?order_by=id") + builds = resp.json["items"] self.assertEqual(len(builds), 5) for id, build in zip([1, 2, 3, 8, 9], builds): - self.assertEqual(id, build['id']) + self.assertEqual(id, build["id"]) def test_query_builds_order_by_build_id_desc(self): - event = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent) + event = models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent + ) build9 = models.ArtifactBuild.create(db.session, event, "make", "module", 1237) build9.id = 9 db.session.commit() @@ -188,245 +202,277 @@ def test_query_builds_order_by_build_id_desc(self): build8.id = 8 db.session.commit() db.session.expire_all() - resp = self.client.get('/api/1/builds/?order_by=-build_id') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?order_by=-build_id") + builds = resp.json["items"] self.assertEqual(len(builds), 5) for id, build in zip([8, 9, 3, 2, 1], builds): - self.assertEqual(id, build['id']) + self.assertEqual(id, build["id"]) def test_query_builds_order_by_unknown_key(self): - resp = self.client.get('/api/1/builds/?order_by=-foo') + resp = self.client.get("/api/1/builds/?order_by=-foo") data = resp.json - self.assertEqual(data['status'], 400) - self.assertEqual(data['error'], 'Bad Request') - self.assertTrue(data['message'].startswith( - "An invalid order_by key was suplied, allowed keys are")) + self.assertEqual(data["status"], 400) + self.assertEqual(data["error"], "Bad Request") + self.assertTrue( + data["message"].startswith("An invalid order_by key was suplied, allowed keys are") + ) def test_query_builds_by_name(self): - resp = self.client.get('/api/1/builds/?name=ed') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?name=ed") + builds = resp.json["items"] self.assertEqual(len(builds), 1) - self.assertEqual(builds[0]['name'], 'ed') + self.assertEqual(builds[0]["name"], "ed") - resp = self.client.get('/api/1/builds/?name=mksh') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?name=mksh") + builds = resp.json["items"] self.assertEqual(len(builds), 1) - self.assertEqual(builds[0]['name'], 'mksh') + self.assertEqual(builds[0]["name"], "mksh") - resp = self.client.get('/api/1/builds/?name=nonexist') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?name=nonexist") + builds = resp.json["items"] self.assertEqual(len(builds), 0) def test_query_builds_by_type(self): - resp = self.client.get('/api/1/builds/?type=0') - builds = resp.json['items'] + resp = 
self.client.get("/api/1/builds/?type=0") + builds = resp.json["items"] self.assertEqual(len(builds), 0) - resp = self.client.get('/api/1/builds/?type=1') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?type=1") + builds = resp.json["items"] self.assertEqual(len(builds), 0) - resp = self.client.get('/api/1/builds/?type=2') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?type=2") + builds = resp.json["items"] self.assertEqual(len(builds), 3) - resp = self.client.get('/api/1/builds/?type=module') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?type=module") + builds = resp.json["items"] self.assertEqual(len(builds), 3) def test_query_builds_by_invalid_type(self): - resp = self.client.get('/api/1/builds/?type=100') + resp = self.client.get("/api/1/builds/?type=100") data = resp.json self.assertEqual(data["status"], 400) - self.assertEqual(data["message"], - "An invalid artifact type was supplied") + self.assertEqual(data["message"], "An invalid artifact type was supplied") def test_query_builds_by_state(self): - resp = self.client.get('/api/1/builds/?state=0') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?state=0") + builds = resp.json["items"] self.assertEqual(len(builds), 3) def test_query_builds_by_invalid_state(self): - resp = self.client.get('/api/1/builds/?state=100') + resp = self.client.get("/api/1/builds/?state=100") data = resp.json self.assertEqual(data["status"], 400) - self.assertEqual(data["message"], - "An invalid state was supplied") + self.assertEqual(data["message"], "An invalid state was supplied") def test_query_build_by_event_type_id(self): - event1 = models.Event.create(db.session, - "2018-00000000-0000-0000-0000-000000000001", - "testmodule/master/?#0000000000000000000000000000000000000001", - events.GitModuleMetadataChangeEvent) + event1 = models.Event.create( + db.session, + "2018-00000000-0000-0000-0000-000000000001", + "testmodule/master/?#0000000000000000000000000000000000000001", + events.GitModuleMetadataChangeEvent, + ) build1 = models.ArtifactBuild.create(db.session, event1, "testmodule", "module", 2345) - event2 = models.Event.create(db.session, - "2018-00000000-0000-0000-0000-000000000002", - "2345", - events.MBSModuleStateChangeEvent) + event2 = models.Event.create( + db.session, + "2018-00000000-0000-0000-0000-000000000002", + "2345", + events.MBSModuleStateChangeEvent, + ) models.ArtifactBuild.create(db.session, event2, "testmodule2", "module", 2346, build1) - event3 = models.Event.create(db.session, - "2018-00000000-0000-0000-0000-000000000003", - "testmodule3/master/?#0000000000000000000000000000000000000001", - events.GitModuleMetadataChangeEvent) + event3 = models.Event.create( + db.session, + "2018-00000000-0000-0000-0000-000000000003", + "testmodule3/master/?#0000000000000000000000000000000000000001", + events.GitModuleMetadataChangeEvent, + ) models.ArtifactBuild.create(db.session, event3, "testmodule3", "module", 2347, build1) db.session.commit() - resp = self.client.get('/api/1/builds/?event_type_id=%s' % models.EVENT_TYPES[events.TestingEvent]) - builds = resp.json['items'] + resp = self.client.get( + "/api/1/builds/?event_type_id=%s" % models.EVENT_TYPES[events.TestingEvent] + ) + builds = resp.json["items"] self.assertEqual(len(builds), 3) - resp = self.client.get('/api/1/builds/?event_type_id=%s' % models.EVENT_TYPES[events.GitModuleMetadataChangeEvent]) - builds = resp.json['items'] + resp = self.client.get( + "/api/1/builds/?event_type_id=%s" + % 
models.EVENT_TYPES[events.GitModuleMetadataChangeEvent] + ) + builds = resp.json["items"] self.assertEqual(len(builds), 2) - resp = self.client.get('/api/1/builds/?event_type_id=%s' % models.EVENT_TYPES[events.MBSModuleStateChangeEvent]) - builds = resp.json['items'] + resp = self.client.get( + "/api/1/builds/?event_type_id=%s" % models.EVENT_TYPES[events.MBSModuleStateChangeEvent] + ) + builds = resp.json["items"] self.assertEqual(len(builds), 1) - resp = self.client.get('/api/1/builds/?event_type_id=%s' % models.EVENT_TYPES[events.KojiTaskStateChangeEvent]) - builds = resp.json['items'] + resp = self.client.get( + "/api/1/builds/?event_type_id=%s" % models.EVENT_TYPES[events.KojiTaskStateChangeEvent] + ) + builds = resp.json["items"] self.assertEqual(len(builds), 0) def test_query_build_by_event_search_key(self): - resp = self.client.get('/api/1/builds/?event_search_key=101') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?event_search_key=101") + builds = resp.json["items"] self.assertEqual(len(builds), 3) - resp = self.client.get('/api/1/builds/?event_search_key=102') - builds = resp.json['items'] + resp = self.client.get("/api/1/builds/?event_search_key=102") + builds = resp.json["items"] self.assertEqual(len(builds), 0) def test_query_build_by_event_type_id_and_search_key(self): - resp = self.client.get('/api/1/builds/?event_type_id=%s&event_search_key=101' % models.EVENT_TYPES[events.TestingEvent]) - builds = resp.json['items'] + resp = self.client.get( + "/api/1/builds/?event_type_id=%s&event_search_key=101" + % models.EVENT_TYPES[events.TestingEvent] + ) + builds = resp.json["items"] self.assertEqual(len(builds), 3) - resp = self.client.get('/api/1/builds/?event_type_id=%s&event_search_key=102' % models.EVENT_TYPES[events.TestingEvent]) - builds = resp.json['items'] + resp = self.client.get( + "/api/1/builds/?event_type_id=%s&event_search_key=102" + % models.EVENT_TYPES[events.TestingEvent] + ) + builds = resp.json["items"] self.assertEqual(len(builds), 0) def test_query_builds_pagination_includes_query_params(self): - event = models.Event.create(db.session, '2018-00000000-0000-0000-0000-000000000001', '101', events.TestingEvent) - models.ArtifactBuild.create(db.session, event, 'ed', 'module', 20081234) - models.ArtifactBuild.create(db.session, event, 'ed', 'module', 20081235) - resp = self.client.get('/api/1/builds/?name=ed&per_page=1&page=2') + event = models.Event.create( + db.session, "2018-00000000-0000-0000-0000-000000000001", "101", events.TestingEvent + ) + models.ArtifactBuild.create(db.session, event, "ed", "module", 20081234) + models.ArtifactBuild.create(db.session, event, "ed", "module", 20081235) + resp = self.client.get("/api/1/builds/?name=ed&per_page=1&page=2") data = resp.json - builds = data['items'] + builds = data["items"] self.assertEqual(len(builds), 1) - self.assertEqual(builds[0]['name'], 'ed') - meta = data['meta'] - for page in ['first', 'last', 'prev', 'next']: - for query in ['name=ed', 'per_page=1']: + self.assertEqual(builds[0]["name"], "ed") + meta = data["meta"] + for page in ["first", "last", "prev", "next"]: + for query in ["name=ed", "per_page=1"]: self.assertTrue(query in meta[page]) def test_query_builds_pagination_includes_prev_and_next_page(self): - resp = self.client.get('/api/1/builds/?name=ed') + resp = self.client.get("/api/1/builds/?name=ed") data = resp.json - builds = data['items'] + builds = data["items"] self.assertEqual(len(builds), 1) - self.assertEqual(builds[0]['name'], 'ed') - meta = data['meta'] - 
self.assertTrue(meta['prev'] is None) - self.assertTrue(meta['next'] is None) + self.assertEqual(builds[0]["name"], "ed") + meta = data["meta"] + self.assertTrue(meta["prev"] is None) + self.assertTrue(meta["next"] is None) def test_query_event(self): - resp = self.client.get('/api/1/events/1') + resp = self.client.get("/api/1/events/1") data = resp.json - self.assertEqual(data['id'], 1) - self.assertEqual(data['message_id'], '2017-00000000-0000-0000-0000-000000000001') - self.assertEqual(data['search_key'], '101') - self.assertEqual(data['event_type_id'], models.EVENT_TYPES[events.TestingEvent]) - self.assertEqual(len(data['builds']), 3) + self.assertEqual(data["id"], 1) + self.assertEqual(data["message_id"], "2017-00000000-0000-0000-0000-000000000001") + self.assertEqual(data["search_key"], "101") + self.assertEqual(data["event_type_id"], models.EVENT_TYPES[events.TestingEvent]) + self.assertEqual(len(data["builds"]), 3) def test_query_event_without_builds(self): - resp = self.client.get('/api/1/events/?show_full_json=False') + resp = self.client.get("/api/1/events/?show_full_json=False") data = resp.json - self.assertEqual(data['items'][0]['id'], 2) - self.assertRaises(KeyError, lambda: data['items'][0]['builds']) + self.assertEqual(data["items"][0]["id"], 2) + self.assertRaises(KeyError, lambda: data["items"][0]["builds"]) def test_query_event_id_without_builds(self): - resp = self.client.get('/api/1/events/2?show_full_json=False') + resp = self.client.get("/api/1/events/2?show_full_json=False") data = resp.json - self.assertEqual(data['id'], 2) - self.assertRaises(KeyError, lambda: data['builds']) + self.assertEqual(data["id"], 2) + self.assertRaises(KeyError, lambda: data["builds"]) def test_query_event_without_builds_v2(self): - resp = self.client.get('/api/2/events/') + resp = self.client.get("/api/2/events/") data = resp.json - self.assertEqual(data['items'][0]['id'], 2) - self.assertRaises(KeyError, lambda: data['items'][0]['builds']) + self.assertEqual(data["items"][0]["id"], 2) + self.assertRaises(KeyError, lambda: data["items"][0]["builds"]) def test_query_event_id_without_builds_v2(self): - resp = self.client.get('/api/2/events/2') + resp = self.client.get("/api/2/events/2") data = resp.json - self.assertEqual(data['id'], 2) - self.assertRaises(KeyError, lambda: data['builds']) + self.assertEqual(data["id"], 2) + self.assertRaises(KeyError, lambda: data["builds"]) def test_query_events(self): - resp = self.client.get('/api/1/events/') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/") + evs = resp.json["items"] self.assertEqual(len(evs), 2) def test_query_event_complete(self): event = db.session.query(models.Event).get(1) - with patch('freshmaker.models.datetime') as datetime_patch: + with patch("freshmaker.models.datetime") as datetime_patch: datetime_patch.utcnow.return_value = datetime.datetime(2099, 8, 21, 13, 42, 20) event.transition(models.EventState.COMPLETE.value) - resp = self.client.get('/api/1/events/1') + resp = self.client.get("/api/1/events/1") data = resp.json - self.assertEqual(data['time_done'], '2099-08-21T13:42:20Z') + self.assertEqual(data["time_done"], "2099-08-21T13:42:20Z") def test_query_event_by_message_id(self): - resp = self.client.get('/api/1/events/?message_id=2017-00000000-0000-0000-0000-000000000001') - evs = resp.json['items'] + resp = self.client.get( + "/api/1/events/?message_id=2017-00000000-0000-0000-0000-000000000001" + ) + evs = resp.json["items"] self.assertEqual(len(evs), 1) - self.assertEqual(evs[0]['message_id'], 
'2017-00000000-0000-0000-0000-000000000001') + self.assertEqual(evs[0]["message_id"], "2017-00000000-0000-0000-0000-000000000001") def test_query_event_by_search_key(self): - resp = self.client.get('/api/1/events/?search_key=101') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/?search_key=101") + evs = resp.json["items"] self.assertEqual(len(evs), 1) - self.assertEqual(evs[0]['search_key'], '101') + self.assertEqual(evs[0]["search_key"], "101") def test_query_event_by_state_name(self): - models.Event.create(db.session, - "2018-00000000-0000-0000-0123-000000000001", - "0123001", - events.MBSModuleStateChangeEvent, - state=EventState['COMPLETE'].value) - resp = self.client.get('/api/1/events/?state=complete') - evs = resp.json['items'] + models.Event.create( + db.session, + "2018-00000000-0000-0000-0123-000000000001", + "0123001", + events.MBSModuleStateChangeEvent, + state=EventState["COMPLETE"].value, + ) + resp = self.client.get("/api/1/events/?state=complete") + evs = resp.json["items"] self.assertEqual(len(evs), 1) - self.assertEqual(evs[0]['state'], EventState['COMPLETE'].value) + self.assertEqual(evs[0]["state"], EventState["COMPLETE"].value) def test_query_event_with_invalid_state_name(self): - resp = self.client.get('/api/1/events/?state=invalid') + resp = self.client.get("/api/1/events/?state=invalid") data = resp.json - self.assertEqual(data['status'], 400) - self.assertEqual(data['message'], "Invalid state was supplied: invalid") + self.assertEqual(data["status"], 400) + self.assertEqual(data["message"], "Invalid state was supplied: invalid") def test_query_event_by_multiple_state_names(self): - models.Event.create(db.session, - "2018-00000000-0000-0000-0123-000000000001", - "0123001", - events.MBSModuleStateChangeEvent, - state=EventState['BUILDING'].value) - models.Event.create(db.session, - "2018-00000000-0000-0000-0123-000000000002", - "0123002", - events.MBSModuleStateChangeEvent, - state=EventState['COMPLETE'].value) - models.Event.create(db.session, - "2018-00000000-0000-0000-0123-000000000003", - "0123003", - events.MBSModuleStateChangeEvent, - state=EventState['COMPLETE'].value) - resp = self.client.get('/api/1/events/?state=building&state=complete') - evs = resp.json['items'] + models.Event.create( + db.session, + "2018-00000000-0000-0000-0123-000000000001", + "0123001", + events.MBSModuleStateChangeEvent, + state=EventState["BUILDING"].value, + ) + models.Event.create( + db.session, + "2018-00000000-0000-0000-0123-000000000002", + "0123002", + events.MBSModuleStateChangeEvent, + state=EventState["COMPLETE"].value, + ) + models.Event.create( + db.session, + "2018-00000000-0000-0000-0123-000000000003", + "0123003", + events.MBSModuleStateChangeEvent, + state=EventState["COMPLETE"].value, + ) + resp = self.client.get("/api/1/events/?state=building&state=complete") + evs = resp.json["items"] self.assertEqual(len(evs), 3) - building_events = [e for e in evs if e['state'] == EventState['BUILDING'].value] - complete_events = [e for e in evs if e['state'] == EventState['COMPLETE'].value] + building_events = [e for e in evs if e["state"] == EventState["BUILDING"].value] + complete_events = [e for e in evs if e["state"] == EventState["COMPLETE"].value] self.assertEqual(len(building_events), 1) self.assertEqual(len(complete_events), 2) @@ -436,18 +482,18 @@ def test_query_event_by_requester(self): "2018-00000000-0000-0000-0123-000000000001", "0123001", events.ManualRebuildWithAdvisoryEvent, - state=EventState['COMPLETE'].value, + 
state=EventState["COMPLETE"].value, requester="bob", - requested_rebuilds="foo-1-1 bar-1-1" + requested_rebuilds="foo-1-1 bar-1-1", ) - resp = self.client.get('/api/1/events/?requester=bob') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/?requester=bob") + evs = resp.json["items"] self.assertEqual(len(evs), 1) - self.assertEqual(evs[0]['requester'], ev1.requester) - self.assertEqual(evs[0]['search_key'], ev1.search_key) + self.assertEqual(evs[0]["requester"], ev1.requester) + self.assertEqual(evs[0]["search_key"], ev1.search_key) - resp = self.client.get('/api/1/events/?requester=alice') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/?requester=alice") + evs = resp.json["items"] self.assertEqual(len(evs), 0) def test_query_event_by_multiple_requesters(self): @@ -456,201 +502,211 @@ def test_query_event_by_multiple_requesters(self): "2018-00000000-0000-0000-0123-000000000001", "0123001", events.ManualRebuildWithAdvisoryEvent, - state=EventState['COMPLETE'].value, + state=EventState["COMPLETE"].value, requester="bob", - requested_rebuilds="foo-1-1 bar-1-1" + requested_rebuilds="foo-1-1 bar-1-1", ), models.Event.create( db.session, "2018-00000000-0000-0000-0123-000000000002", "0123002", events.ManualRebuildWithAdvisoryEvent, - state=EventState['COMPLETE'].value, + state=EventState["COMPLETE"].value, requester="alice", - requested_rebuilds="foo-1-2 bar-1-2" + requested_rebuilds="foo-1-2 bar-1-2", ), - resp = self.client.get('/api/1/events/?requester=alice&requester=bob') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/?requester=alice&requester=bob") + evs = resp.json["items"] self.assertEqual(len(evs), 2) - self.assertTrue(('bob', '0123001') in [(e['requester'], e['search_key']) for e in evs]) - self.assertTrue(('alice', '0123002') in [(e['requester'], e['search_key']) for e in evs]) + self.assertTrue(("bob", "0123001") in [(e["requester"], e["search_key"]) for e in evs]) + self.assertTrue(("alice", "0123002") in [(e["requester"], e["search_key"]) for e in evs]) def test_query_event_order_by_default(self): - resp = self.client.get('/api/1/events/') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/") + evs = resp.json["items"] for id, build in zip([2, 1], evs): - self.assertEqual(id, build['id']) + self.assertEqual(id, build["id"]) def test_query_event_order_by_id_asc(self): - resp = self.client.get('/api/1/events/?order_by=id') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/?order_by=id") + evs = resp.json["items"] for id, build in zip([1, 2], evs): - self.assertEqual(id, build['id']) + self.assertEqual(id, build["id"]) def test_query_event_order_by_id_message_id_desc(self): - resp = self.client.get('/api/1/events/?order_by=-message_id') - evs = resp.json['items'] + resp = self.client.get("/api/1/events/?order_by=-message_id") + evs = resp.json["items"] for id, build in zip([2, 1], evs): - self.assertEqual(id, build['id']) + self.assertEqual(id, build["id"]) def test_query_event_pagination_includes_query_params(self): - models.Event.create(db.session, '2018-00000000-0000-0000-0000-000000000001', '101', events.TestingEvent) - models.Event.create(db.session, '2018-00000000-0000-0000-0000-000000000002', '101', events.TestingEvent) - resp = self.client.get('/api/1/events/?search_key=101&per_page=1&page=2') + models.Event.create( + db.session, "2018-00000000-0000-0000-0000-000000000001", "101", events.TestingEvent + ) + models.Event.create( + db.session, "2018-00000000-0000-0000-0000-000000000002", 
"101", events.TestingEvent + ) + resp = self.client.get("/api/1/events/?search_key=101&per_page=1&page=2") data = resp.json - evs = data['items'] + evs = data["items"] self.assertEqual(len(evs), 1) - self.assertEqual(evs[0]['search_key'], '101') - meta = data['meta'] - for page in ['first', 'last', 'prev', 'next']: - for query in ['search_key=101', 'per_page=1']: + self.assertEqual(evs[0]["search_key"], "101") + meta = data["meta"] + for page in ["first", "last", "prev", "next"]: + for query in ["search_key=101", "per_page=1"]: self.assertTrue(query in meta[page]) def test_query_event_pagination_includes_prev_and_next_page(self): - resp = self.client.get('/api/1/events/?search_key=101') + resp = self.client.get("/api/1/events/?search_key=101") data = resp.json - evs = data['items'] + evs = data["items"] self.assertEqual(len(evs), 1) - self.assertEqual(evs[0]['search_key'], '101') - meta = data['meta'] - self.assertTrue(meta['prev'] is None) - self.assertTrue(meta['next'] is None) + self.assertEqual(evs[0]["search_key"], "101") + meta = data["meta"] + self.assertTrue(meta["prev"] is None) + self.assertTrue(meta["next"] is None) def test_patch_event_missing_action(self): - resp = self.client.patch( - '/api/1/events/1', - data=json.dumps({})) + resp = self.client.patch("/api/1/events/1", data=json.dumps({})) data = resp.json - self.assertEqual(data['error'], 'Bad Request') - self.assertTrue(data['message'].startswith('Missing action in request.')) + self.assertEqual(data["error"], "Bad Request") + self.assertTrue(data["message"].startswith("Missing action in request.")) def test_patch_event_unsupported_action(self): - resp = self.client.patch( - '/api/1/events/1', - data=json.dumps({'action': 'unsupported'})) + resp = self.client.patch("/api/1/events/1", data=json.dumps({"action": "unsupported"})) data = resp.json - self.assertEqual(data['error'], 'Bad Request') - self.assertTrue(data['message'].startswith('Unsupported action requested.')) + self.assertEqual(data["error"], "Bad Request") + self.assertTrue(data["message"].startswith("Unsupported action requested.")) def test_query_event_types(self): - resp = self.client.get('/api/1/event-types/') - event_types = resp.json['items'] + resp = self.client.get("/api/1/event-types/") + event_types = resp.json["items"] self.assertEqual(len(event_types), len(models.EVENT_TYPES)) def test_query_event_type(self): for cls, val in models.EVENT_TYPES.items(): - resp = self.client.get('/api/1/event-types/%s' % val) + resp = self.client.get("/api/1/event-types/%s" % val) event = resp.json - self.assertEqual(event['id'], val) - self.assertEqual(event['name'], cls.__name__) + self.assertEqual(event["id"], val) + self.assertEqual(event["name"], cls.__name__) def test_query_nonexist_event_type(self): - resp = self.client.get('/api/1/event-types/99999') + resp = self.client.get("/api/1/event-types/99999") data = resp.json - self.assertEqual(data['status'], 404) - self.assertEqual(data['error'], 'Not Found') - self.assertEqual(data['message'], 'No such event type found.') + self.assertEqual(data["status"], 404) + self.assertEqual(data["error"], "Not Found") + self.assertEqual(data["message"], "No such event type found.") def test_query_build_types(self): - resp = self.client.get('/api/1/build-types/') - build_types = resp.json['items'] + resp = self.client.get("/api/1/build-types/") + build_types = resp.json["items"] self.assertEqual(len(build_types), len(list(ArtifactType))) def test_query_build_type(self): for t in list(ArtifactType): - resp = 
self.client.get('/api/1/build-types/%s' % t.value) + resp = self.client.get("/api/1/build-types/%s" % t.value) build_type = resp.json - self.assertEqual(build_type['id'], t.value) - self.assertEqual(build_type['name'], t.name) + self.assertEqual(build_type["id"], t.value) + self.assertEqual(build_type["name"], t.name) def test_query_nonexist_build_type(self): - resp = self.client.get('/api/1/build-types/99999') + resp = self.client.get("/api/1/build-types/99999") data = resp.json - self.assertEqual(data['status'], 404) - self.assertEqual(data['error'], 'Not Found') - self.assertEqual(data['message'], 'No such build type found.') + self.assertEqual(data["status"], 404) + self.assertEqual(data["error"], "Not Found") + self.assertEqual(data["message"], "No such build type found.") def test_query_build_states(self): - resp = self.client.get('/api/1/build-states/') - build_types = resp.json['items'] + resp = self.client.get("/api/1/build-states/") + build_types = resp.json["items"] self.assertEqual(len(build_types), len(list(ArtifactBuildState))) def test_query_build_state(self): for t in list(ArtifactBuildState): - resp = self.client.get('/api/1/build-states/%s' % t.value) + resp = self.client.get("/api/1/build-states/%s" % t.value) build_type = resp.json - self.assertEqual(build_type['id'], t.value) - self.assertEqual(build_type['name'], t.name) + self.assertEqual(build_type["id"], t.value) + self.assertEqual(build_type["name"], t.name) def test_query_nonexist_build_state(self): - resp = self.client.get('/api/1/build-states/99999') + resp = self.client.get("/api/1/build-states/99999") data = resp.json - self.assertEqual(data['status'], 404) - self.assertEqual(data['error'], 'Not Found') - self.assertEqual(data['message'], 'No such build state found.') + self.assertEqual(data["status"], 404) + self.assertEqual(data["error"], "Not Found") + self.assertEqual(data["message"], "No such build state found.") def test_query_pullspec_overrides(self): - event = models.Event.create(db.session, "test_msg_id", "RHSA-2017-284", - events.TestingEvent) - build = models.ArtifactBuild.create(db.session, event, "parent", - "module", 1234) + event = models.Event.create(db.session, "test_msg_id", "RHSA-2017-284", events.TestingEvent) + build = models.ArtifactBuild.create(db.session, event, "parent", "module", 1234) pullspecs = { - 'update': 'update_placeholder', - 'pullspec_replacements': [ - {'new': 'registry.io/repo/example-operator@sha256:', - 'original': 'registry.io/repo/example-operator:v1.1.0', - 'pinned': True}, - {'new': 'registry.io/repo/2example-operator@sha256:', - 'original': 'registry.io/repo/2example-operator:v2.2.0', - 'pinned': True} - ] + "update": "update_placeholder", + "pullspec_replacements": [ + { + "new": "registry.io/repo/example-operator@sha256:", + "original": "registry.io/repo/example-operator:v1.1.0", + "pinned": True, + }, + { + "new": "registry.io/repo/2example-operator@sha256:", + "original": "registry.io/repo/2example-operator:v2.2.0", + "pinned": True, + }, + ], } expected_pullspecs = { - 'update': 'update_placeholder', - 'pullspec_replacements': [ - {'new': 'registry.io/repo/example-operator@sha256:', - 'original': 'registry.io/repo/example-operator:v1.1.0', - 'pinned': True}, - {'new': 'registry.io/repo/2example-operator@sha256:', - 'original': 'registry.io/repo/2example-operator:v2.2.0', - 'pinned': True} - ] + "update": "update_placeholder", + "pullspec_replacements": [ + { + "new": "registry.io/repo/example-operator@sha256:", + "original": 
"registry.io/repo/example-operator:v1.1.0", + "pinned": True, + }, + { + "new": "registry.io/repo/2example-operator@sha256:", + "original": "registry.io/repo/2example-operator:v2.2.0", + "pinned": True, + }, + ], } build.bundle_pullspec_overrides = pullspecs db.session.commit() - resp = self.client.get(f'/api/1/pullspec_overrides/{build.id}') + resp = self.client.get(f"/api/1/pullspec_overrides/{build.id}") self.assertEqual(json.loads(resp.data), expected_pullspecs) def test_query_nonexist_pullspec_overrides(self): - resp = self.client.get('/api/1/pullspec_overrides/123') + resp = self.client.get("/api/1/pullspec_overrides/123") - self.assertEqual(resp.json['status'], 404) - self.assertEqual(resp.json['error'], 'Not Found') - self.assertEqual(resp.json['message'], 'No such bundle build') + self.assertEqual(resp.json["status"], 404) + self.assertEqual(resp.json["error"], "Not Found") + self.assertEqual(resp.json["message"], "No such bundle build") def test_query_no_id_pullspec_overrides(self): - resp = self.client.get('/api/1/pullspec_overrides/') + resp = self.client.get("/api/1/pullspec_overrides/") - self.assertEqual(resp.json['status'], 500) - self.assertEqual(resp.json['error'], 'Internal Server Error') + self.assertEqual(resp.json["status"], 500) + self.assertEqual(resp.json["error"], "Internal Server Error") def test_about_api(self): # Since the version is always changing, let's just mock it to be consistent - with patch('freshmaker.views.version', '1.0.0'): - resp = self.client.get('/api/1/about/') + with patch("freshmaker.views.version", "1.0.0"): + resp = self.client.get("/api/1/about/") data = resp.json - self.assertEqual(data['version'], '1.0.0') + self.assertEqual(data["version"], "1.0.0") @patch("freshmaker.views.ImageVerifier") def test_verify_image(self, verifier): verifier.return_value.verify_image.return_value = {"foo-1-1": ["content-set"]} - resp = self.client.get('/api/1/verify-image/foo-1-1') + resp = self.client.get("/api/1/verify-image/foo-1-1") data = resp.json - self.assertEqual(data, { - 'images': {'foo-1-1': ['content-set']}, - 'msg': 'Found 1 images which are handled by Freshmaker for defined content_sets.'}) + self.assertEqual( + data, + { + "images": {"foo-1-1": ["content-set"]}, + "msg": "Found 1 images which are handled by Freshmaker for defined content_sets.", + }, + ) @patch("freshmaker.views.ImageVerifier") def test_verify_image_repository(self, verifier): @@ -658,7 +714,7 @@ def test_verify_image_repository(self, verifier): "repository": {"auto_rebuild_tags": ["latest"]}, "images": { "foo-1-1": {"content_sets": ["content-set"], "tags": ["1", "latest", "1-1"]} - } + }, } resp = self.client.get("/api/1/verify-image-repository/foo/bar") data = resp.json @@ -667,36 +723,36 @@ def test_verify_image_repository(self, verifier): "auto_rebuild_tags": ["latest"], }, "images": { - "foo-1-1": { - "tags": ["1", "latest", "1-1"], - "content_sets": ["content-set"] - } + "foo-1-1": {"tags": ["1", "latest", "1-1"], "content_sets": ["content-set"]} }, - "msg": "Found 1 images which are handled by Freshmaker for defined content_sets." 
+ "msg": "Found 1 images which are handled by Freshmaker for defined content_sets.", } self.assertEqual(data, expected) def test_dependencies(self): - event = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent) - event1 = models.Event.create(db.session, "2017-00000000-0000-0000-0000-000000000004", "104", events.TestingEvent) + event = models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent + ) + event1 = models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000004", "104", events.TestingEvent + ) db.session.commit() event.add_event_dependency(db.session, event1) db.session.commit() - resp = self.client.get('/api/1/events/4') + resp = self.client.get("/api/1/events/4") data = resp.json - self.assertEqual(data['id'], event1.id) - self.assertEqual(data['depends_on_events'], []) - self.assertEqual(data['depending_events'], [event.id]) + self.assertEqual(data["id"], event1.id) + self.assertEqual(data["depends_on_events"], []) + self.assertEqual(data["depending_events"], [event.id]) - resp = self.client.get('/api/1/events/3') + resp = self.client.get("/api/1/events/3") data = resp.json - self.assertEqual(data['id'], event.id) - self.assertEqual(data['depends_on_events'], [event1.id]) - self.assertEqual(data['depending_events'], []) + self.assertEqual(data["id"], event.id) + self.assertEqual(data["depends_on_events"], [event1.id]) + self.assertEqual(data["depending_events"], []) def test_trailing_slash(self): - urls = ('/api/2/builds', '/api/2/builds/', - '/api/2/events', '/api/2/events/') + urls = ("/api/2/builds", "/api/2/builds/", "/api/2/events", "/api/2/events/") for url in urls: response = self.client.get(url, follow_redirects=True) self.assertEqual(response.status_code, 200) @@ -712,35 +768,47 @@ def setUp(self): def _init_data(self): event = models.Event.create( - db.session, "2017-00000000-0000-0000-0000-000000000001", - "101", events.TestingEvent) + db.session, "2017-00000000-0000-0000-0000-000000000001", "101", events.TestingEvent + ) event.state = EventState.BUILDING.value build = models.ArtifactBuild.create(db.session, event, "ed", "module", 1234) build.build_args = '{"key": "value"}' models.ArtifactBuild.create(db.session, event, "mksh", "module", 1235) models.ArtifactBuild.create(db.session, event, "bash", "module", 1236) event2 = models.Event.create( - db.session, "2017-00000000-0000-0000-0000-000000000002", - "102", events.GitModuleMetadataChangeEvent) + db.session, + "2017-00000000-0000-0000-0000-000000000002", + "102", + events.GitModuleMetadataChangeEvent, + ) event2.state = EventState.SKIPPED.value event3 = models.Event.create( - db.session, "2017-00000000-0000-0000-0000-000000000003", - "103", events.MBSModuleStateChangeEvent) + db.session, + "2017-00000000-0000-0000-0000-000000000003", + "103", + events.MBSModuleStateChangeEvent, + ) event3.state = EventState.FAILED.value db.session.commit() db.session.expire_all() def test_query_event_multiple_states(self): - resp = self.client.get('/api/1/events/?state=%d&state=%d' % ( - EventState.SKIPPED.value, EventState.BUILDING.value)) - evs = resp.json['items'] + resp = self.client.get( + "/api/1/events/?state=%d&state=%d" + % (EventState.SKIPPED.value, EventState.BUILDING.value) + ) + evs = resp.json["items"] self.assertEqual(len(evs), 2) def test_query_event_multiple_event_type_ids(self): - resp = self.client.get('/api/1/events/?event_type_id=%d&event_type_id=%d' % ( - models.EVENT_TYPES[events.TestingEvent], - 
models.EVENT_TYPES[events.GitModuleMetadataChangeEvent])) - evs = resp.json['items'] + resp = self.client.get( + "/api/1/events/?event_type_id=%d&event_type_id=%d" + % ( + models.EVENT_TYPES[events.TestingEvent], + models.EVENT_TYPES[events.GitModuleMetadataChangeEvent], + ) + ) + evs = resp.json["items"] self.assertEqual(len(evs), 2) @@ -749,398 +817,426 @@ def setUp(self): super(TestManualTriggerRebuild, self).setUp() self.client = app.test_client() - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." "from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") def test_manual_rebuild(self, time, from_advisory_id, publish): time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 123, 'name', 'REL_PREP', ['rpm']) - with patch('freshmaker.models.datetime') as datetime_patch: + from_advisory_id.return_value = ErrataAdvisory(123, "name", "REL_PREP", ["rpm"]) + with patch("freshmaker.models.datetime") as datetime_patch: datetime_patch.utcnow.return_value = datetime.datetime(2017, 8, 21, 13, 42, 20) - with self.test_request_context(user='root'): + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/builds/', - data=json.dumps({'errata_id': 1}), - content_type='application/json', + "/api/1/builds/", + data=json.dumps({"errata_id": 1}), + content_type="application/json", ) data = resp.json # Other fields are predictible. - self.assertEqual(data, { - u'builds': [], - u'depending_events': [], - u'depends_on_events': [], - u'event_type_id': 13, - u'id': 1, - u'message_id': u'manual_rebuild_123', - u'search_key': u'123', - u'state': 0, - u'state_name': u'INITIALIZED', - u'state_reason': None, - u'time_created': u'2017-08-21T13:42:20Z', - u'time_done': None, - u'url': u'/api/1/events/1', - u'dry_run': False, - u'requester': 'root', - u'requested_rebuilds': [], - u'requester_metadata': {}}) + self.assertEqual( + data, + { + "builds": [], + "depending_events": [], + "depends_on_events": [], + "event_type_id": 13, + "id": 1, + "message_id": "manual_rebuild_123", + "search_key": "123", + "state": 0, + "state_name": "INITIALIZED", + "state_reason": None, + "time_created": "2017-08-21T13:42:20Z", + "time_done": None, + "url": "/api/1/events/1", + "dry_run": False, + "requester": "root", + "requested_rebuilds": [], + "requester_metadata": {}, + }, + ) publish.assert_called_once_with( - 'manual.rebuild', - {'msg_id': 'manual_rebuild_123', u'errata_id': 1, - 'requester': 'root'}) - - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.from_advisory_id') - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') - @patch('freshmaker.views._validate_rebuild_request', return_value=None) - def test_manually_triggered_bundle_rebuild(self, validate, time, publish, - from_advisory_id): + "manual.rebuild", {"msg_id": "manual_rebuild_123", "errata_id": 1, "requester": "root"} + ) + + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.from_advisory_id") + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") + @patch("freshmaker.views._validate_rebuild_request", return_value=None) + def test_manually_triggered_bundle_rebuild(self, validate, time, publish, from_advisory_id): 
time.return_value = 111 # Create testing dependent event - dependent_event = models.Event.create(db.session, "msg_id", 123, - events.TestingEvent) - advisory = ErrataAdvisory(123, 'name', 'SHIPPED_LIVE', ['rpm']) - advisory._reporter = 'botas_123' + dependent_event = models.Event.create(db.session, "msg_id", 123, events.TestingEvent) + advisory = ErrataAdvisory(123, "name", "SHIPPED_LIVE", ["rpm"]) + advisory._reporter = "botas_123" from_advisory_id.return_value = advisory db.session.commit() - with patch('freshmaker.models.datetime') as datetime_patch: - datetime_patch.utcnow.return_value = datetime.datetime(2000, 1, 2, - 3, 4, 5) - with self.test_request_context(user='root'): + with patch("freshmaker.models.datetime") as datetime_patch: + datetime_patch.utcnow.return_value = datetime.datetime(2000, 1, 2, 3, 4, 5) + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/builds/', - data=json.dumps({'errata_id': 123, - 'freshmaker_event_id': dependent_event.id, - 'container_images': ['container_image'], - 'force': True}), - content_type='application/json', + "/api/1/builds/", + data=json.dumps( + { + "errata_id": 123, + "freshmaker_event_id": dependent_event.id, + "container_images": ["container_image"], + "force": True, + } + ), + content_type="application/json", ) - self.assertEqual(resp.json, - {'builds': [], - 'depending_events': [], - 'depends_on_events': [1], - 'dry_run': False, - 'event_type_id': 16, - 'id': 2, - 'message_id': 'manual_rebuild_111', - 'requested_rebuilds': ['container_image'], - 'requester': 'root', - 'requester_metadata': {}, - 'search_key': '123', - 'state': 0, - 'state_name': 'INITIALIZED', - 'state_reason': None, - 'time_created': '2000-01-02T03:04:05Z', - 'time_done': None, - 'url': '/api/1/events/2'}) - - publish.assert_called_once_with('manual.rebuild', - {'msg_id': 'manual_rebuild_111', - 'freshmaker_event_id': 1, - 'errata_id': 123, - 'container_images': ['container_image'], - 'requester': 'root', - 'force': True}) - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') + self.assertEqual( + resp.json, + { + "builds": [], + "depending_events": [], + "depends_on_events": [1], + "dry_run": False, + "event_type_id": 16, + "id": 2, + "message_id": "manual_rebuild_111", + "requested_rebuilds": ["container_image"], + "requester": "root", + "requester_metadata": {}, + "search_key": "123", + "state": 0, + "state_name": "INITIALIZED", + "state_reason": None, + "time_created": "2000-01-02T03:04:05Z", + "time_done": None, + "url": "/api/1/events/2", + }, + ) + + publish.assert_called_once_with( + "manual.rebuild", + { + "msg_id": "manual_rebuild_111", + "freshmaker_event_id": 1, + "errata_id": 123, + "container_images": ["container_image"], + "requester": "root", + "force": True, + }, + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." 
"from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") def test_manual_rebuild_dry_run(self, time, from_advisory_id, publish): time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 123, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(123, "name", "REL_PREP", ["rpm"]) - payload = {'errata_id': 1, 'dry_run': True} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"errata_id": 1, "dry_run": True} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") data = resp.json # Other fields are predictible. - self.assertEqual(data['dry_run'], True) + self.assertEqual(data["dry_run"], True) publish.assert_called_once_with( - 'manual.rebuild', - {'msg_id': 'manual_rebuild_123', u'errata_id': 1, 'dry_run': True, - 'requester': 'root'}) - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') + "manual.rebuild", + {"msg_id": "manual_rebuild_123", "errata_id": 1, "dry_run": True, "requester": "root"}, + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." "from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") def test_manual_rebuild_container_images(self, time, from_advisory_id, publish): time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 123, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(123, "name", "REL_PREP", ["rpm"]) payload = { - 'errata_id': 1, - 'container_images': ['foo-1-1', 'bar-1-1'], + "errata_id": 1, + "container_images": ["foo-1-1", "bar-1-1"], } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") data = resp.json # Other fields are predictible. - self.assertEqual(data['requested_rebuilds'], ["foo-1-1", "bar-1-1"]) + self.assertEqual(data["requested_rebuilds"], ["foo-1-1", "bar-1-1"]) publish.assert_called_once_with( - 'manual.rebuild', - {'msg_id': 'manual_rebuild_123', u'errata_id': 1, - 'container_images': ["foo-1-1", "bar-1-1"], 'requester': 'root'}) - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') + "manual.rebuild", + { + "msg_id": "manual_rebuild_123", + "errata_id": 1, + "container_images": ["foo-1-1", "bar-1-1"], + "requester": "root", + }, + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." 
"from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") def test_manual_rebuild_metadata(self, time, from_advisory_id, publish): time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 123, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(123, "name", "REL_PREP", ["rpm"]) payload = { - 'errata_id': 1, - 'metadata': {'foo': ['bar']}, + "errata_id": 1, + "metadata": {"foo": ["bar"]}, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") data = resp.json # Other fields are predictible. - self.assertEqual(data['requester_metadata'], {"foo": ["bar"]}) + self.assertEqual(data["requester_metadata"], {"foo": ["bar"]}) publish.assert_called_once_with( - 'manual.rebuild', - {'msg_id': 'manual_rebuild_123', u'errata_id': 1, - 'metadata': {"foo": ["bar"]}, 'requester': 'root'}) - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') + "manual.rebuild", + { + "msg_id": "manual_rebuild_123", + "errata_id": 1, + "metadata": {"foo": ["bar"]}, + "requester": "root", + }, + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." "from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") def test_manual_rebuild_requester(self, time, from_advisory_id, publish): time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 123, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(123, "name", "REL_PREP", ["rpm"]) payload = { - 'errata_id': 1, + "errata_id": 1, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") data = resp.json # Other fields are predictible. - self.assertEqual(data['requester'], "root") + self.assertEqual(data["requester"], "root") publish.assert_called_once_with( - 'manual.rebuild', - {'msg_id': 'manual_rebuild_123', u'errata_id': 1, - 'requester': 'root'}) + "manual.rebuild", {"msg_id": "manual_rebuild_123", "errata_id": 1, "requester": "root"} + ) def test_validate_rebuild_request_for_bundle_rebuild(self): - data = {'bundle_images': ['bundle'], - 'container_images': ['container_image']} + data = {"bundle_images": ["bundle"], "container_images": ["container_image"]} with app.test_request_context(json=data): ret = _validate_rebuild_request(request) self.assertEqual(ret, None) - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') - @patch('freshmaker.models.Event.add_event_dependency') - def test_dependent_manual_rebuild_on_existing_event(self, add_dependency, time, - from_advisory_id, publish): - models.Event.create(db.session, - "2017-00000000-0000-0000-0000-000000000003", - "103", events.TestingEvent) + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." 
"from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") + @patch("freshmaker.models.Event.add_event_dependency") + def test_dependent_manual_rebuild_on_existing_event( + self, add_dependency, time, from_advisory_id, publish + ): + models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent + ) db.session.commit() time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 103, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(103, "name", "REL_PREP", ["rpm"]) payload = { - 'errata_id': 103, - 'container_images': ['foo-1-1'], - 'freshmaker_event_id': 1, - 'force': True, + "errata_id": 103, + "container_images": ["foo-1-1"], + "freshmaker_event_id": 1, + "force": True, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") data = resp.json # Other fields are predictible. - self.assertEqual(data['requested_rebuilds'], ["foo-1-1"]) + self.assertEqual(data["requested_rebuilds"], ["foo-1-1"]) assert add_dependency.call_count == 1 assert "103" == add_dependency.call_args[0][1].search_key publish.assert_called_once_with( - 'manual.rebuild', - {'msg_id': 'manual_rebuild_123', u'errata_id': 103, - 'container_images': ["foo-1-1"], 'freshmaker_event_id': 1, - 'requester': 'root', 'force': True}) - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') - @patch('freshmaker.models.Event.add_event_dependency') + "manual.rebuild", + { + "msg_id": "manual_rebuild_123", + "errata_id": 103, + "container_images": ["foo-1-1"], + "freshmaker_event_id": 1, + "requester": "root", + "force": True, + }, + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." 
"from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") + @patch("freshmaker.models.Event.add_event_dependency") def test_dependent_manual_rebuild_on_existing_event_no_errata_id( - self, add_dependency, time, from_advisory_id, publish, + self, + add_dependency, + time, + from_advisory_id, + publish, ): models.Event.create( - db.session, '2017-00000000-0000-0000-0000-000000000003', '1', events.TestingEvent, + db.session, + "2017-00000000-0000-0000-0000-000000000003", + "1", + events.TestingEvent, ) db.session.commit() - from_advisory_id.return_value = ErrataAdvisory(1, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(1, "name", "REL_PREP", ["rpm"]) payload = { - 'container_images': ['foo-1-1'], - 'freshmaker_event_id': 1, - 'force': True, + "container_images": ["foo-1-1"], + "freshmaker_event_id": 1, + "force": True, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 200) - self.assertEqual(resp.json['search_key'], '1') + self.assertEqual(resp.json["search_key"], "1") def test_dependent_manual_rebuild_on_existing_event_errata_id_mismatch(self): models.Event.create( - db.session, '2017-00000000-0000-0000-0000-000000000003', '1', events.TestingEvent, + db.session, + "2017-00000000-0000-0000-0000-000000000003", + "1", + events.TestingEvent, ) db.session.commit() payload = { - 'container_images': ['foo-1-1'], - 'errata_id': 2, - 'freshmaker_event_id': 1, + "container_images": ["foo-1-1"], + "errata_id": 2, + "freshmaker_event_id": 1, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) self.assertEqual( - resp.json['message'], + resp.json["message"], 'The provided "errata_id" doesn\'t match the Advisory ID associated with the input ' '"freshmaker_event_id".', ) def test_dependent_manual_rebuild_on_existing_event_invalid_dependent(self): payload = { - 'container_images': ['foo-1-1'], - 'freshmaker_event_id': 1, + "container_images": ["foo-1-1"], + "freshmaker_event_id": 1, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], 'The provided "freshmaker_event_id" is invalid.') + self.assertEqual(resp.json["message"], 'The provided "freshmaker_event_id" is invalid.') def test_manual_rebuild_missing_errata_id(self): - payload = {'container_images': ['foo-1-1']} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"container_images": ["foo-1-1"]} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) self.assertEqual( - resp.json['message'], + resp.json["message"], 'You must at least provide "errata_id" or 
"freshmaker_event_id" in the request.', ) def test_manual_rebuild_invalid_type_errata_id(self): - payload = {'errata_id': '123'} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"errata_id": "123"} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"errata_id" must be an integer.') + self.assertEqual(resp.json["message"], '"errata_id" must be an integer.') def test_manual_rebuild_invalid_type_freshmaker_event_id(self): - payload = {'freshmaker_event_id': '123'} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"freshmaker_event_id": "123"} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"freshmaker_event_id" must be an integer.') + self.assertEqual(resp.json["message"], '"freshmaker_event_id" must be an integer.') def test_manual_rebuild_invalid_type_container_images(self): - payload = {'container_images': '123'} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"container_images": "123"} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"container_images" must be an array of strings.') + self.assertEqual(resp.json["message"], '"container_images" must be an array of strings.') def test_manual_rebuild_invalid_type_dry_run(self): - payload = {'dry_run': '123'} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"dry_run": "123"} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"dry_run" must be a boolean.') + self.assertEqual(resp.json["message"], '"dry_run" must be a boolean.') def test_manual_rebuild_with_async_event(self): models.Event.create( - db.session, '2017-00000000-0000-0000-0000-000000000003', '123', - events.FreshmakerAsyncManualBuildEvent + db.session, + "2017-00000000-0000-0000-0000-000000000003", + "123", + events.FreshmakerAsyncManualBuildEvent, ) db.session.commit() - with patch('freshmaker.models.datetime') as datetime_patch: + with patch("freshmaker.models.datetime") as datetime_patch: datetime_patch.utcnow.return_value = datetime.datetime(2017, 8, 21, 13, 42, 20) payload = { - 'container_images': ['foo-1-1', 'bar-1-1'], - 'freshmaker_event_id': 1, + "container_images": ["foo-1-1", "bar-1-1"], + "freshmaker_event_id": 1, } - with self.test_request_context(user='root'): + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/builds/', + "/api/1/builds/", data=json.dumps(payload), - content_type='application/json', + content_type="application/json", ) self.assertEqual(resp.status_code, 400) self.assertEqual( - resp.json['message'], - 'The event (id=1) is an async build event, can not 
be used for this build.') - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory.' - 'from_advisory_id') - @patch('freshmaker.parsers.internal.manual_rebuild.time.time') - @patch('freshmaker.models.Event.add_event_dependency') - def test_dependent_manual_rebuild_in_building_state(self, add_dependency, time, - from_advisory_id, publish): - models.Event.create(db.session, - "2017-00000000-0000-0000-0000-000000000003", - "103", events.TestingEvent) + resp.json["message"], + "The event (id=1) is an async build event, can not be used for this build.", + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.internal.manual_rebuild.ErrataAdvisory." "from_advisory_id") + @patch("freshmaker.parsers.internal.manual_rebuild.time.time") + @patch("freshmaker.models.Event.add_event_dependency") + def test_dependent_manual_rebuild_in_building_state( + self, add_dependency, time, from_advisory_id, publish + ): + models.Event.create( + db.session, "2017-00000000-0000-0000-0000-000000000003", "103", events.TestingEvent + ) db.session.commit() time.return_value = 123 - from_advisory_id.return_value = ErrataAdvisory( - 103, 'name', 'REL_PREP', ['rpm']) + from_advisory_id.return_value = ErrataAdvisory(103, "name", "REL_PREP", ["rpm"]) payload = { - 'errata_id': 103, - 'container_images': ['foo-1-1'], - 'freshmaker_event_id': 1, + "errata_id": 103, + "container_images": ["foo-1-1"], + "freshmaker_event_id": 1, } - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], - 'Events triggered by advisory 103 are running: [1]. If you want to rebuild it anyway, use "force": true option.') + self.assertEqual( + resp.json["message"], + 'Events triggered by advisory 103 are running: [1]. 
If you want to rebuild it anyway, use "force": true option.', + ) def test_manual_rebuild_invalid_type_force(self): - payload = {'force': '123'} - with self.test_request_context(user='root'): - resp = self.client.post('/api/1/builds/', json=payload, content_type='application/json') + payload = {"force": "123"} + with self.test_request_context(user="root"): + resp = self.client.post("/api/1/builds/", json=payload, content_type="application/json") self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"force" must be a boolean.') + self.assertEqual(resp.json["message"], '"force" must be a boolean.') class TestAsyncBuild(ViewBaseTest): @@ -1148,229 +1244,258 @@ def setUp(self): super(TestAsyncBuild, self).setUp() self.client = app.test_client() - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.koji.async_manual_build.time.time') + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.koji.async_manual_build.time.time") def test_async_build(self, time, publish): time.return_value = 123 - with patch('freshmaker.models.datetime') as datetime_patch: + with patch("freshmaker.models.datetime") as datetime_patch: datetime_patch.utcnow.return_value = datetime.datetime(2017, 8, 21, 13, 42, 20) - payload = { - 'dist_git_branch': 'master', - 'container_images': ['foo-1-1', 'bar-1-1'] - } - with self.test_request_context(user='root'): + payload = {"dist_git_branch": "master", "container_images": ["foo-1-1", "bar-1-1"]} + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', + "/api/1/async-builds/", data=json.dumps(payload), - content_type='application/json', + content_type="application/json", ) data = json.loads(resp.get_data(as_text=True)) - self.assertEqual(data, { - u'builds': [], - u'depending_events': [], - u'depends_on_events': [], - u'dry_run': False, - u'event_type_id': 14, - u'id': 1, - u'message_id': 'async_build_123', - u'requested_rebuilds': ['foo-1-1-container', 'bar-1-1-container'], - u'requester': 'root', - u'requester_metadata': {}, - u'search_key': 'async_build_123', - u'state': 0, - u'state_name': 'INITIALIZED', - u'state_reason': None, - u'time_created': '2017-08-21T13:42:20Z', - u'time_done': None, - u'url': '/api/1/events/1'}) + self.assertEqual( + data, + { + "builds": [], + "depending_events": [], + "depends_on_events": [], + "dry_run": False, + "event_type_id": 14, + "id": 1, + "message_id": "async_build_123", + "requested_rebuilds": ["foo-1-1-container", "bar-1-1-container"], + "requester": "root", + "requester_metadata": {}, + "search_key": "async_build_123", + "state": 0, + "state_name": "INITIALIZED", + "state_reason": None, + "time_created": "2017-08-21T13:42:20Z", + "time_done": None, + "url": "/api/1/events/1", + }, + ) publish.assert_called_once_with( - 'async.manual.build', + "async.manual.build", { - 'msg_id': 'async_build_123', - 'dist_git_branch': 'master', - 'container_images': ['foo-1-1-container', 'bar-1-1-container'], - 'requester': 'root' - }) - - @patch('freshmaker.messaging.publish') - @patch('freshmaker.parsers.koji.async_manual_build.time.time') + "msg_id": "async_build_123", + "dist_git_branch": "master", + "container_images": ["foo-1-1-container", "bar-1-1-container"], + "requester": "root", + }, + ) + + @patch("freshmaker.messaging.publish") + @patch("freshmaker.parsers.koji.async_manual_build.time.time") def test_async_build_dry_run(self, time, publish): time.return_value = 123 payload = { - 'dist_git_branch': 'master', - 'container_images': ['foo-1-1', 
'bar-1-1'], - 'dry_run': True + "dist_git_branch": "master", + "container_images": ["foo-1-1", "bar-1-1"], + "dry_run": True, } - with self.test_request_context(user='root'): + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', json=payload, content_type='application/json') + "/api/1/async-builds/", json=payload, content_type="application/json" + ) data = json.loads(resp.get_data(as_text=True)) - self.assertEqual(data['dry_run'], True) + self.assertEqual(data["dry_run"], True) publish.assert_called_once_with( - 'async.manual.build', + "async.manual.build", { - 'msg_id': 'async_build_123', - 'dist_git_branch': 'master', - 'container_images': ['foo-1-1-container', 'bar-1-1-container'], - 'dry_run': True, - 'requester': 'root', - }) + "msg_id": "async_build_123", + "dist_git_branch": "master", + "container_images": ["foo-1-1-container", "bar-1-1-container"], + "dry_run": True, + "requester": "root", + }, + ) def test_async_build_with_non_async_event(self): models.Event.create( - db.session, '2017-00000000-0000-0000-0000-000000000003', '123', events.TestingEvent, + db.session, + "2017-00000000-0000-0000-0000-000000000003", + "123", + events.TestingEvent, ) db.session.commit() - with patch('freshmaker.models.datetime') as datetime_patch: + with patch("freshmaker.models.datetime") as datetime_patch: datetime_patch.utcnow.return_value = datetime.datetime(2017, 8, 21, 13, 42, 20) payload = { - 'dist_git_branch': 'master', - 'container_images': ['foo-1-1', 'bar-1-1'], - 'freshmaker_event_id': 1, + "dist_git_branch": "master", + "container_images": ["foo-1-1", "bar-1-1"], + "freshmaker_event_id": 1, } - with self.test_request_context(user='root'): + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', + "/api/1/async-builds/", data=json.dumps(payload), - content_type='application/json', + content_type="application/json", ) self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], 'The event (id=1) is not an async build event.') + self.assertEqual(resp.json["message"], "The event (id=1) is not an async build event.") def test_async_build_invalid_dist_git_branch(self): - payload = {'dist_git_branch': 123} - with self.test_request_context(user='root'): + payload = {"dist_git_branch": 123} + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', json=payload, content_type='application/json') + "/api/1/async-builds/", json=payload, content_type="application/json" + ) self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"dist_git_branch" must be a string.') + self.assertEqual(resp.json["message"], '"dist_git_branch" must be a string.') def test_async_build_invalid_type_freshmaker_event_id(self): - payload = {'freshmaker_event_id': '123'} - with self.test_request_context(user='root'): + payload = {"freshmaker_event_id": "123"} + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', json=payload, content_type='application/json') + "/api/1/async-builds/", json=payload, content_type="application/json" + ) self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"freshmaker_event_id" must be an integer.') + self.assertEqual(resp.json["message"], '"freshmaker_event_id" must be an integer.') def test_async_build_invalid_type_container_images(self): - payload = {'container_images': '123'} - with self.test_request_context(user='root'): + payload = {"container_images": 
"123"} + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', json=payload, content_type='application/json') + "/api/1/async-builds/", json=payload, content_type="application/json" + ) self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"container_images" must be an array of strings.') + self.assertEqual(resp.json["message"], '"container_images" must be an array of strings.') def test_async_build_invalid_type_brew_target(self): - payload = {'brew_target': 123} - with self.test_request_context(user='root'): + payload = {"brew_target": 123} + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', json=payload, content_type='application/json') + "/api/1/async-builds/", json=payload, content_type="application/json" + ) self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"brew_target" must be a string.') + self.assertEqual(resp.json["message"], '"brew_target" must be a string.') def test_async_build_invalid_type_dry_run(self): - payload = {'dry_run': '123'} - with self.test_request_context(user='root'): + payload = {"dry_run": "123"} + with self.test_request_context(user="root"): resp = self.client.post( - '/api/1/async-builds/', json=payload, content_type='application/json') + "/api/1/async-builds/", json=payload, content_type="application/json" + ) self.assertEqual(resp.status_code, 400) - self.assertEqual(resp.json['message'], '"dry_run" must be a boolean.') + self.assertEqual(resp.json["message"], '"dry_run" must be a boolean.') class TestPatchAPI(ViewBaseTest): def test_patch_event_cancel(self): event = models.Event.create( db.session, - '2017-00000000-0000-0000-0000-000000000003', - '103', + "2017-00000000-0000-0000-0000-000000000003", + "103", events.TestingEvent, # Tests that admins can cancel any event, regardless of the requester - requester='tom_hanks', - ) - models.ArtifactBuild.create(db.session, event, "mksh", "module", build_id=1237, - state=ArtifactBuildState.PLANNED.value) - models.ArtifactBuild.create(db.session, event, "bash", "module", build_id=1238, - state=ArtifactBuildState.PLANNED.value) - models.ArtifactBuild.create(db.session, event, "dash", "module", build_id=1239, - state=ArtifactBuildState.BUILD.value) - models.ArtifactBuild.create(db.session, event, "tcsh", "module", build_id=1240, - state=ArtifactBuildState.DONE.value) + requester="tom_hanks", + ) + models.ArtifactBuild.create( + db.session, + event, + "mksh", + "module", + build_id=1237, + state=ArtifactBuildState.PLANNED.value, + ) + models.ArtifactBuild.create( + db.session, + event, + "bash", + "module", + build_id=1238, + state=ArtifactBuildState.PLANNED.value, + ) + models.ArtifactBuild.create( + db.session, event, "dash", "module", build_id=1239, state=ArtifactBuildState.BUILD.value + ) + models.ArtifactBuild.create( + db.session, event, "tcsh", "module", build_id=1240, state=ArtifactBuildState.DONE.value + ) db.session.commit() - with self.test_request_context(user='root'): - resp = self.client.patch(f'/api/1/events/{event.id}', json={'action': 'cancel'}) + with self.test_request_context(user="root"): + resp = self.client.patch(f"/api/1/events/{event.id}", json={"action": "cancel"}) data = resp.json - self.assertEqual(data['id'], event.id) - self.assertEqual(len(data['builds']), 4) - self.assertEqual(data['state_name'], 'CANCELED') - self.assertTrue(data['state_reason'].startswith( - 'Event id {} requested for canceling by user '.format(event.id))) - 
self.assertEqual(len([b for b in data['builds'] if b['state_name'] == 'CANCELED']), 3) - self.assertEqual(len([b for b in data['builds'] if b['state_name'] == 'DONE']), 1) + self.assertEqual(data["id"], event.id) + self.assertEqual(len(data["builds"]), 4) + self.assertEqual(data["state_name"], "CANCELED") + self.assertTrue( + data["state_reason"].startswith( + "Event id {} requested for canceling by user ".format(event.id) + ) + ) + self.assertEqual(len([b for b in data["builds"] if b["state_name"] == "CANCELED"]), 3) + self.assertEqual(len([b for b in data["builds"] if b["state_name"] == "DONE"]), 1) def test_patch_event_cancel_user(self): event = models.Event.create( db.session, - '2017-00000000-0000-0000-0000-000000000003', - '123', + "2017-00000000-0000-0000-0000-000000000003", + "123", events.TestingEvent, - requester='tom_hanks', + requester="tom_hanks", ) db.session.commit() - with self.test_request_context(user='tom_hanks'): - resp = self.client.patch(f'/api/1/events/{event.id}', json={'action': 'cancel'}) + with self.test_request_context(user="tom_hanks"): + resp = self.client.patch(f"/api/1/events/{event.id}", json={"action": "cancel"}) assert resp.status_code == 200 def test_patch_event_cancel_user_not_their_event(self): event = models.Event.create( db.session, - '2017-00000000-0000-0000-0000-000000000003', - '103', + "2017-00000000-0000-0000-0000-000000000003", + "103", events.TestingEvent, - requester='han_solo', + requester="han_solo", ) db.session.commit() - with self.test_request_context(user='tom_hanks'): - resp = self.client.patch(f'/api/1/events/{event.id}', json={'action': 'cancel'}) + with self.test_request_context(user="tom_hanks"): + resp = self.client.patch(f"/api/1/events/{event.id}", json={"action": "cancel"}) assert resp.status_code == 403 - assert resp.json['message'] == 'You must be an admin to cancel someone else\'s event.' + assert resp.json["message"] == "You must be an admin to cancel someone else's event." 
def test_patch_event_cancel_with_noauth(self): event = models.Event.create( db.session, - '2017-00000000-0000-0000-0000-000000000003', - '123', + "2017-00000000-0000-0000-0000-000000000003", + "123", events.TestingEvent, - requester='tom_hanks', + requester="tom_hanks", ) db.session.commit() - with self.test_request_context(user=None, auth_backend='noauth'): - resp = self.client.patch(f'/api/1/events/{event.id}', json={'action': 'cancel'}) + with self.test_request_context(user=None, auth_backend="noauth"): + resp = self.client.patch(f"/api/1/events/{event.id}", json={"action": "cancel"}) assert resp.status_code == 200 def test_patch_event_not_allowed(self): - with self.test_request_context(user='john_smith'): - resp = self.client.patch('/api/1/events/1', json={'action': 'cancel'}) + with self.test_request_context(user="john_smith"): + resp = self.client.patch("/api/1/events/1", json={"action": "cancel"}) assert resp.status_code == 403 - assert resp.json['message'] == ( - 'User john_smith does not have any of the following roles: admin, manual_rebuilder' + assert resp.json["message"] == ( + "User john_smith does not have any of the following roles: admin, manual_rebuilder" ) @@ -1379,8 +1504,7 @@ class TestOpenIDCLogin(ViewBaseTest): def setUp(self): super(TestOpenIDCLogin, self).setUp() - self.patch_auth_backend = patch.object( - freshmaker.auth.conf, 'auth_backend', new='openidc') + self.patch_auth_backend = patch.object(freshmaker.auth.conf, "auth_backend", new="openidc") self.patch_auth_backend.start() def tearDown(self): @@ -1388,19 +1512,20 @@ def tearDown(self): self.patch_auth_backend.stop() def test_openidc_manual_trigger_unauthorized(self): - rv = self.client.post('/api/1/builds/', - data=json.dumps({'errata_id': 1}), - content_type='application/json') - self.assertEqual(rv.status, '401 UNAUTHORIZED') + rv = self.client.post( + "/api/1/builds/", data=json.dumps({"errata_id": 1}), content_type="application/json" + ) + self.assertEqual(rv.status, "401 UNAUTHORIZED") def test_openidc_manual_trigger_authorized(self): - with self.test_request_context(user='dev', auth_backend="openidc", - oidc_scopes=["submit-build"]): - rv = self.client.post('/api/1/builds/', - data=json.dumps({'errata_id': 1}), - content_type='application/json') - self.assertEqual(rv.status, '403 FORBIDDEN') + with self.test_request_context( + user="dev", auth_backend="openidc", oidc_scopes=["submit-build"] + ): + rv = self.client.post( + "/api/1/builds/", data=json.dumps({"errata_id": 1}), content_type="application/json" + ) + self.assertEqual(rv.status, "403 FORBIDDEN") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() From c4b864f4a0c62b9aa2de2b2bdcda205fbc20347f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A1rio=20Foganholi=20Fernandes?= Date: Mon, 18 Sep 2023 16:09:06 -0300 Subject: [PATCH 2/2] Add black formatting in tox + rev ignore in git blame As part of our effort to standardize our project repos, we are adding black formatting to freshmaker in tox. 
This commit:
- adds a step with `black --check` in tox, to verify the code is well formatted
- adds a `.git-blame-ignore-revs` file to keep track of large reformatting commits that should be ignored by `git blame`
- adds instructions on how to use `.git-blame-ignore-revs` automatically in a local environment
- adds E203 to flake8 ignores, to prevent conflicts with black's style

For more information, please see the following links:
https://black.readthedocs.io/en/stable/guides/introducing_black_to_your_project.html
https://akrabat.com/ignoring-revisions-with-git-blame/
https://stackoverflow.com/questions/34957237/can-i-configure-git-blame-to-always-ignore-certain-commits-want-to-fix-git-blam/57129540#57129540
https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view

JIRA: CWFHEALTH-2279
---
 .git-blame-ignore-revs |  7 +++++++
 README.md              | 20 ++++++++++++++++++++
 tox.ini                | 10 ++++++++--
 3 files changed, 35 insertions(+), 2 deletions(-)
 create mode 100644 .git-blame-ignore-revs

diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000..b54f98a9
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,7 @@
+# Have a look at
+# https://stackoverflow.com/questions/34957237/can-i-configure-git-blame-to-always-ignore-certain-commits-want-to-fix-git-blam/57129540#57129540
+# https://akrabat.com/ignoring-revisions-with-git-blame/
+# https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view
+
+# Run code through black for formatting
+e3061441e1492f44386d10b1c6426162e6d0c81c
\ No newline at end of file
diff --git a/README.md b/README.md
index f0b119d6..58674ff1 100644
--- a/README.md
+++ b/README.md
@@ -120,3 +120,23 @@ python -m pip install pip-tools
 ```
 more info available at:
 https://github.com/jazzband/pip-tools/
+
+
+# Ignoring large reformattings with git blame
+
+The commits listed in `.git-blame-ignore-revs` are automatically ignored in the GitHub blame view
+(https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view).
+This feature is meant to ignore commits that just introduced reformatting, and maintain the logical
+contributions of each line.
+
+You can make use of the `.git-blame-ignore-revs` file locally by passing it as an argument to the
+blame command:
+```
+git blame --ignore-revs-file .git-blame-ignore-revs ...
+```
+or by adding it to your local git configuration:
+```
+git config --local blame.ignoreRevsFile .git-blame-ignore-revs
+```
+Note that this should be done per project, as some projects may lack the `.git-blame-ignore-revs`
+file, in which case `git blame` would return an error.
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index 4abc022d..d420741a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = bandit, docs, flake8, mypy, py311
+envlist = bandit, docs, flake8, black, mypy, py311
 ignore_base_python_conflict = True
 
 [testenv]
@@ -30,6 +30,12 @@ skip_install = true
 deps = flake8
 commands = flake8
 
+[testenv:black]
+basepython = python3
+deps = black
+commands = black --line-length 100 --check . \
+    --extend-exclude="dev_scripts\/|freshmaker\/migrations\/|\.tox\/|build\/|__pycache__|scripts\/print_handlers_md\.py|\.copr\/|\.env"
+
 [testenv:mypy]
 description = type check
 deps =
@@ -51,7 +57,7 @@ ignore_outcome = True
 
 [flake8]
 skip_install = true
-ignore = E501,E731,W503,W504
+ignore = E501,E731,W503,W504,E203
 exclude = dev_scripts/*,freshmaker/migrations/*,.tox/*,build/*,__pycache__,scripts/print_handlers_md.py,.copr/*,.env
 
 [testenv:docs]