From 88c365239f35517dab48995e38098fc68f6093a0 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 30 Mar 2023 13:22:39 -0700 Subject: [PATCH 01/30] Added additional environment variables for frontend to allow the displaying another projects code-coverage information - REPOSITORY: The repository url - PROJECT: The project name --- frontend/README.md | 8 ++++++++ frontend/src/common.js | 6 ++++-- frontend/src/index.js | 3 ++- frontend/src/zero_coverage_report.js | 3 ++- frontend/webpack.common.js | 2 ++ 5 files changed, 18 insertions(+), 4 deletions(-) diff --git a/frontend/README.md b/frontend/README.md index 4d1148ae2..a4036f71e 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -30,6 +30,14 @@ You can specify another remote backend like so: BACKEND_URL=https://api.coverage.moz.tools npm run start ``` +## Displaying Another Project + +You can specify another project by using the environment variables `REPOSITORY`, and `PROJECT` like so: + +``` +REPOSITORY=https://hg.mozilla.org/comm-central PROJECT=comm-central npm run start +``` + ## Help You can reach us on our Matrix instance: [#codecoverage:mozilla.org](https://chat.mozilla.org/#/room/#codecoverage:mozilla.org) diff --git a/frontend/src/common.js b/frontend/src/common.js index da0ecb080..81adc3ce3 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -33,6 +33,8 @@ export async function main(load, display) { // Coverage retrieval. 
const COVERAGE_BACKEND_HOST = process.env.BACKEND_URL; +const COVERAGE_REPOSITORY = process.env.REPOSITORY; +export const COVERAGE_PROJECT = process.env.PROJECT; function cacheGet(cache, key) { if (key in cache) { @@ -180,7 +182,7 @@ export async function getSource(file, revision) { if (!revision || revision === "latest") { revision = "tip"; } - const url = `https://hg.mozilla.org/mozilla-central/raw-file/${revision}/${file}`; + const url = `${COVERAGE_REPOSITORY}/raw-file/${revision}/${file}`; let source = cacheGet(sourceCache, url); if (source) { @@ -328,7 +330,7 @@ export function buildNavbar(path, revision) { let base = ""; const links = [ { - name: "mozilla-central", + name: COVERAGE_PROJECT, route: buildRoute({ path: "", revision }) } ]; diff --git a/frontend/src/index.js b/frontend/src/index.js index 5aa2eaeff..fe8d304bd 100644 --- a/frontend/src/index.js +++ b/frontend/src/index.js @@ -1,4 +1,5 @@ import { + COVERAGE_PROJECT, REV_LATEST, DOM_READY, main, @@ -232,7 +233,7 @@ async function load() { message( "loading", "Loading coverage data for " + - (route.path || "mozilla-central") + + (route.path || COVERAGE_PROJECT) + " @ " + (route.revision || REV_LATEST) ); diff --git a/frontend/src/zero_coverage_report.js b/frontend/src/zero_coverage_report.js index ea356854d..7c5bf697a 100644 --- a/frontend/src/zero_coverage_report.js +++ b/frontend/src/zero_coverage_report.js @@ -1,4 +1,5 @@ import { + COVERAGE_PROJECT, hide, message, buildNavbar, @@ -132,7 +133,7 @@ export async function zeroCoverageDisplay(data, dir) { hide("history"); message( "loading", - "Loading zero coverage report for " + (dir || "mozilla-central") + "Loading zero coverage report for " + (dir || COVERAGE_PROJECT) ); while (dir.endsWith("/")) { diff --git a/frontend/webpack.common.js b/frontend/webpack.common.js index bcdd0b9ae..908fcdd62 100644 --- a/frontend/webpack.common.js +++ b/frontend/webpack.common.js @@ -22,6 +22,8 @@ 
module.exports = { }), new webpack.EnvironmentPlugin({ BACKEND_URL: 'http://localhost:8000', + REPOSITORY: 'https://hg.mozilla.org/mozilla-central', + PROJECT: 'mozilla-central' }), ], module: { From a989f70db483cb2f4711fa2ccdec176f1d48c607 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 30 Mar 2023 15:07:07 -0700 Subject: [PATCH 02/30] Added an additional environment variable for frontend to adjust the zero coverage report location --- frontend/src/common.js | 5 ++--- frontend/webpack.common.js | 3 ++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/src/common.js b/frontend/src/common.js index 81adc3ce3..4a56e40e3 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -35,6 +35,7 @@ export async function main(load, display) { const COVERAGE_BACKEND_HOST = process.env.BACKEND_URL; const COVERAGE_REPOSITORY = process.env.REPOSITORY; export const COVERAGE_PROJECT = process.env.PROJECT; +const ZERO_COVERAGE_REPORT = process.env.ZERO_COVERAGE_REPORT; function cacheGet(cache, key) { if (key in cache) { @@ -138,9 +139,7 @@ export async function getZeroCoverageData() { return data; } - const response = await fetch( - "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/zero_coverage_report.json" - ); + const response = await fetch(ZERO_COVERAGE_REPORT); data = await response.json(); cacheSet(zeroCoverageCache, "", data); diff --git a/frontend/webpack.common.js b/frontend/webpack.common.js index 908fcdd62..cc45cafa8 100644 --- a/frontend/webpack.common.js +++ b/frontend/webpack.common.js @@ -23,7 +23,8 @@ module.exports = { new webpack.EnvironmentPlugin({ BACKEND_URL: 'http://localhost:8000', REPOSITORY: 
'https://hg.mozilla.org/mozilla-central', - PROJECT: 'mozilla-central' + PROJECT: 'mozilla-central', + ZERO_COVERAGE_REPORT: 'https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/zero_coverage_report.json' }), ], module: { From 691ae682ba925c596612c3e47ca342486aa44e9a Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Mon, 3 Apr 2023 12:07:36 -0700 Subject: [PATCH 03/30] Moved DEFAULT_REPOSITORY to config and have it pull an environment variable named REPOSITORY --- backend/code_coverage_backend/api.py | 10 +++++----- backend/code_coverage_backend/config.py | 3 +++ backend/code_coverage_backend/gcp.py | 3 ++- backend/tools/cleanup.py | 4 +++- 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/backend/code_coverage_backend/api.py b/backend/code_coverage_backend/api.py index cd21c603d..bcda84d63 100644 --- a/backend/code_coverage_backend/api.py +++ b/backend/code_coverage_backend/api.py @@ -6,11 +6,11 @@ import structlog from flask import abort +from code_coverage_backend import config from code_coverage_backend.gcp import load_cache from code_coverage_backend.report import DEFAULT_FILTER from code_coverage_tools import COVERAGE_EXTENSIONS -DEFAULT_REPOSITORY = "mozilla-central" logger = structlog.get_logger(__name__) @@ -21,7 +21,7 @@ def coverage_supported_extensions(): return COVERAGE_EXTENSIONS -def coverage_latest(repository=DEFAULT_REPOSITORY): +def coverage_latest(repository=config.DEFAULT_REPOSITORY): """ List the last 10 reports available on the server """ @@ -43,7 +43,7 @@ def coverage_latest(repository=DEFAULT_REPOSITORY): def coverage_for_path( path="", changeset=None, - repository=DEFAULT_REPOSITORY, + repository=config.DEFAULT_REPOSITORY, platform=DEFAULT_FILTER, suite=DEFAULT_FILTER, ): @@ -84,7 +84,7 @@ def coverage_for_path( def coverage_history( - 
repository=DEFAULT_REPOSITORY, + repository=config.DEFAULT_REPOSITORY, path="", start=None, end=None, @@ -113,7 +113,7 @@ def coverage_history( abort(400) -def coverage_filters(repository=DEFAULT_REPOSITORY): +def coverage_filters(repository=config.DEFAULT_REPOSITORY): """ List all available filters for that repository """ diff --git a/backend/code_coverage_backend/config.py b/backend/code_coverage_backend/config.py index ac46c032e..2662bc61e 100644 --- a/backend/code_coverage_backend/config.py +++ b/backend/code_coverage_backend/config.py @@ -5,5 +5,8 @@ from __future__ import absolute_import +import os + PROJECT_NAME = "code-coverage-backend" APP_NAME = "code_coverage_backend" +DEFAULT_REPOSITORY = os.getenv("REPOSITORY", "mozilla-central") diff --git a/backend/code_coverage_backend/gcp.py b/backend/code_coverage_backend/gcp.py index c077408ad..5bb1f48d4 100644 --- a/backend/code_coverage_backend/gcp.py +++ b/backend/code_coverage_backend/gcp.py @@ -11,6 +11,7 @@ import structlog from dateutil.relativedelta import relativedelta +from code_coverage_backend import config from code_coverage_backend import covdir from code_coverage_backend import taskcluster from code_coverage_backend.hgmo import hgmo_pushes @@ -30,7 +31,7 @@ KEY_PLATFORMS = "platforms:{repository}" KEY_SUITES = "suites:{repository}" -REPOSITORIES = ("mozilla-central",) +REPOSITORIES = (config.DEFAULT_REPOSITORY,) MIN_PUSH = 0 MAX_PUSH = math.inf diff --git a/backend/tools/cleanup.py b/backend/tools/cleanup.py index 4901ea545..22bb9ae32 100644 --- a/backend/tools/cleanup.py +++ b/backend/tools/cleanup.py @@ -7,6 +7,8 @@ import redis +from code_coverage_backend import config + def cleanup(client, prefix): nb, memory = 0, 0 @@ -26,4 +28,4 @@ def cleanup(client, prefix): if __name__ == "__main__": client = redis.from_url(os.environ["REDIS_URL"]) - cleanup(client, "overall:mozilla-central") + cleanup(client, f"overall:{config.DEFAULT_REPOSITORY}") From 9942fc24949ebc5c72d01a7578488f8636ad8216 Mon Sep 17 
00:00:00 2001 From: Melissa Autumn Date: Mon, 3 Apr 2023 15:59:51 -0700 Subject: [PATCH 04/30] Added additional environment variables for the bot to allow multi-repo use --- bot/code_coverage_bot/cli.py | 4 ++ bot/code_coverage_bot/commit_coverage.py | 8 ++-- bot/code_coverage_bot/config.py | 6 +++ bot/code_coverage_bot/hooks/base.py | 39 ++++++++++++++++-- bot/code_coverage_bot/hooks/cron.py | 46 +++++++++++++++------- bot/code_coverage_bot/hooks/crontrigger.py | 40 +++++++++++++------ bot/code_coverage_bot/hooks/repo.py | 42 +++++++++++--------- bot/code_coverage_bot/taskcluster.py | 4 +- bot/code_coverage_bot/trigger_missing.py | 10 ++--- bot/code_coverage_bot/zero_coverage.py | 10 ++++- 10 files changed, 149 insertions(+), 60 deletions(-) diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index ea31ff837..273d131d5 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -22,6 +22,10 @@ def setup_cli(ask_repository=True, ask_revision=True): parser.add_argument("--repository", default=os.environ.get("REPOSITORY")) if ask_revision: parser.add_argument("--revision", default=os.environ.get("REVISION")) + parser.add_argument("--namespace", default=os.environ.get("NAMESPACE")) + parser.add_argument("--project", default=os.environ.get("PROJECT")) + parser.add_argument("--upstream", default=os.environ.get("UPSTREAM")) + parser.add_argument("--prefix", default=os.environ.get("PREFIX")) parser.add_argument( "--cache-root", required=True, help="Cache root, used to pull changesets" ) diff --git a/bot/code_coverage_bot/commit_coverage.py b/bot/code_coverage_bot/commit_coverage.py index 24c265e6c..c8784dea8 100644 --- a/bot/code_coverage_bot/commit_coverage.py +++ b/bot/code_coverage_bot/commit_coverage.py @@ -39,7 +39,9 @@ def _init_thread(repo_dir: str) -> None: hg_servers.append(hg_server) -def generate(server_address: str, repo_dir: str, out_dir: str = ".") -> None: +def generate( + server_address: str, repo_dir: str, project: 
str, out_dir: str = "." +) -> None: start_time = time.monotonic() commit_coverage_path = os.path.join(out_dir, "commit_coverage.json.zst") @@ -70,7 +72,7 @@ def _upload(): # We are only interested in "overall" coverage, not platform or suite specific. changesets_to_analyze = [ changeset - for changeset, platform, suite in list_reports(bucket, "mozilla-central") + for changeset, platform, suite in list_reports(bucket, project) if platform == DEFAULT_FILTER and suite == DEFAULT_FILTER ] @@ -85,7 +87,7 @@ def _upload(): # correct. def analyze_changeset(changeset_to_analyze: str) -> None: report_name = get_name( - "mozilla-central", changeset_to_analyze, DEFAULT_FILTER, DEFAULT_FILTER + project, changeset_to_analyze, DEFAULT_FILTER, DEFAULT_FILTER ) assert download_report( os.path.join(out_dir, "ccov-reports"), bucket, report_name diff --git a/bot/code_coverage_bot/config.py b/bot/code_coverage_bot/config.py index be4d1d28c..b627a96bc 100644 --- a/bot/code_coverage_bot/config.py +++ b/bot/code_coverage_bot/config.py @@ -6,3 +6,9 @@ HG_BASE = "https://hg.mozilla.org/" MOZILLA_CENTRAL_REPOSITORY = "{}mozilla-central".format(HG_BASE) TRY_REPOSITORY = "{}try".format(HG_BASE) + +DEFAULT_UPSTREAM = "https://hg.mozilla.org/mozilla-unified" +DEFAULT_PROJECT = "mozilla-central" +DEFAULT_REPOSITORY = MOZILLA_CENTRAL_REPOSITORY +DEFAULT_NAMESPACE = "gecko" +DEFAULT_PREFIX = None diff --git a/bot/code_coverage_bot/hooks/base.py b/bot/code_coverage_bot/hooks/base.py index eab64f785..d41d4a69f 100644 --- a/bot/code_coverage_bot/hooks/base.py +++ b/bot/code_coverage_bot/hooks/base.py @@ -15,6 +15,7 @@ from code_coverage_bot import grcov from code_coverage_bot import taskcluster from code_coverage_bot.artifacts import ArtifactsHandler +from code_coverage_bot.secrets import secrets from code_coverage_bot.taskcluster import taskcluster_config from code_coverage_bot.utils import 
ThreadPoolExecutorResult @@ -22,10 +23,16 @@ class Hook(object): + HOOK_NAME = "base" + def __init__( self, + namespace, + project, repository, + upstream, revision, + prefix, task_name_filter, cache_root, working_dir, @@ -40,8 +47,12 @@ def __init__( reports=self.reports_dir, ) + self.namespace = namespace + self.project = project self.repository = repository + self.upstream = upstream self.revision = revision + self.prefix = prefix assert ( self.revision is not None and self.repository is not None ), "Missing repo/revision" @@ -54,7 +65,9 @@ def __init__( self.repo_dir = os.path.join(cache_root, self.branch) # Load coverage tasks for all platforms - decision_task_id = taskcluster.get_decision_task(self.branch, self.revision) + decision_task_id = taskcluster.get_decision_task( + self.namespace, self.branch, self.revision + ) assert decision_task_id is not None, "The decision task couldn't be found" @@ -81,6 +94,18 @@ def __init__( def branch(self): return self.repository[len(config.HG_BASE) :] + @property + def hook(self): + """Taskcluster path to this specific hook. 
For backwards compat mozilla-central does not include the project name.""" + if self.project == "mozilla-central": + return "project.relman.code-coverage.{dev}.{name}".format( + dev=secrets[secrets.APP_CHANNEL], + name=self.HOOK_NAME, + ) + return "project.relman.code-coverage.{dev}.{name}.{project}".format( + dev=secrets[secrets.APP_CHANNEL], name=self.HOOK_NAME, project=self.project + ) + def clone_repository(self): cmd = hglib.util.cmdbuilder( "robustcheckout", @@ -88,7 +113,7 @@ def clone_repository(self): self.repo_dir, purge=True, sharebase="hg-shared", - upstream="https://hg.mozilla.org/mozilla-unified", + upstream=self.upstream, revision=self.revision, networkattempts=7, ) @@ -146,8 +171,16 @@ def build_reports(self, only=None): platform=platform, artifacts=len(artifacts), ) + + options = [] + if self.prefix: + options = ["-p", self.prefix] + output = grcov.report( - artifacts, source_dir=self.repo_dir, out_format="covdir" + artifacts, + source_dir=self.repo_dir, + out_format="covdir", + options=options, ) # Write output on FS diff --git a/bot/code_coverage_bot/hooks/cron.py b/bot/code_coverage_bot/hooks/cron.py index 32d21d6fa..23611dcd9 100644 --- a/bot/code_coverage_bot/hooks/cron.py +++ b/bot/code_coverage_bot/hooks/cron.py @@ -10,7 +10,6 @@ from code_coverage_bot import uploader from code_coverage_bot.cli import setup_cli from code_coverage_bot.hooks.base import Hook -from code_coverage_bot.secrets import secrets from code_coverage_bot.zero_coverage import ZeroCov logger = structlog.get_logger(__name__) @@ -21,25 +20,31 @@ class CronHook(Hook): This function is executed when the bot is triggered via cron. 
""" - def __init__(self, *args, **kwargs): + HOOK_NAME = "cron" + + def __init__( + self, namespace, project, repository, upstream, prefix, *args, **kwargs + ): # Retrieve latest ingested revision try: - revision = uploader.gcp_latest("mozilla-central")[0]["revision"] + revision = uploader.gcp_latest(project)[0]["revision"] except Exception as e: logger.warn("Failed to retrieve the latest reports ingested: {}".format(e)) raise - super().__init__(config.MOZILLA_CENTRAL_REPOSITORY, revision, *args, **kwargs) + super().__init__( + namespace, project, repository, upstream, revision, prefix, *args, **kwargs + ) def run(self) -> None: self.retrieve_source_and_artifacts() - commit_coverage.generate(self.repository, self.repo_dir) + commit_coverage.generate(self.repository, self.project, self.repo_dir) logger.info("Generating zero coverage reports") zc = ZeroCov(self.repo_dir) - zc.generate(self.artifactsHandler.get(), self.revision) + zc.generate(self.artifactsHandler.get(), self.revision, prefix=self.prefix) # This is disabled as it is not used yet. # logger.info("Generating chunk mapping") @@ -48,20 +53,33 @@ def run(self) -> None: # Index the task in the TaskCluster index at the given revision and as "latest". # Given that all tasks have the same rank, the latest task that finishes will # overwrite the "latest" entry. 
+ print(self.hook) self.index_task( [ - "project.relman.code-coverage.{}.cron.{}".format( - secrets[secrets.APP_CHANNEL], self.revision - ), - "project.relman.code-coverage.{}.cron.latest".format( - secrets[secrets.APP_CHANNEL] - ), + "{}.{}".format(self.hook, self.revision), + "{}.latest".format(self.hook), ] ) def main() -> None: logger.info("Starting code coverage bot for cron") - args = setup_cli(ask_revision=False, ask_repository=False) - hook = CronHook(args.task_name_filter, args.cache_root, args.working_dir) + args = setup_cli(ask_revision=False, ask_repository=True) + + namespace = args.namespace or config.DEFAULT_NAMESPACE + project = args.project or config.DEFAULT_PROJECT + repository = args.repository or config.DEFAULT_REPOSITORY + upstream = args.upstream or config.DEFAULT_UPSTREAM + prefix = args.prefix or None + + hook = CronHook( + namespace, + project, + repository, + upstream, + prefix, + args.task_name_filter, + args.cache_root, + args.working_dir, + ) hook.run() diff --git a/bot/code_coverage_bot/hooks/crontrigger.py b/bot/code_coverage_bot/hooks/crontrigger.py index b5dee72b1..937cdbd00 100644 --- a/bot/code_coverage_bot/hooks/crontrigger.py +++ b/bot/code_coverage_bot/hooks/crontrigger.py @@ -10,7 +10,6 @@ from code_coverage_bot import uploader from code_coverage_bot.cli import setup_cli from code_coverage_bot.hooks.base import Hook -from code_coverage_bot.secrets import secrets logger = structlog.get_logger(__name__) @@ -20,36 +19,51 @@ class CronTriggerHook(Hook): This function is executed when the bot is triggered via cron. 
""" - def __init__(self, *args, **kwargs): + HOOK_NAME = "crontrigger" + + def __init__(self, namespace, project, repository, *args, **kwargs): # Retrieve latest ingested revision try: - revision = uploader.gcp_latest("mozilla-central")[0]["revision"] + revision = uploader.gcp_latest(project)[0]["revision"] except Exception as e: logger.warn("Failed to retrieve the latest reports ingested: {}".format(e)) raise - super().__init__(config.MOZILLA_CENTRAL_REPOSITORY, revision, *args, **kwargs) + super().__init__(namespace, repository, revision, *args, **kwargs) def run(self) -> None: - trigger_missing.trigger_missing(config.MOZILLA_CENTRAL_REPOSITORY) + trigger_missing.trigger_missing(self.repository, self.namespace, self.project) # Index the task in the TaskCluster index at the given revision and as "latest". # Given that all tasks have the same rank, the latest task that finishes will # overwrite the "latest" entry. + + # Preserve the original path if we're using mozilla-central as the project, + # otherwise append the project name after 'crontrigger' self.index_task( [ - "project.relman.code-coverage.{}.crontrigger.{}".format( - secrets[secrets.APP_CHANNEL], self.revision - ), - "project.relman.code-coverage.{}.crontrigger.latest".format( - secrets[secrets.APP_CHANNEL] - ), + "{}.{}".format(self.hook, self.revision), + "{}.latest".format(self.hook), ] ) def main() -> None: logger.info("Starting code coverage bot for crontrigger") - args = setup_cli(ask_revision=False, ask_repository=False) - hook = CronTriggerHook(args.task_name_filter, None, args.working_dir) + args = setup_cli(ask_revision=False, ask_repository=True) + + namespace = args.namespace or config.DEFAULT_NAMESPACE + project = args.project or config.DEFAULT_PROJECT + repository = args.repository or config.DEFAULT_REPOSITORY + upstream = args.upstream or config.DEFAULT_UPSTREAM + + hook = CronTriggerHook( + namespace, + project, + repository, + upstream, + args.task_name_filter, + None, + 
args.working_dir, + ) hook.run() diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 26a20acff..f7e6fd58b 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -30,6 +30,8 @@ class RepositoryHook(Hook): Base class to support specific workflows per repository """ + HOOK_NAME = "repo" + def upload_reports(self, reports): """ Upload all provided covdir reports on GCP @@ -95,7 +97,6 @@ class MozillaCentralHook(RepositoryHook): def __init__(self, *args, **kwargs): super().__init__( - config.MOZILLA_CENTRAL_REPOSITORY, # On mozilla-central, we want to assert that every platform was run (except for android platforms # as they are unstable). required_platforms=["linux", "windows"], @@ -132,12 +133,8 @@ def run(self): # Index on Taskcluster self.index_task( [ - "project.relman.code-coverage.{}.repo.mozilla-central.{}".format( - secrets[secrets.APP_CHANNEL], self.revision - ), - "project.relman.code-coverage.{}.repo.mozilla-central.latest".format( - secrets[secrets.APP_CHANNEL] - ), + "{}.{}".format(self.hook, self.revision), + "{}.latest".format(self.hook), ] ) @@ -181,7 +178,6 @@ class TryHook(RepositoryHook): def __init__(self, *args, **kwargs): super().__init__( - config.TRY_REPOSITORY, # On try, developers might have requested to run only one platform, and we trust them. 
required_platforms=[], *args, @@ -215,11 +211,9 @@ def run(self): # Index on Taskcluster self.index_task( [ - "project.relman.code-coverage.{}.repo.try.{}".format( - secrets[secrets.APP_CHANNEL], self.revision - ), - "project.relman.code-coverage.{}.repo.try.latest".format( - secrets[secrets.APP_CHANNEL] + "{}.{}".format(self.hook_path, self.revision), + "project.relman.code-coverage.{}.repo.{}.latest".format( + secrets[secrets.APP_CHANNEL], self.project ), ] ) @@ -229,14 +223,26 @@ def main(): logger.info("Starting code coverage bot for repository") args = setup_cli() + namespace = args.namespace or config.DEFAULT_NAMESPACE + project = args.project or config.DEFAULT_PROJECT + repository = args.repository or config.DEFAULT_REPOSITORY + upstream = args.upstream or config.DEFAULT_UPSTREAM + hooks = { - config.MOZILLA_CENTRAL_REPOSITORY: MozillaCentralHook, - config.TRY_REPOSITORY: TryHook, + "central": MozillaCentralHook, + "try": TryHook, } - hook_class = hooks.get(args.repository) - assert hook_class is not None, f"Unsupported repository {args.repository}" + hook_class = hooks.get(args.hook) + assert hook_class is not None, f"Unsupported hook type {args.hook}" hook = hook_class( - args.revision, args.task_name_filter, args.cache_root, args.working_dir + namespace, + project, + repository, + upstream, + args.revision, + args.task_name_filter, + args.cache_root, + args.working_dir, ) hook.run() diff --git a/bot/code_coverage_bot/taskcluster.py b/bot/code_coverage_bot/taskcluster.py index 2f23d46be..9a4f8b698 100644 --- a/bot/code_coverage_bot/taskcluster.py +++ b/bot/code_coverage_bot/taskcluster.py @@ -15,8 +15,8 @@ NAME_PARTS_TO_SKIP = ("opt", "debug", "e10s", "1proc") -def get_decision_task(branch, revision): - route = f"gecko.v2.{branch}.revision.{revision}.taskgraph.decision" +def get_decision_task(namespace, branch, revision): + route = f"{namespace}.v2.{branch}.revision.{revision}.taskgraph.decision" index = taskcluster_config.get_service("index") try: return 
index.findTask(route)["taskId"] diff --git a/bot/code_coverage_bot/trigger_missing.py b/bot/code_coverage_bot/trigger_missing.py index 3f805d020..47467e526 100644 --- a/bot/code_coverage_bot/trigger_missing.py +++ b/bot/code_coverage_bot/trigger_missing.py @@ -45,7 +45,9 @@ def trigger_task(task_group_id: str, revision: str) -> None: ) -def trigger_missing(server_address: str, out_dir: str = ".") -> None: +def trigger_missing( + server_address: str, namespace: str, project: str, out_dir: str = "." +) -> None: triggered_revisions_path = os.path.join(out_dir, "triggered_revisions.zst") url = f"https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.{secrets[secrets.APP_CHANNEL]}.crontrigger.latest/artifacts/public/triggered_revisions.zst" @@ -90,9 +92,7 @@ def trigger_missing(server_address: str, out_dir: str = ".") -> None: continue # If the revision was already ingested, we don't need to trigger ingestion for it again. - if uploader.gcp_covdir_exists( - bucket, "mozilla-central", revision, "all", "all" - ): + if uploader.gcp_covdir_exists(bucket, project, revision, "all", "all"): triggered_revisions.add(revision) continue @@ -110,7 +110,7 @@ def trigger_missing(server_address: str, out_dir: str = ".") -> None: # If it is newer than yesterday, we load the group and check if all tasks in it finished. 
if timestamp > yesterday: decision_task_id = taskcluster.get_decision_task( - "mozilla-central", revision + namespace, project, revision ) if decision_task_id is None: continue diff --git a/bot/code_coverage_bot/zero_coverage.py b/bot/code_coverage_bot/zero_coverage.py index cd69dff8e..3069085ac 100644 --- a/bot/code_coverage_bot/zero_coverage.py +++ b/bot/code_coverage_bot/zero_coverage.py @@ -89,9 +89,15 @@ def get_fileinfo(self, filenames): return res - def generate(self, artifacts, hgrev, out_dir="."): + def generate(self, artifacts, hgrev, out_dir=".", prefix=None): + options = [] + if prefix: + options = ["-p", prefix] report = grcov.report( - artifacts, out_format="coveralls+", source_dir=self.repo_dir + artifacts, + out_format="coveralls+", + source_dir=self.repo_dir, + options=options, ) report = json.loads(report) From bc6bfa32179f1c9d07ce8d02aabd4b23dc0b7b5c Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Tue, 4 Apr 2023 09:28:06 -0700 Subject: [PATCH 05/30] Additional fixes to adjust window title based on project name, and add missing cli argument --- bot/code_coverage_bot/cli.py | 4 ++++ frontend/src/base.html | 2 +- frontend/src/common.js | 14 +++++++------- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index 273d131d5..5fa8ab6a1 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -26,6 +26,10 @@ def setup_cli(ask_repository=True, ask_revision=True): parser.add_argument("--project", default=os.environ.get("PROJECT")) parser.add_argument("--upstream", default=os.environ.get("UPSTREAM")) parser.add_argument("--prefix", default=os.environ.get("PREFIX")) + parser.add_argument( + "--hook", + help="Which hook mode you want repo to run in, either 'central' or 'try'", + ) parser.add_argument( "--cache-root", required=True, help="Cache root, used to pull changesets" ) diff --git a/frontend/src/base.html b/frontend/src/base.html index 
9bcf4f97c..2e5e4ab0f 100644 --- a/frontend/src/base.html +++ b/frontend/src/base.html @@ -2,7 +2,7 @@ -mozilla-central coverage +code coverage diff --git a/frontend/src/common.js b/frontend/src/common.js index 4a56e40e3..cc9b5a689 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -4,6 +4,12 @@ import { ZERO_COVERAGE_FILTERS } from "./zero_coverage_report.js"; export const REV_LATEST = "latest"; +// Coverage retrieval. +export const COVERAGE_PROJECT = process.env.PROJECT; +const COVERAGE_BACKEND_HOST = process.env.BACKEND_URL; +const COVERAGE_REPOSITORY = process.env.REPOSITORY; +const ZERO_COVERAGE_REPORT = process.env.ZERO_COVERAGE_REPORT; + function domContentLoaded() { return new Promise(resolve => document.addEventListener("DOMContentLoaded", resolve) @@ -28,15 +34,9 @@ export async function main(load, display) { // React to url changes window.onhashchange = full; + window.title = `${COVERAGE_PROJECT} coverage`; } -// Coverage retrieval. - -const COVERAGE_BACKEND_HOST = process.env.BACKEND_URL; -const COVERAGE_REPOSITORY = process.env.REPOSITORY; -export const COVERAGE_PROJECT = process.env.PROJECT; -const ZERO_COVERAGE_REPORT = process.env.ZERO_COVERAGE_REPORT; - function cacheGet(cache, key) { if (key in cache) { return cache[key].val; From 859b0f704c42cfd5aed645d5be8f278f9d68cee2 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Tue, 4 Apr 2023 13:46:19 -0700 Subject: [PATCH 06/30] Added thunderbird cron for our code coverage needs, and adjusted a bunch of small things to make it deployable --- .aws/task-definition.json | 119 ++++++++++++++++ .dockerignore | 2 +- .github/workflows/aws.yml | 113 +++++++++++++++ .gitignore | 2 + .../code_coverage_backend/backend/__init__.py | 14 +- backend/deploy.dockerfile | 13 ++ backend/dev.dockerfile | 13 ++ backend/run.sh | 2 +- backend/tb-run.sh | 6 + bot/code_coverage_bot/cli.py | 23 ++-- .../hooks/cron_thunderbird.py | 130 ++++++++++++++++++ bot/deploy.dockerfile | 28 ++++ bot/dev.dockerfile | 38 
+++++ bot/setup.py | 2 + docker-compose.yml | 50 +++++++ frontend/deploy.dockerfile | 30 ++++ frontend/dev.dockerfile | 24 ++++ .../docker/etc/nginx/conf.d/coverage.conf | 20 +++ 18 files changed, 614 insertions(+), 15 deletions(-) create mode 100644 .aws/task-definition.json create mode 100644 .github/workflows/aws.yml create mode 100644 backend/deploy.dockerfile create mode 100644 backend/dev.dockerfile create mode 100755 backend/tb-run.sh create mode 100644 bot/code_coverage_bot/hooks/cron_thunderbird.py create mode 100644 bot/deploy.dockerfile create mode 100644 bot/dev.dockerfile create mode 100644 docker-compose.yml create mode 100644 frontend/deploy.dockerfile create mode 100644 frontend/dev.dockerfile create mode 100644 frontend/docker/etc/nginx/conf.d/coverage.conf diff --git a/.aws/task-definition.json b/.aws/task-definition.json new file mode 100644 index 000000000..188ab66a9 --- /dev/null +++ b/.aws/task-definition.json @@ -0,0 +1,119 @@ +{ + "taskDefinitionArn": "arn:aws:ecs:us-east-1:768512802988:task-definition/coverage-definition:3", + "containerDefinitions": [ + { + "name": "frontend", + "image": "frontend:latest", + "cpu": 0, + "portMappings": [ + { + "name": "frontend-80-tcp", + "containerPort": 80, + "hostPort": 80, + "protocol": "tcp", + "appProtocol": "http" + } + ], + "essential": true, + "environment": [], + "environmentFiles": [], + "mountPoints": [], + "volumesFrom": [], + "logConfiguration": { + "logDriver": "awslogs", + "options": { + "awslogs-create-group": "true", + "awslogs-group": "/ecs/coverage-definition", + "awslogs-region": "us-east-1", + "awslogs-stream-prefix": "ecs" + } + } + }, + { + "name": "backend", + "image": "backend:latest", + "cpu": 0, + "portMappings": [ + { + "name": "backend-8080-tcp", + "containerPort": 8080, + "hostPort": 8080, + "protocol": "tcp", + "appProtocol": "http" + } + ], + "essential": true, + "environment": [], + "environmentFiles": [], + "secrets": [ + { + "name": "LOCAL_SECRETS", + "valueFrom": 
"arn:aws:secretsmanager:us-east-1:768512802988:secret:prod/coverage/secrets-eNXZJ2" + } + ], + "mountPoints": [], + "volumesFrom": [], + "logConfiguration": { + "logDriver": "awslogs", + "options": { + "awslogs-create-group": "true", + "awslogs-group": "/ecs/coverage-definition", + "awslogs-region": "us-east-1", + "awslogs-stream-prefix": "ecs" + } + } + } + ], + "family": "coverage-definition", + "taskRoleArn": "arn:aws:iam::768512802988:role/coverage-ci", + "executionRoleArn": "arn:aws:iam::768512802988:role/coverage-ci", + "networkMode": "awsvpc", + "revision": 3, + "volumes": [], + "status": "ACTIVE", + "requiresAttributes": [ + { + "name": "com.amazonaws.ecs.capability.logging-driver.awslogs" + }, + { + "name": "ecs.capability.execution-role-awslogs" + }, + { + "name": "com.amazonaws.ecs.capability.docker-remote-api.1.19" + }, + { + "name": "com.amazonaws.ecs.capability.task-iam-role" + }, + { + "name": "com.amazonaws.ecs.capability.docker-remote-api.1.18" + }, + { + "name": "ecs.capability.task-eni" + }, + { + "name": "com.amazonaws.ecs.capability.docker-remote-api.1.29" + } + ], + "placementConstraints": [], + "compatibilities": [ + "EC2", + "FARGATE" + ], + "requiresCompatibilities": [ + "FARGATE" + ], + "cpu": "512", + "memory": "3072", + "runtimePlatform": { + "cpuArchitecture": "X86_64", + "operatingSystemFamily": "LINUX" + }, + "registeredAt": "2023-04-04T18:24:48.399Z", + "registeredBy": "arn:aws:iam::768512802988:user/melissa", + "tags": [ + { + "key": "Project", + "value": "Coverage" + } + ] +} \ No newline at end of file diff --git a/.dockerignore b/.dockerignore index 7a1fb6f33..7816764af 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,4 +1,4 @@ .git -frontend/ +# frontend/ addon/ */tests/ diff --git a/.github/workflows/aws.yml b/.github/workflows/aws.yml new file mode 100644 index 000000000..56829bf2e --- /dev/null +++ b/.github/workflows/aws.yml @@ -0,0 +1,113 @@ +# This workflow will build and push a new container image to Amazon ECR, +# 
and then will deploy a new task definition to Amazon ECS, when there is a push to the "staging" branch. + +name: Deploy to Stage Environment + +# Stop any pending jobs +concurrency: + group: production + cancel-in-progress: true + +on: + push: + branches: [ "deploy" ] + +env: + AWS_REGION: us-east-1 + ECR_REPOSITORY: coverage + ECS_SERVICE: coverage-service + ECS_CLUSTER: coverage + ECS_TASK_DEFINITION: .aws/task-definition.json + + CONTAINER_FRONTEND: frontend + CONTAINER_BACKEND: backend + +permissions: + contents: read + +jobs: + deploy: + name: Build & Deploy + runs-on: ubuntu-latest + environment: + name: production + url: https://coverage.thunderbird.net + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ env.AWS_REGION }} + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Build, tag, and push backend to Amazon ECR + id: build-backend + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + IMAGE_TAG: backend-${{ github.sha }} + run: | + # Build a docker container and + # push it to ECR so that it can + # be deployed to ECS. + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG ./backend -f ./backend/deploy.dockerfile + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + echo "image_backend=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT + + - name: Build, tag, and push frontend to Amazon ECR + id: build-frontend + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + IMAGE_TAG: frontend-${{ github.sha }} + run: | + # Build a docker container and + # push it to ECR so that it can + # be deployed to ECS. 
+ docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG ./frontend -f ./frontend/deploy.dockerfile + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + echo "image_frontend=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT + + # Note: Bot is automatically scheduled to run every 6 hours + - name: Build, tag, and push bot to Amazon ECR + id: build-bot + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + IMAGE_TAG: bot:latest + run: | + # Build a docker container and + # push it to ECR so that it can + # be deployed to ECS. + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG ./bot -f ./bot/deploy.dockerfile + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + echo "image_bot=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT + + - name: Fill in the new backend image ID in the Amazon ECS task definition + id: task-def-backend + uses: aws-actions/amazon-ecs-render-task-definition@v1 + with: + task-definition: ${{ env.ECS_TASK_DEFINITION }} + container-name: ${{ env.CONTAINER_BACKEND }} + image: ${{ steps.build-backend.outputs.image_backend }} + + - name: Fill in the new frontend image ID in the Amazon ECS task definition + id: task-def-frontend + uses: aws-actions/amazon-ecs-render-task-definition@v1 + with: + task-definition: ${{ steps.task-def-backend.outputs.task-definition }} + container-name: ${{ env.CONTAINER_FRONTEND }} + image: ${{ steps.build-frontend.outputs.image_frontend }} + + - name: Deploy Amazon ECS task definition + uses: aws-actions/amazon-ecs-deploy-task-definition@v1 + with: + task-definition: ${{ steps.task-def-frontend.outputs.task-definition }} + service: ${{ env.ECS_SERVICE }} + cluster: ${{ env.ECS_CLUSTER }} + wait-for-service-stability: true diff --git a/.gitignore b/.gitignore index 7fdea582c..2f5a10324 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ *.pyc *.egg-info +build/ +code-coverage.yml \ No newline at end of file diff --git a/backend/code_coverage_backend/backend/__init__.py 
b/backend/code_coverage_backend/backend/__init__.py index cc0392bce..bac5f47ed 100644 --- a/backend/code_coverage_backend/backend/__init__.py +++ b/backend/code_coverage_backend/backend/__init__.py @@ -2,7 +2,7 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. - +import json import os.path import structlog @@ -19,6 +19,14 @@ def create_app(): # Load secrets from Taskcluster local_secrets_path = os.environ.get("LOCAL_CONFIGURATION") + local_secrets_aws = os.environ.get("LOCAL_SECRETS") + local_secrets = None + + if local_secrets_path: + local_secrets = yaml.safe_load(open(local_secrets_path)) + elif local_secrets_aws: + local_secrets = json.loads(local_secrets_aws) + if local_secrets_path is not None: assert os.path.exists( local_secrets_path @@ -29,9 +37,7 @@ def create_app(): prefixes=["common", "backend", "code-coverage-backend"], required=["GOOGLE_CLOUD_STORAGE", "APP_CHANNEL"], existing={"REDIS_URL": os.environ.get("REDIS_URL", "redis://localhost:6379")}, - local_secrets=yaml.safe_load(open(local_secrets_path)) - if local_secrets_path - else None, + local_secrets=local_secrets, ) # Configure logger diff --git a/backend/deploy.dockerfile b/backend/deploy.dockerfile new file mode 100644 index 000000000..f067e3f8d --- /dev/null +++ b/backend/deploy.dockerfile @@ -0,0 +1,13 @@ +FROM python:3.11.1-slim + +ADD tools /src/tools +ADD backend /src/backend + +RUN cd /src/tools && pip install --disable-pip-version-check --no-cache-dir --quiet . +RUN cd /src/backend && pip install --disable-pip-version-check --no-cache-dir --quiet . 
+ +ENV LOCAL_CONFIGURATION=/src/backend/code-coverage-conf.yml +ENV REPOSITORY=comm-central + +CMD "/src/backend/tb-run.sh" +#CMD ["gunicorn", "code_coverage_backend.flask:app", "--timeout", "30"] diff --git a/backend/dev.dockerfile b/backend/dev.dockerfile new file mode 100644 index 000000000..18a1857b7 --- /dev/null +++ b/backend/dev.dockerfile @@ -0,0 +1,13 @@ +FROM python:3.11.1-slim + +ADD tools /src/tools +ADD backend /src/backend + +RUN cd /src/tools && pip install --disable-pip-version-check --no-cache-dir --quiet . +RUN cd /src/backend && pip install --disable-pip-version-check --no-cache-dir --quiet . + +ENV LOCAL_CONFIGURATION=/src/backend/code-coverage-conf.yml +ENV REPOSITORY=comm-central + +CMD "/src/backend/run.sh" +#CMD ["gunicorn", "code_coverage_backend.flask:app", "--timeout", "30"] diff --git a/backend/run.sh b/backend/run.sh index e78089123..89a1f5bcb 100755 --- a/backend/run.sh +++ b/backend/run.sh @@ -3,4 +3,4 @@ if [[ ! $TASKCLUSTER_SECRET ]]; then export TASKCLUSTER_SECRET="project/relman/code-coverage/dev" echo 'Using dev secret' fi -gunicorn --bind localhost:8000 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app +gunicorn --bind 0.0.0.0:8001 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app diff --git a/backend/tb-run.sh b/backend/tb-run.sh new file mode 100755 index 000000000..f0fe7b603 --- /dev/null +++ b/backend/tb-run.sh @@ -0,0 +1,6 @@ +#!/bin/bash +if [[ ! $TASKCLUSTER_SECRET ]]; then + export TASKCLUSTER_SECRET="project/relman/code-coverage/dev" + echo 'Using dev secret' +fi +gunicorn --bind 0.0.0.0:8080 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index 5fa8ab6a1..5a11a6134 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -4,12 +4,13 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import argparse +import json +import logging import os import yaml from code_coverage_bot.secrets import secrets -from code_coverage_bot.taskcluster import taskcluster_config from code_coverage_tools.log import init_logger @@ -57,19 +58,23 @@ def setup_cli(ask_repository=True, ask_revision=True): parser.add_argument("--taskcluster-access-token", help="Taskcluster Access token") args = parser.parse_args() - # Auth on Taskcluster - taskcluster_config.auth(args.taskcluster_client_id, args.taskcluster_access_token) + # Auth on Taskcluster - We don't need this for now + # taskcluster_config.auth(args.taskcluster_client_id, args.taskcluster_access_token) + + local_secrets_aws = os.environ.get("LOCAL_SECRETS") + local_secrets = None + + if args.local_configuration: + local_secrets = yaml.safe_load(open(args.local_configuration)) + elif local_secrets_aws: + local_secrets = json.loads(local_secrets_aws) # Then load secrets - secrets.load( - args.taskcluster_secret, - local_secrets=yaml.safe_load(args.local_configuration) - if args.local_configuration - else None, - ) + secrets.load(args.taskcluster_secret, local_secrets=local_secrets) init_logger( "bot", + level=logging.INFO, channel=secrets.get("APP_CHANNEL", "dev"), PAPERTRAIL_HOST=secrets.get("PAPERTRAIL_HOST"), PAPERTRAIL_PORT=secrets.get("PAPERTRAIL_PORT"), diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py new file mode 100644 index 000000000..34ec76355 --- /dev/null +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import re + +import requests +import structlog +from requests import HTTPError + +from code_coverage_bot import commit_coverage +from code_coverage_bot import config +from code_coverage_bot import uploader +from code_coverage_bot.cli import setup_cli +from code_coverage_bot.hooks.base import Hook +from code_coverage_bot.secrets import secrets +from code_coverage_bot.zero_coverage import ZeroCov +from code_coverage_tools import gcp + +logger = structlog.get_logger(__name__) + + +class CronThunderbirdHook(Hook): + """ + This cron class handles all report generation for Thunderbird's comm-central + """ + + def upload_reports(self, reports): + """ + Upload all provided covdir reports on GCP + """ + for (platform, suite), path in reports.items(): + report = open(path, "rb").read() + uploader.gcp( + self.branch, self.revision, report, suite=suite, platform=platform + ) + + def __init__( + self, namespace, project, repository, upstream, prefix, *args, **kwargs + ): + + tip_response = requests.get(f"{repository}/raw-rev/tip") + # Yell if there's any issues + try: + tip_response.raise_for_status() + except HTTPError as e: + logger.error(f"Could not access raw revision for {project} tip: {e}") + raise + + # Node ID == Revision + revision_regex = r"^# Node ID ([\w\d]*)$" + matches = re.search(revision_regex, tip_response.text[:2048], re.MULTILINE) + + if len(matches.groups()) == 0: + error = "Failed to retrieve revision from tip, no match within 2048 bytes!" 
+ logger.error(error) + raise Exception(error) + + # Grab that revision + revision = matches.groups()[0] + + super().__init__( + namespace, project, repository, upstream, revision, prefix, *args, **kwargs + ) + + def run(self) -> None: + # Check the covdir report does not already exists + bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) + if uploader.gcp_covdir_exists(bucket, self.branch, self.revision, "all", "all"): + logger.warn("Full covdir report already on GCP") + return + + self.retrieve_source_and_artifacts() + + logger.info("Generating full report") + reports = self.build_reports(only=[("all", "all")]) + + # Generate all reports except the full one which we generated earlier. + all_report_combinations = self.artifactsHandler.get_combinations() + del all_report_combinations[("all", "all")] + reports.update(self.build_reports()) + logger.info("Built all covdir reports", nb=len(reports)) + + # Upload reports on GCP + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) + + # Commit cov is automatically uploaded to GCP...for reasons + logger.info("Generating commit coverage reports") + commit_coverage.generate(self.repository, self.project, self.repo_dir) + + logger.info("Generating zero coverage reports") + zc = ZeroCov(self.repo_dir) + zc.generate( + self.artifactsHandler.get(), self.revision, self.reports_dir, self.prefix + ) + + # Upload zero cov on GCP + self.upload_reports( + { + ( + "zero-coverage", + "zero-coverage", + ): f"{self.reports_dir}/zero_coverage_report.json" + } + ) + logger.info("Uploaded zero coverage report", nb=len(reports)) + + +def main() -> None: + logger.info("Starting code coverage bot for cron thunderbird") + args = setup_cli(ask_revision=False, ask_repository=True) + + namespace = args.namespace or config.DEFAULT_NAMESPACE + project = args.project or config.DEFAULT_PROJECT + repository = args.repository or config.DEFAULT_REPOSITORY + upstream = args.upstream or 
config.DEFAULT_UPSTREAM + prefix = args.prefix or None + + hook = CronThunderbirdHook( + namespace, + project, + repository, + upstream, + prefix, + args.task_name_filter, + args.cache_root, + args.working_dir, + ) + hook.run() diff --git a/bot/deploy.dockerfile b/bot/deploy.dockerfile new file mode 100644 index 000000000..8ad35c1c1 --- /dev/null +++ b/bot/deploy.dockerfile @@ -0,0 +1,28 @@ +FROM python:3.11.1-slim-bullseye + +ADD tools /src/tools +ADD bot /src/bot + +RUN /src/bot/ci/bootstrap.sh + +RUN cd /src/bot/ && pip install -r requirements.txt -r requirements-dev.txt +RUN cd /src/bot/ && pip install -e . +RUN cd /src/bot/ && python3 ./setup.py install + + +RUN cd /src/tools && pip install --disable-pip-version-check --no-cache-dir --quiet . +#RUN cd /src/bot && pip install --disable-pip-version-check --no-cache-dir --quiet . + +WORKDIR /src/bot + +RUN mkdir -p build/cache +RUN mkdir -p build/work + +# Thunderbird settings +ENV UPSTREAM="https://hg.mozilla.org/comm-central" +ENV REPOSITORY="https://hg.mozilla.org/comm-central" +ENV PROJECT="comm-central" +ENV NAMESPACE="comm" +ENV PREFIX="comm" + +CMD ["code-coverage-cron-thunderbird", "--cache-root=build/cache", "--working-dir=build/work", "--local-configuration=code-coverage.yml"] diff --git a/bot/dev.dockerfile b/bot/dev.dockerfile new file mode 100644 index 000000000..1351bcc34 --- /dev/null +++ b/bot/dev.dockerfile @@ -0,0 +1,38 @@ +FROM python:3.11.1-slim-bullseye + +#RUN apt-get update && apt-get install -y mercurial + +#COPY bot/ci/hgrc /etc/mercurial/hgrc + +ADD tools /src/tools +ADD bot /src/bot + +RUN /src/bot/ci/bootstrap.sh + +RUN cd /src/bot/ && pip install -r requirements.txt -r requirements-dev.txt +RUN cd /src/bot/ && pip install -e . 
+RUN cd /src/bot/ && python3 ./setup.py install +#RUN pre-commit install +#RUN pre-commit run -a +#RUN pytest -v + + +RUN cd /src/tools && pip install --disable-pip-version-check --no-cache-dir --quiet . +#RUN cd /src/bot && pip install --disable-pip-version-check --no-cache-dir --quiet . + +WORKDIR /src/bot + +RUN mkdir -p build/cache +RUN mkdir -p build/work + +# Thunderbird settings +ENV UPSTREAM="https://hg.mozilla.org/comm-central" +ENV REPOSITORY="https://hg.mozilla.org/comm-central" +ENV PROJECT="comm-central" +ENV NAMESPACE="comm" +ENV PREFIX="comm" + +#CMD "cd /src/bot && code-coverage-cron" +#CMD "code-coverage-cron --cache-root=build/cache --working-dir=build/work --local-configuration=code-coverage.yml" +#CMD ["python3 /src/bot/code-coverage-bot"] +CMD ["code-coverage-cron", "--cache-root=build/cache", "--working-dir=build/work", "--local-configuration=code-coverage.yml"] \ No newline at end of file diff --git a/bot/setup.py b/bot/setup.py index 17528da69..cb47479db 100644 --- a/bot/setup.py +++ b/bot/setup.py @@ -50,6 +50,8 @@ def read_requirements(file_): "code-coverage-crontrigger = code_coverage_bot.hooks.crontrigger:main", "code-coverage-cron = code_coverage_bot.hooks.cron:main", "code-coverage-repo = code_coverage_bot.hooks.repo:main", + # Thunderbird + "code-coverage-cron = code_coverage_bot.hooks.cron_thunderbird:main", ] }, ) diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..851abcb0f --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,50 @@ +services: + bot: + #platform: linux/x86_64 + build: + context: . 
+ dockerfile: bot/dev.dockerfile + environment: + DEBUG: True + TASKCLUSTER_URL: "https://firefox-ci-tc.services.mozilla.com/" + volumes: + - ./build/cache:/src/bot/build/cache + - ./build/work:/src/bot/build/work + ports: + - 8000:8000 + networks: + - codecov + backend: + build: + context: . + dockerfile: backend/dev.dockerfile + environment: + LOCAL_CONFIGURATION: "/src/backend/code-coverage.yml" + DEBUG: True + ports: + - "8001:8001" + networks: + - codecov + redis: + image: redis:latest + ports: + - "6379:6379" + volumes: + - /tmp/ccov-redis:/data + networks: + - codecov + frontend: + #platform: linux/x86_64 + build: + context: . + dockerfile: frontend/dev.dockerfile + ports: + - "8080:80" + environment: + - NGINX_HOST=code-coverage.local + - NGINX_PORT=80 + networks: + - codecov + +networks: + codecov: {} \ No newline at end of file diff --git a/frontend/deploy.dockerfile b/frontend/deploy.dockerfile new file mode 100644 index 000000000..f34a48e64 --- /dev/null +++ b/frontend/deploy.dockerfile @@ -0,0 +1,30 @@ +FROM nginx:latest + +ADD frontend /src/frontend + +WORKDIR /src/frontend + +# Install some essentials +RUN apt-get update && apt-get install -y build-essential python python-dev + +# Install node +RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - &&\ +apt-get install -y nodejs + +# Backend is proxy'd +ENV BACKEND_URL=/ +ENV REPOSITORY=https://hg.mozilla.org/comm-central +ENV PROJECT=comm-central +# TODO: Figure out zero coverage report location +ENV ZERO_COVERAGE_REPORT=/zero_coverage_report.json + +RUN npm install +RUN npm run build + +# Use our custom nginx config +RUN rm /etc/nginx/conf.d/default.conf +COPY docker/etc/nginx/conf.d/coverage.conf /etc/nginx/conf.d/default.conf + +RUN cp -r /src/frontend/dist/* /usr/share/nginx/html/ + +#CMD "nginx -g daemon off;" \ No newline at 
end of file diff --git a/frontend/dev.dockerfile b/frontend/dev.dockerfile new file mode 100644 index 000000000..41d1a0279 --- /dev/null +++ b/frontend/dev.dockerfile @@ -0,0 +1,24 @@ +FROM nginx:latest + +ADD frontend /src/frontend + +WORKDIR /src/frontend + +# Install some essentials +RUN apt-get update && apt-get install -y build-essential python python-dev + +# Install node +RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - &&\ +apt-get install -y nodejs + +ENV BACKEND_URL=http://localhost:8001 +ENV REPOSITORY=https://hg.mozilla.org/comm-central +ENV PROJECT=comm-central +ENV ZERO_COVERAGE_REPORT=/assets/zero_coverage_report.json + +RUN npm install +RUN npm run build + +RUN cp -r /src/frontend/dist/* /usr/share/nginx/html/ + +#CMD "nginx -g daemon off;" \ No newline at end of file diff --git a/frontend/docker/etc/nginx/conf.d/coverage.conf b/frontend/docker/etc/nginx/conf.d/coverage.conf new file mode 100644 index 000000000..a74e3724e --- /dev/null +++ b/frontend/docker/etc/nginx/conf.d/coverage.conf @@ -0,0 +1,20 @@ +server { + listen 80; + listen [::]:80; + server_name localhost; + + # Backend API proxy + location /api/v2 { + # Remove our fake /api/v1/ prefix for FastAPI + #rewrite ^/api/(.*)$ /$1 break; + proxy_pass http://127.0.0.1:8080; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $http_host; + proxy_http_version 1.1; + } + # Frontend Vue static files + location / { + root /usr/share/nginx/html; + try_files $uri $uri/ /index.html; + } +} From 5df43d8118be2a1a853da018e917b3e5d5f1ce76 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Tue, 4 Apr 2023 14:40:09 -0700 Subject: [PATCH 07/30] Various fixes to prevent the application from crashing --- .github/workflows/aws.yml | 2 +- 
backend/deploy.dockerfile | 1 - backend/dev.dockerfile | 2 +- backend/run.sh | 2 +- bot/code_coverage_bot/cli.py | 2 +- bot/code_coverage_bot/hooks/cron_thunderbird.py | 2 ++ bot/code_coverage_bot/utils.py | 2 +- bot/deploy.dockerfile | 2 +- bot/dev.dockerfile | 2 +- bot/setup.py | 2 +- docker-compose.yml | 4 ++-- frontend/dev.dockerfile | 2 +- 12 files changed, 13 insertions(+), 12 deletions(-) diff --git a/.github/workflows/aws.yml b/.github/workflows/aws.yml index 56829bf2e..529520294 100644 --- a/.github/workflows/aws.yml +++ b/.github/workflows/aws.yml @@ -1,7 +1,7 @@ # This workflow will build and push a new container image to Amazon ECR, # and then will deploy a new task definition to Amazon ECS, when there is a push to the "staging" branch. -name: Deploy to Stage Environment +name: Deploy to Production Environment # Stop any pending jobs concurrency: diff --git a/backend/deploy.dockerfile b/backend/deploy.dockerfile index f067e3f8d..554e2bca7 100644 --- a/backend/deploy.dockerfile +++ b/backend/deploy.dockerfile @@ -6,7 +6,6 @@ ADD backend /src/backend RUN cd /src/tools && pip install --disable-pip-version-check --no-cache-dir --quiet . RUN cd /src/backend && pip install --disable-pip-version-check --no-cache-dir --quiet . -ENV LOCAL_CONFIGURATION=/src/backend/code-coverage-conf.yml ENV REPOSITORY=comm-central CMD "/src/backend/tb-run.sh" diff --git a/backend/dev.dockerfile b/backend/dev.dockerfile index 18a1857b7..f067e3f8d 100644 --- a/backend/dev.dockerfile +++ b/backend/dev.dockerfile @@ -9,5 +9,5 @@ RUN cd /src/backend && pip install --disable-pip-version-check --no-cache-dir -- ENV LOCAL_CONFIGURATION=/src/backend/code-coverage-conf.yml ENV REPOSITORY=comm-central -CMD "/src/backend/run.sh" +CMD "/src/backend/tb-run.sh" #CMD ["gunicorn", "code_coverage_backend.flask:app", "--timeout", "30"] diff --git a/backend/run.sh b/backend/run.sh index 89a1f5bcb..e78089123 100755 --- a/backend/run.sh +++ b/backend/run.sh @@ -3,4 +3,4 @@ if [[ ! 
$TASKCLUSTER_SECRET ]]; then export TASKCLUSTER_SECRET="project/relman/code-coverage/dev" echo 'Using dev secret' fi -gunicorn --bind 0.0.0.0:8001 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app +gunicorn --bind localhost:8000 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index 5a11a6134..7ad4bbfb0 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -65,7 +65,7 @@ def setup_cli(ask_repository=True, ask_revision=True): local_secrets = None if args.local_configuration: - local_secrets = yaml.safe_load(open(args.local_configuration)) + local_secrets = yaml.safe_load(args.local_configuration) elif local_secrets_aws: local_secrets = json.loads(local_secrets_aws) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index 34ec76355..96c165047 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -59,6 +59,8 @@ def __init__( # Grab that revision revision = matches.groups()[0] + logger.info(f"Using revision id {revision} from tip") + super().__init__( namespace, project, repository, upstream, revision, prefix, *args, **kwargs ) diff --git a/bot/code_coverage_bot/utils.py b/bot/code_coverage_bot/utils.py index e1588b954..4f03b20a4 100644 --- a/bot/code_coverage_bot/utils.py +++ b/bot/code_coverage_bot/utils.py @@ -56,7 +56,7 @@ def run_check(command, **kwargs): # Use error to send log to sentry log.error( - f"Command failed with code: {proc.returncode}", + f"Command failed with code: {proc.returncode}\nError: {error}", exit=proc.returncode, command=" ".join(command), output=output, diff --git a/bot/deploy.dockerfile b/bot/deploy.dockerfile index 8ad35c1c1..9cdb7a738 100644 --- a/bot/deploy.dockerfile +++ b/bot/deploy.dockerfile @@ -25,4 +25,4 @@ ENV PROJECT="comm-central" ENV NAMESPACE="comm" ENV 
PREFIX="comm" -CMD ["code-coverage-cron-thunderbird", "--cache-root=build/cache", "--working-dir=build/work", "--local-configuration=code-coverage.yml"] +CMD ["code-coverage-cron-thunderbird", "--cache-root=build/cache", "--working-dir=build/work"] diff --git a/bot/dev.dockerfile b/bot/dev.dockerfile index 1351bcc34..6a13a13f5 100644 --- a/bot/dev.dockerfile +++ b/bot/dev.dockerfile @@ -35,4 +35,4 @@ ENV PREFIX="comm" #CMD "cd /src/bot && code-coverage-cron" #CMD "code-coverage-cron --cache-root=build/cache --working-dir=build/work --local-configuration=code-coverage.yml" #CMD ["python3 /src/bot/code-coverage-bot"] -CMD ["code-coverage-cron", "--cache-root=build/cache", "--working-dir=build/work", "--local-configuration=code-coverage.yml"] \ No newline at end of file +CMD ["code-coverage-cron-thunderbird", "--cache-root=build/cache", "--working-dir=build/work", "--local-configuration=code-coverage.yml"] \ No newline at end of file diff --git a/bot/setup.py b/bot/setup.py index cb47479db..3e4cd6b09 100644 --- a/bot/setup.py +++ b/bot/setup.py @@ -51,7 +51,7 @@ def read_requirements(file_): "code-coverage-cron = code_coverage_bot.hooks.cron:main", "code-coverage-repo = code_coverage_bot.hooks.repo:main", # Thunderbird - "code-coverage-cron = code_coverage_bot.hooks.cron_thunderbird:main", + "code-coverage-cron-thunderbird = code_coverage_bot.hooks.cron_thunderbird:main", ] }, ) diff --git a/docker-compose.yml b/docker-compose.yml index 851abcb0f..aad9dbc66 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,7 +22,7 @@ services: LOCAL_CONFIGURATION: "/src/backend/code-coverage.yml" DEBUG: True ports: - - "8001:8001" + - "8080:8080" networks: - codecov redis: @@ -39,7 +39,7 @@ services: context: . 
dockerfile: frontend/dev.dockerfile ports: - - "8080:80" + - "80:80" environment: - NGINX_HOST=code-coverage.local - NGINX_PORT=80 diff --git a/frontend/dev.dockerfile b/frontend/dev.dockerfile index 41d1a0279..e734c5a69 100644 --- a/frontend/dev.dockerfile +++ b/frontend/dev.dockerfile @@ -11,7 +11,7 @@ RUN apt-get update && apt-get install -y build-essential python python-dev RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - &&\ apt-get install -y nodejs -ENV BACKEND_URL=http://localhost:8001 +ENV BACKEND_URL=http://localhost:8080 ENV REPOSITORY=https://hg.mozilla.org/comm-central ENV PROJECT=comm-central ENV ZERO_COVERAGE_REPORT=/assets/zero_coverage_report.json From 72c80802e076cd683822765a761e4e5ee7cf585a Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Tue, 4 Apr 2023 14:44:20 -0700 Subject: [PATCH 08/30] Various fixes for the build and deployment process --- .aws/task-definition.json | 25 +++++++++++-------- .dockerignore | 1 - .github/workflows/aws.yml | 8 +++--- .../code_coverage_backend/backend/__init__.py | 13 +++++++--- backend/tb-run.sh | 2 +- bot/ci/bootstrap.sh | 7 +++++- bot/code_coverage_bot/cli.py | 11 +++++--- frontend/deploy.dockerfile | 6 ++--- .../docker/etc/nginx/conf.d/coverage.conf | 9 ++++++- frontend/src/common.js | 2 +- 10 files changed, 54 insertions(+), 30 deletions(-) diff --git a/.aws/task-definition.json b/.aws/task-definition.json index 188ab66a9..f5e376a66 100644 --- a/.aws/task-definition.json +++ b/.aws/task-definition.json @@ -1,9 +1,9 @@ { - "taskDefinitionArn": "arn:aws:ecs:us-east-1:768512802988:task-definition/coverage-definition:3", + "taskDefinitionArn": "arn:aws:ecs:us-east-1:768512802988:task-definition/coverage-definition:5", "containerDefinitions": [ { "name": "frontend", - "image": 
"frontend:latest", + "image": "frontend-latest", "cpu": 0, "portMappings": [ { @@ -16,7 +16,6 @@ ], "essential": true, "environment": [], - "environmentFiles": [], "mountPoints": [], "volumesFrom": [], "logConfiguration": { @@ -31,7 +30,7 @@ }, { "name": "backend", - "image": "backend:latest", + "image": "backend-latest", "cpu": 0, "portMappings": [ { @@ -44,15 +43,14 @@ ], "essential": true, "environment": [], - "environmentFiles": [], + "mountPoints": [], + "volumesFrom": [], "secrets": [ { "name": "LOCAL_SECRETS", "valueFrom": "arn:aws:secretsmanager:us-east-1:768512802988:secret:prod/coverage/secrets-eNXZJ2" } ], - "mountPoints": [], - "volumesFrom": [], "logConfiguration": { "logDriver": "awslogs", "options": { @@ -65,10 +63,9 @@ } ], "family": "coverage-definition", - "taskRoleArn": "arn:aws:iam::768512802988:role/coverage-ci", "executionRoleArn": "arn:aws:iam::768512802988:role/coverage-ci", "networkMode": "awsvpc", - "revision": 3, + "revision": 5, "volumes": [], "status": "ACTIVE", "requiresAttributes": [ @@ -78,11 +75,17 @@ { "name": "ecs.capability.execution-role-awslogs" }, + { + "name": "com.amazonaws.ecs.capability.ecr-auth" + }, { "name": "com.amazonaws.ecs.capability.docker-remote-api.1.19" }, { - "name": "com.amazonaws.ecs.capability.task-iam-role" + "name": "ecs.capability.secrets.asm.environment-variables" + }, + { + "name": "ecs.capability.execution-role-ecr-pull" }, { "name": "com.amazonaws.ecs.capability.docker-remote-api.1.18" @@ -108,7 +111,7 @@ "cpuArchitecture": "X86_64", "operatingSystemFamily": "LINUX" }, - "registeredAt": "2023-04-04T18:24:48.399Z", + "registeredAt": "2023-04-04T22:53:59.248Z", "registeredBy": "arn:aws:iam::768512802988:user/melissa", "tags": [ { diff --git a/.dockerignore b/.dockerignore index 7816764af..224786cd6 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,4 +1,3 @@ .git -# frontend/ addon/ */tests/ diff --git a/.github/workflows/aws.yml b/.github/workflows/aws.yml index 529520294..c994b6ff3 100644 --- 
a/.github/workflows/aws.yml +++ b/.github/workflows/aws.yml @@ -57,7 +57,7 @@ jobs: # Build a docker container and # push it to ECR so that it can # be deployed to ECS. - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG ./backend -f ./backend/deploy.dockerfile + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . -f ./backend/deploy.dockerfile docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG echo "image_backend=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT @@ -70,7 +70,7 @@ jobs: # Build a docker container and # push it to ECR so that it can # be deployed to ECS. - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG ./frontend -f ./frontend/deploy.dockerfile + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . -f ./frontend/deploy.dockerfile docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG echo "image_frontend=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT @@ -79,12 +79,12 @@ jobs: id: build-bot env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: bot:latest + IMAGE_TAG: bot-latest run: | # Build a docker container and # push it to ECR so that it can # be deployed to ECS. - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG ./bot -f ./bot/deploy.dockerfile + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . 
-f ./bot/deploy.dockerfile docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG echo "image_bot=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT diff --git a/backend/code_coverage_backend/backend/__init__.py b/backend/code_coverage_backend/backend/__init__.py index bac5f47ed..295d40dd5 100644 --- a/backend/code_coverage_backend/backend/__init__.py +++ b/backend/code_coverage_backend/backend/__init__.py @@ -18,14 +18,19 @@ def create_app(): # Load secrets from Taskcluster - local_secrets_path = os.environ.get("LOCAL_CONFIGURATION") local_secrets_aws = os.environ.get("LOCAL_SECRETS") + local_secrets_path = os.environ.get("LOCAL_CONFIGURATION") local_secrets = None - if local_secrets_path: - local_secrets = yaml.safe_load(open(local_secrets_path)) - elif local_secrets_aws: + if local_secrets_aws: local_secrets = json.loads(local_secrets_aws) + # Fix our secrets, GCS needs to be json decoded, and everything needs to be wrapped in common + local_secrets["GOOGLE_CLOUD_STORAGE"] = json.loads( + local_secrets.get("GOOGLE_CLOUD_STORAGE") + ) + local_secrets = {"common": local_secrets} + elif local_secrets_path: + local_secrets = yaml.safe_load(open(local_secrets_path)) if local_secrets_path is not None: assert os.path.exists( diff --git a/backend/tb-run.sh b/backend/tb-run.sh index f0fe7b603..d40cb75d4 100755 --- a/backend/tb-run.sh +++ b/backend/tb-run.sh @@ -3,4 +3,4 @@ if [[ ! 
$TASKCLUSTER_SECRET ]]; then export TASKCLUSTER_SECRET="project/relman/code-coverage/dev" echo 'Using dev secret' fi -gunicorn --bind 0.0.0.0:8080 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app +gunicorn --bind 0.0.0.0:8080 --timeout 90 --reload --reload-engine=poll --log-file=- code_coverage_backend.flask:app diff --git a/bot/ci/bootstrap.sh b/bot/ci/bootstrap.sh index 5c34223f5..88bac496f 100755 --- a/bot/ci/bootstrap.sh +++ b/bot/ci/bootstrap.sh @@ -1,8 +1,13 @@ #!/bin/bash -ex +GRCOV_FILE="grcov-tcmalloc-linux-x86_64.tar.bz2" GRCOV_VERSION="v0.7.1" MERCURIAL_VERSION="6.3.1" VERSION_CONTROL_TOOLS_REV="d0d8dd1934dd" +# OVERRIDES +GRCOV_FILE="grcov-x86_64-unknown-linux-gnu.tar.bz2" +GRCOV_VERSION="v0.8.13" + apt-get update # libgoogle-perftools4 is currently required for grcov (until https://github.com/mozilla/grcov/issues/403 is fixed). apt-get install --no-install-recommends -y gcc curl bzip2 python-dev libgoogle-perftools4 @@ -10,7 +15,7 @@ apt-get install --no-install-recommends -y gcc curl bzip2 python-dev libgoogle-p pip install --disable-pip-version-check --quiet --no-cache-dir mercurial==$MERCURIAL_VERSION # Setup grcov -curl -L https://github.com/mozilla/grcov/releases/download/$GRCOV_VERSION/grcov-tcmalloc-linux-x86_64.tar.bz2 | tar -C /usr/bin -xjv +curl -L https://github.com/mozilla/grcov/releases/download/$GRCOV_VERSION/$GRCOV_FILE | tar -C /usr/bin -xjv chmod +x /usr/bin/grcov # Setup mercurial with needed extensions diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index 7ad4bbfb0..d9c9aa031 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -64,10 +64,15 @@ def setup_cli(ask_repository=True, ask_revision=True): local_secrets_aws = os.environ.get("LOCAL_SECRETS") local_secrets = None - if 
args.local_configuration: - local_secrets = yaml.safe_load(args.local_configuration) - elif local_secrets_aws: + if local_secrets_aws: local_secrets = json.loads(local_secrets_aws) + # Fix our secrets, GCS needs to be json decoded, and everything needs to be wrapped in common + local_secrets["GOOGLE_CLOUD_STORAGE"] = json.loads( + local_secrets.get("GOOGLE_CLOUD_STORAGE") + ) + local_secrets = {"common": local_secrets} + elif args.local_configuration: + local_secrets = yaml.safe_load(args.local_configuration) # Then load secrets secrets.load(args.taskcluster_secret, local_secrets=local_secrets) diff --git a/frontend/deploy.dockerfile b/frontend/deploy.dockerfile index f34a48e64..95a42e036 100644 --- a/frontend/deploy.dockerfile +++ b/frontend/deploy.dockerfile @@ -12,18 +12,18 @@ RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - &&\ apt-get install -y nodejs # Backend is proxy'd -ENV BACKEND_URL=/ +ENV BACKEND_URL=https://coverage.thunderbird.net ENV REPOSITORY=https://hg.mozilla.org/comm-central ENV PROJECT=comm-central # TODO: Figure out zero coverage report location -ENV ZERO_COVERAGE_REPORT=/zero_coverage_report.json +ENV ZERO_COVERAGE_REPORT=https://coverage.thunderbird.net/zero_coverage_report.json RUN npm install RUN npm run build # Use our custom nginx config RUN rm /etc/nginx/conf.d/default.conf -COPY docker/etc/nginx/conf.d/coverage.conf /etc/nginx/conf.d/default.conf +COPY frontend/docker/etc/nginx/conf.d/coverage.conf /etc/nginx/conf.d/default.conf RUN cp -r /src/frontend/dist/* /usr/share/nginx/html/ diff --git a/frontend/docker/etc/nginx/conf.d/coverage.conf b/frontend/docker/etc/nginx/conf.d/coverage.conf index a74e3724e..2a2401342 100644 --- a/frontend/docker/etc/nginx/conf.d/coverage.conf +++ 
b/frontend/docker/etc/nginx/conf.d/coverage.conf @@ -4,7 +4,7 @@ server { server_name localhost; # Backend API proxy - location /api/v2 { + location /v2 { # Remove our fake /api/v1/ prefix for FastAPI #rewrite ^/api/(.*)$ /$1 break; proxy_pass http://127.0.0.1:8080; @@ -12,6 +12,13 @@ server { proxy_set_header Host $http_host; proxy_http_version 1.1; } + # Zero coverage proxy + location /zero-coverage-report.json { + proxy_pass http://127.0.0.1:8080; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $http_host; + proxy_http_version 1.1; + } # Frontend Vue static files location / { root /usr/share/nginx/html; diff --git a/frontend/src/common.js b/frontend/src/common.js index cc9b5a689..6222e7da4 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -34,7 +34,7 @@ export async function main(load, display) { // React to url changes window.onhashchange = full; - window.title = `${COVERAGE_PROJECT} coverage`; + document.title = `${COVERAGE_PROJECT} coverage`; } function cacheGet(cache, key) { From 507c73d0a3626ebfee5bd53e87c4c9b1b5274d53 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 6 Apr 2023 13:14:47 -0700 Subject: [PATCH 09/30] Upload and serve zero-coverage-report differently - We upload zero coverage first - We upload rest of reports - On report ingest, if there's a new revision in redis download a fresh zero coverage report - Serve via filesystem --- backend/code_coverage_backend/api.py | 19 ++++++++ backend/code_coverage_backend/api.yml | 15 +++++++ backend/code_coverage_backend/gcp.py | 13 ++++++ .../hooks/cron_thunderbird.py | 43 +++++++++++-------- bot/code_coverage_bot/uploader.py | 30 +++++++++++++ frontend/deploy.dockerfile | 2 +- .../docker/etc/nginx/conf.d/coverage.conf | 7 --- 7 files changed, 102 insertions(+), 27 deletions(-) diff --git a/backend/code_coverage_backend/api.py 
b/backend/code_coverage_backend/api.py index bcda84d63..711ef8d78 100644 --- a/backend/code_coverage_backend/api.py +++ b/backend/code_coverage_backend/api.py @@ -2,6 +2,7 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +import json import structlog from flask import abort @@ -130,3 +131,21 @@ def coverage_filters(repository=config.DEFAULT_REPOSITORY): except Exception as e: logger.warn("Failed to load filters", repo=repository, error=str(e)) abort(400) + + +def zero_coverage_report(repository=config.DEFAULT_REPOSITORY): + """ + Return the zero coverage report stored in Google Cloud Storage + """ + file = None + + try: + with open("/tmp/zero-cov-report/zero_coverage_report.json", "r") as fh: + file = fh.read() + except FileNotFoundError as e: + logger.warn( + "Failed to find zero coverage report", repo=repository, error=str(e) + ) + abort(404) + + return json.loads(file) diff --git a/backend/code_coverage_backend/api.yml b/backend/code_coverage_backend/api.yml index 76e12a8bc..2c1e3ac2e 100644 --- a/backend/code_coverage_backend/api.yml +++ b/backend/code_coverage_backend/api.yml @@ -121,3 +121,18 @@ paths: description: Available filters on the endpoints tags: - v2 + + /v2/zero-coverage-report: + get: + operationId: "code_coverage_backend.api.zero_coverage_report" + parameters: + - name: repository + in: query + description: Mozilla repository for these reports (default to mozilla-central) + required: false + type: string + responses: + 200: + description: The zero coverage report + tags: + - v2 diff --git a/backend/code_coverage_backend/gcp.py b/backend/code_coverage_backend/gcp.py index 5bb1f48d4..058d1c9e9 100644 --- a/backend/code_coverage_backend/gcp.py +++ b/backend/code_coverage_backend/gcp.py @@ -130,6 +130,9 @@ def ingest_report(self, report: Report) 
-> bool: logger.info("Report not available", report=str(report)) return False + # Crudely check if we need to download a fresh zero coverage report + self.ingest_zero_coverage_report(report.changeset) + # Read overall coverage for history data = covdir.open_report(report.path) assert data is not None, "No report to ingest" @@ -358,3 +361,13 @@ def ingest_available_reports( # Build report instance and ingest it report = Report(self.reports_dir, repository, changeset, platform, suite) self.ingest_report(report) + + def ingest_zero_coverage_report(self, revision): + """If it's a new revision, download a fresh zero coverage report""" + if self.redis.hget("zero_coverage", "latest-rev").decode() == revision: + return + + # Load the most recent zero coverage report into cache + download_report("/tmp/zero-cov-report/", self.bucket, "zero_coverage_report") + + self.redis.hset("zero_coverage", "latest-rev", revision) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index 96c165047..caec658bb 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -25,15 +25,19 @@ class CronThunderbirdHook(Hook): This cron class handles all report generation for Thunderbird's comm-central """ - def upload_reports(self, reports): + def upload_reports(self, reports, zero_cov=False): """ Upload all provided covdir reports on GCP """ for (platform, suite), path in reports.items(): report = open(path, "rb").read() - uploader.gcp( - self.branch, self.revision, report, suite=suite, platform=platform - ) + + if zero_cov: + uploader.gcp_zero_coverage(self.branch, report) + else: + uploader.gcp( + self.branch, self.revision, report, suite=suite, platform=platform + ) def __init__( self, namespace, project, repository, upstream, prefix, *args, **kwargs @@ -74,19 +78,6 @@ def run(self) -> None: self.retrieve_source_and_artifacts() - logger.info("Generating full report") - reports = 
self.build_reports(only=[("all", "all")]) - - # Generate all reports except the full one which we generated earlier. - all_report_combinations = self.artifactsHandler.get_combinations() - del all_report_combinations[("all", "all")] - reports.update(self.build_reports()) - logger.info("Built all covdir reports", nb=len(reports)) - - # Upload reports on GCP - self.upload_reports(reports) - logger.info("Uploaded all covdir reports", nb=len(reports)) - # Commit cov is automatically uploaded to GCP...for reasons logger.info("Generating commit coverage reports") commit_coverage.generate(self.repository, self.project, self.repo_dir) @@ -104,9 +95,23 @@ def run(self) -> None: "zero-coverage", "zero-coverage", ): f"{self.reports_dir}/zero_coverage_report.json" - } + }, + True, ) - logger.info("Uploaded zero coverage report", nb=len(reports)) + logger.info("Uploaded zero coverage report") + + logger.info("Generating full report") + reports = self.build_reports(only=[("all", "all")]) + + # Generate all reports except the full one which we generated earlier. 
+ all_report_combinations = self.artifactsHandler.get_combinations() + del all_report_combinations[("all", "all")] + reports.update(self.build_reports()) + logger.info("Built all covdir reports", nb=len(reports)) + + # Upload reports on GCP + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) def main() -> None: diff --git a/bot/code_coverage_bot/uploader.py b/bot/code_coverage_bot/uploader.py index 920028798..ea845be08 100644 --- a/bot/code_coverage_bot/uploader.py +++ b/bot/code_coverage_bot/uploader.py @@ -51,6 +51,36 @@ def gcp(repository, revision, report, platform, suite): return blob +def gcp_zero_coverage(repository, report): + """ + Upload a grcov a zero coverage report on Google Cloud Storage + * Compress with zstandard + * Upload in the main bucket directory + """ + assert isinstance(report, bytes) + bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) + + # Compress report + compressor = zstd.ZstdCompressor(threads=-1) + archive = compressor.compress(report) + + # Upload archive + path = GCP_COVDIR_PATH.format( + repository=repository, + ) + blob = bucket.blob(path) + blob.upload_from_string(archive) + + # Update headers + blob.content_type = "application/json" + blob.content_encoding = "zstd" + blob.patch() + + logger.info("Uploaded {} on {}".format(path, bucket)) + + return blob + + def gcp_covdir_exists( bucket: Bucket, repository: str, revision: str, platform: str, suite: str ) -> bool: diff --git a/frontend/deploy.dockerfile b/frontend/deploy.dockerfile index 95a42e036..db12d4aaa 100644 --- a/frontend/deploy.dockerfile +++ b/frontend/deploy.dockerfile @@ -16,7 +16,7 @@ ENV BACKEND_URL=https://coverage.thunderbird.net ENV REPOSITORY=https://hg.mozilla.org/comm-central ENV PROJECT=comm-central # TODO: Figure out zero coverage report location -ENV 
ZERO_COVERAGE_REPORT=https://coverage.thunderbird.net/zero_coverage_report.json +ENV ZERO_COVERAGE_REPORT=https://coverage.thunderbird.net/v2/zero-coverage-report RUN npm install RUN npm run build diff --git a/frontend/docker/etc/nginx/conf.d/coverage.conf b/frontend/docker/etc/nginx/conf.d/coverage.conf index 2a2401342..542fa5c02 100644 --- a/frontend/docker/etc/nginx/conf.d/coverage.conf +++ b/frontend/docker/etc/nginx/conf.d/coverage.conf @@ -12,13 +12,6 @@ server { proxy_set_header Host $http_host; proxy_http_version 1.1; } - # Zero coverage proxy - location /zero-coverage-report.json { - proxy_pass http://127.0.0.1:8080; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header Host $http_host; - proxy_http_version 1.1; - } # Frontend Vue static files location / { root /usr/share/nginx/html; From dc1ae44048df5211111f2fe10adaa3f2cb358db0 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 6 Apr 2023 14:11:33 -0700 Subject: [PATCH 10/30] Fix zero coverage upload process, and read the json file as binary for the api --- backend/code_coverage_backend/api.py | 2 +- bot/code_coverage_bot/hooks/cron_thunderbird.py | 2 +- bot/code_coverage_bot/uploader.py | 6 ++---- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/backend/code_coverage_backend/api.py b/backend/code_coverage_backend/api.py index 711ef8d78..746e7abf2 100644 --- a/backend/code_coverage_backend/api.py +++ b/backend/code_coverage_backend/api.py @@ -140,7 +140,7 @@ def zero_coverage_report(repository=config.DEFAULT_REPOSITORY): file = None try: - with open("/tmp/zero-cov-report/zero_coverage_report.json", "r") as fh: + with open("/tmp/zero-cov-report/zero_coverage_report.json", "rb") as fh: file = fh.read() except FileNotFoundError as e: logger.warn( diff --git 
a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index caec658bb..f60ce5353 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -33,7 +33,7 @@ def upload_reports(self, reports, zero_cov=False): report = open(path, "rb").read() if zero_cov: - uploader.gcp_zero_coverage(self.branch, report) + uploader.gcp_zero_coverage(report) else: uploader.gcp( self.branch, self.revision, report, suite=suite, platform=platform diff --git a/bot/code_coverage_bot/uploader.py b/bot/code_coverage_bot/uploader.py index ea845be08..13eb73dbc 100644 --- a/bot/code_coverage_bot/uploader.py +++ b/bot/code_coverage_bot/uploader.py @@ -51,7 +51,7 @@ def gcp(repository, revision, report, platform, suite): return blob -def gcp_zero_coverage(repository, report): +def gcp_zero_coverage(report): """ Upload a grcov a zero coverage report on Google Cloud Storage * Compress with zstandard @@ -65,9 +65,7 @@ def gcp_zero_coverage(repository, report): archive = compressor.compress(report) # Upload archive - path = GCP_COVDIR_PATH.format( - repository=repository, - ) + path = "zero_coverage_report" blob = bucket.blob(path) blob.upload_from_string(archive) From 7126927234bd3719ef59760d5fea73823b79160d Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 6 Apr 2023 15:02:24 -0700 Subject: [PATCH 11/30] Fix zero coverage's upload filename --- bot/code_coverage_bot/uploader.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/bot/code_coverage_bot/uploader.py b/bot/code_coverage_bot/uploader.py index 13eb73dbc..2ed3c37c5 100644 --- a/bot/code_coverage_bot/uploader.py +++ b/bot/code_coverage_bot/uploader.py @@ -7,6 +7,7 @@ import tenacity import zstandard as zstd from google.cloud.storage.bucket import Bucket +from requests import HTTPError from code_coverage_bot.secrets import secrets from code_coverage_tools.gcp import get_bucket @@ -45,8 +46,11 @@ def 
gcp(repository, revision, report, platform, suite): logger.info("Uploaded {} on {}".format(path, bucket)) - # Trigger ingestion on backend - gcp_ingest(repository, revision, platform, suite) + try: + # Trigger ingestion on backend + gcp_ingest(repository, revision, platform, suite) + except HTTPError as e: + logger.warn(f"Failed to ingest report. {e}") return blob @@ -65,7 +69,7 @@ def gcp_zero_coverage(report): archive = compressor.compress(report) # Upload archive - path = "zero_coverage_report" + path = "zero_coverage_report.json.zstd" blob = bucket.blob(path) blob.upload_from_string(archive) From 4b35b69853128a3ff2c4e751dcb05fe5d364dd08 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 6 Apr 2023 15:21:22 -0700 Subject: [PATCH 12/30] Ignore ignoring re-runs --- bot/code_coverage_bot/hooks/cron_thunderbird.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index f60ce5353..40a9eb5f8 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -74,7 +74,7 @@ def run(self) -> None: bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) if uploader.gcp_covdir_exists(bucket, self.branch, self.revision, "all", "all"): logger.warn("Full covdir report already on GCP") - return + # return self.retrieve_source_and_artifacts() From 11a575815657041350f5ea53d610224d76b2ea2b Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 6 Apr 2023 15:32:58 -0700 Subject: [PATCH 13/30] Revert "Ignore ignoring re-runs" This reverts commit 4b35b69853128a3ff2c4e751dcb05fe5d364dd08. 
--- bot/code_coverage_bot/hooks/cron_thunderbird.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index 40a9eb5f8..f60ce5353 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -74,7 +74,7 @@ def run(self) -> None: bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) if uploader.gcp_covdir_exists(bucket, self.branch, self.revision, "all", "all"): logger.warn("Full covdir report already on GCP") - # return + return self.retrieve_source_and_artifacts() From 73962a863e7f6e443fdcfbbd12ef0a58c65b30d2 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Fri, 7 Apr 2023 11:35:10 -0700 Subject: [PATCH 14/30] Fix zero coverage breaking report ingestion --- backend/code_coverage_backend/api.py | 8 +++++++- backend/code_coverage_backend/gcp.py | 16 ++++++++++++++-- bot/code_coverage_bot/hooks/cron_thunderbird.py | 4 ++++ bot/code_coverage_bot/uploader.py | 2 +- 4 files changed, 26 insertions(+), 4 deletions(-) diff --git a/backend/code_coverage_backend/api.py b/backend/code_coverage_backend/api.py index 746e7abf2..559f46a70 100644 --- a/backend/code_coverage_backend/api.py +++ b/backend/code_coverage_backend/api.py @@ -3,6 +3,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import json +import os +import tempfile import structlog from flask import abort @@ -139,8 +141,12 @@ def zero_coverage_report(repository=config.DEFAULT_REPOSITORY): """ file = None + path = os.path.join( + tempfile.gettempdir(), "zero-cov-report", "zero_coverage_report.json" + ) + try: - with open("/tmp/zero-cov-report/zero_coverage_report.json", "rb") as fh: + with open(path, "rb") as fh: file = fh.read() except FileNotFoundError as e: logger.warn( diff --git a/backend/code_coverage_backend/gcp.py b/backend/code_coverage_backend/gcp.py index 058d1c9e9..a5173e0d2 100644 --- a/backend/code_coverage_backend/gcp.py +++ b/backend/code_coverage_backend/gcp.py @@ -87,6 +87,15 @@ def __init__(self, reports_dir=None): for report in self.list_reports(repo, nb=1): download_report(self.reports_dir, self.bucket, report.name) + self.zerocov_dir = os.path.join(tempfile.gettempdir(), "zero-cov-report") + os.makedirs(self.zerocov_dir, exist_ok=True) + logger.info( + "Zero Coverage reports will be stored in {}".format(self.zerocov_dir) + ) + + # Grab the latest zero-cov-report + download_report(self.zerocov_dir, self.bucket, "zero_coverage_report") + def ingest_pushes(self, repository, platform, suite, min_push_id=None, nb_pages=3): """ Ingest HGMO changesets and pushes into our Redis Cache @@ -364,10 +373,13 @@ def ingest_available_reports( def ingest_zero_coverage_report(self, revision): """If it's a new revision, download a fresh zero coverage report""" - if self.redis.hget("zero_coverage", "latest-rev").decode() == revision: + if ( + self.redis.hget("zero_coverage", "latest-rev") is None + or self.redis.hget("zero_coverage", "latest-rev").decode() == revision + ): return # Load the most recent zero coverage report into cache - download_report("/tmp/zero-cov-report/", self.bucket, "zero_coverage_report") + download_report(self.zerocov_dir, self.bucket, "zero_coverage_report") self.redis.hset("zero_coverage", "latest-rev", revision) diff --git 
a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index f60ce5353..bd6c2cfbb 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -74,6 +74,10 @@ def run(self) -> None: bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) if uploader.gcp_covdir_exists(bucket, self.branch, self.revision, "all", "all"): logger.warn("Full covdir report already on GCP") + + # Ping the backend to ingest any reports that may have failed + uploader.gcp_ingest(self.branch, self.revision, "all", "all") + return self.retrieve_source_and_artifacts() diff --git a/bot/code_coverage_bot/uploader.py b/bot/code_coverage_bot/uploader.py index 2ed3c37c5..1c2359433 100644 --- a/bot/code_coverage_bot/uploader.py +++ b/bot/code_coverage_bot/uploader.py @@ -68,7 +68,7 @@ def gcp_zero_coverage(report): compressor = zstd.ZstdCompressor(threads=-1) archive = compressor.compress(report) - # Upload archive + # Upload archive (this should be in the base directory, because we only care about the latest report) path = "zero_coverage_report.json.zstd" blob = bucket.blob(path) blob.upload_from_string(archive) From 0b7342f91fa7c4710a9ad05f327637adbdaa5ad1 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Mon, 10 Apr 2023 10:29:05 -0700 Subject: [PATCH 15/30] Readme updates, and docker compose cleanup. 
--- README.md | 9 +++++++++ docker-compose.yml | 2 -- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1e8a1beac..a93e510ce 100644 --- a/README.md +++ b/README.md @@ -10,3 +10,12 @@ This project has 4 parts: ## Help You can reach us on our Matrix instance: [#codecoverage:mozilla.org](https://chat.mozilla.org/#/room/#codecoverage:mozilla.org) + +## Thunderbird Changes + +This fork contains some Thunderbird specific changes: + +* Zero coverage reports are uploaded to Google Cloud Storage, and pulled down by the backend api. +* A Thunderbird Cron file has been added, that pulls down comm-central's tip revision and generates all reports. +* The frontend has been removed from `.dockerignore`, and runs in a docker container for deployment purposes. +* Various tweaks to allow a repository that isn't mozilla-central to generate reports (these changes are also available on `multi-repo` branch.) diff --git a/docker-compose.yml b/docker-compose.yml index aad9dbc66..6c199646e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,5 @@ services: bot: - #platform: linux/x86_64 build: context: . dockerfile: bot/dev.dockerfile @@ -34,7 +33,6 @@ services: networks: - codecov frontend: - #platform: linux/x86_64 build: context: . dockerfile: frontend/dev.dockerfile From e43ad361704c690c4ac6a26437d236b1de199391 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Wed, 12 Apr 2023 09:03:02 -0700 Subject: [PATCH 16/30] Add an environment variable to use iso date strings `USE_ISO_DATE` defaults to false. 
--- frontend/deploy.dockerfile | 2 +- frontend/dev.dockerfile | 3 ++- frontend/src/index.js | 8 ++++++++ frontend/webpack.common.js | 3 ++- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/frontend/deploy.dockerfile b/frontend/deploy.dockerfile index db12d4aaa..7234b8349 100644 --- a/frontend/deploy.dockerfile +++ b/frontend/deploy.dockerfile @@ -15,8 +15,8 @@ apt-get install -y nodejs ENV BACKEND_URL=https://coverage.thunderbird.net ENV REPOSITORY=https://hg.mozilla.org/comm-central ENV PROJECT=comm-central -# TODO: Figure out zero coverage report location ENV ZERO_COVERAGE_REPORT=https://coverage.thunderbird.net/v2/zero-coverage-report +ENV USE_ISO_DATE=true RUN npm install RUN npm run build diff --git a/frontend/dev.dockerfile b/frontend/dev.dockerfile index e734c5a69..6069f0e78 100644 --- a/frontend/dev.dockerfile +++ b/frontend/dev.dockerfile @@ -14,7 +14,8 @@ apt-get install -y nodejs ENV BACKEND_URL=http://localhost:8080 ENV REPOSITORY=https://hg.mozilla.org/comm-central ENV PROJECT=comm-central -ENV ZERO_COVERAGE_REPORT=/assets/zero_coverage_report.json +ENV ZERO_COVERAGE_REPORT=http://localhost:8080/v2/zero-coverage-report +ENV USE_ISO_DATE=true RUN npm install RUN npm run build diff --git a/frontend/src/index.js b/frontend/src/index.js index fe8d304bd..0f304f841 100644 --- a/frontend/src/index.js +++ b/frontend/src/index.js @@ -56,6 +56,14 @@ async function graphHistory(history, path) { const dateStr = function(timestamp) { const date = new Date(timestamp); + + if (process.env.USE_ISO_DATE) { + const month = `${date.getMonth() + 1}`.padStart(2, '0'); + const day = `${date.getDate()}`.padStart(2, '0'); + + return 
`${date.getFullYear()}/${month}/${day}`; + } + return `${date.getDate()}/${date.getMonth() + 1}/${date.getFullYear()}`; }; diff --git a/frontend/webpack.common.js b/frontend/webpack.common.js index cc45cafa8..34e628d17 100644 --- a/frontend/webpack.common.js +++ b/frontend/webpack.common.js @@ -24,7 +24,8 @@ module.exports = { BACKEND_URL: 'http://localhost:8000', REPOSITORY: 'https://hg.mozilla.org/mozilla-central', PROJECT: 'mozilla-central', - ZERO_COVERAGE_REPORT: 'https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/zero_coverage_report.json' + ZERO_COVERAGE_REPORT: 'https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/project.relman.code-coverage.production.cron.latest/artifacts/public/zero_coverage_report.json', + USE_ISO_DATE: false, }), ], module: { From db35e74316cb8b6b98370eff0646e2d163a5086e Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Wed, 12 Apr 2023 09:15:57 -0700 Subject: [PATCH 17/30] Fix the prettier error.. 
--- frontend/src/index.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/index.js b/frontend/src/index.js index 0f304f841..ac519d036 100644 --- a/frontend/src/index.js +++ b/frontend/src/index.js @@ -58,8 +58,8 @@ async function graphHistory(history, path) { const date = new Date(timestamp); if (process.env.USE_ISO_DATE) { - const month = `${date.getMonth() + 1}`.padStart(2, '0'); - const day = `${date.getDate()}`.padStart(2, '0'); + const month = `${date.getMonth() + 1}`.padStart(2, "0"); + const day = `${date.getDate()}`.padStart(2, "0"); return `${date.getFullYear()}/${month}/${day}`; } From 35e35f43e208d319ec276000212fe461075500d7 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 13 Apr 2023 09:47:17 -0700 Subject: [PATCH 18/30] Tag frontend/backend as latest instead of with the commit hash --- .github/workflows/aws.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/aws.yml b/.github/workflows/aws.yml index c994b6ff3..e27c58ee3 100644 --- a/.github/workflows/aws.yml +++ b/.github/workflows/aws.yml @@ -52,7 +52,7 @@ jobs: id: build-backend env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: backend-${{ github.sha }} + IMAGE_TAG: backend-latest run: | # Build a docker container and # push it to ECR so that it can @@ -65,7 +65,7 @@ jobs: id: build-frontend env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: frontend-${{ github.sha }} + IMAGE_TAG: frontend-latest run: | # Build a docker container and # push it to ECR so that it can From 732636364644abe68334de2810df9055209d02ed Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Tue, 3 Oct 2023 15:06:19 -0700 Subject: [PATCH 19/30] Fix frontend's dockerfile --- frontend/deploy.dockerfile | 2 +- frontend/dev.dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/deploy.dockerfile b/frontend/deploy.dockerfile index 7234b8349..42b9a659d 100644 --- 
a/frontend/deploy.dockerfile +++ b/frontend/deploy.dockerfile @@ -5,7 +5,7 @@ ADD frontend /src/frontend WORKDIR /src/frontend # Install some essentials -RUN apt-get update && apt-get install -y build-essential python python-dev +RUN apt-get update && apt-get install -y build-essential python3 python3-dev # Install node RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - &&\ diff --git a/frontend/dev.dockerfile b/frontend/dev.dockerfile index 6069f0e78..c31148d6b 100644 --- a/frontend/dev.dockerfile +++ b/frontend/dev.dockerfile @@ -5,7 +5,7 @@ ADD frontend /src/frontend WORKDIR /src/frontend # Install some essentials -RUN apt-get update && apt-get install -y build-essential python python-dev +RUN apt-get update && apt-get install -y build-essential python3 python3-dev # Install node RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - &&\ From d08552b59aea97148b479ceb6db2c1f6663d9043 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 5 Oct 2023 10:40:38 -0700 Subject: [PATCH 20/30] Force the commit_coverage download to use `raw_download`. 
(Fixes #1) --- bot/code_coverage_bot/commit_coverage.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/commit_coverage.py b/bot/code_coverage_bot/commit_coverage.py index c8784dea8..f877aeea2 100644 --- a/bot/code_coverage_bot/commit_coverage.py +++ b/bot/code_coverage_bot/commit_coverage.py @@ -54,7 +54,9 @@ def generate( blob = bucket.blob("commit_coverage.json.zst") if blob.exists(): dctx = zstandard.ZstdDecompressor() - commit_coverage = json.loads(dctx.decompress(blob.download_as_bytes())) + commit_coverage = json.loads( + dctx.decompress(blob.download_as_bytes(raw_download=True)) + ) else: commit_coverage = {} From 490b473bc2d05ddb31144098daa9427e37897f2f Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 5 Oct 2023 12:23:09 -0700 Subject: [PATCH 21/30] Guard against grcov failures for specific report sections --- .../hooks/cron_thunderbird.py | 66 +++++++++++-------- 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index bd6c2cfbb..815cd5168 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -86,36 +86,48 @@ def run(self) -> None: logger.info("Generating commit coverage reports") commit_coverage.generate(self.repository, self.project, self.repo_dir) - logger.info("Generating zero coverage reports") - zc = ZeroCov(self.repo_dir) - zc.generate( - self.artifactsHandler.get(), self.revision, self.reports_dir, self.prefix - ) - - # Upload zero cov on GCP - self.upload_reports( - { - ( - "zero-coverage", - "zero-coverage", - ): f"{self.reports_dir}/zero_coverage_report.json" - }, - True, - ) - logger.info("Uploaded zero coverage report") + try: + logger.info("Generating zero coverage reports") + zc = ZeroCov(self.repo_dir) + zc.generate( + self.artifactsHandler.get(), + self.revision, + self.reports_dir, + self.prefix, + ) + + # Upload 
zero cov on GCP + self.upload_reports( + { + ( + "zero-coverage", + "zero-coverage", + ): f"{self.reports_dir}/zero_coverage_report.json" + }, + True, + ) + logger.info("Uploaded zero coverage report") + except Exception as e: + # Can occur on grcov failure + logger.error("Zero coverage report failed: {0}".format(e)) logger.info("Generating full report") - reports = self.build_reports(only=[("all", "all")]) - - # Generate all reports except the full one which we generated earlier. - all_report_combinations = self.artifactsHandler.get_combinations() - del all_report_combinations[("all", "all")] - reports.update(self.build_reports()) - logger.info("Built all covdir reports", nb=len(reports)) - # Upload reports on GCP - self.upload_reports(reports) - logger.info("Uploaded all covdir reports", nb=len(reports)) + try: + reports = self.build_reports(only=[("all", "all")]) + + # Generate all reports except the full one which we generated earlier. + all_report_combinations = self.artifactsHandler.get_combinations() + del all_report_combinations[("all", "all")] + reports.update(self.build_reports()) + logger.info("Built all covdir reports", nb=len(reports)) + + # Upload reports on GCP + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) + except Exception as e: + # Can occur on grcov failure + logger.error("Covdir coverage report failed: {0}".format(e)) def main() -> None: From 561a1d7e97bef5487c9be9d65957bdd1037819ae Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 5 Oct 2023 13:16:24 -0700 Subject: [PATCH 22/30] Split all and platform specific report generation --- .../hooks/cron_thunderbird.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index 815cd5168..b2a273061 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ 
-113,22 +113,33 @@ def run(self) -> None: logger.info("Generating full report") + reports = {} + try: reports = self.build_reports(only=[("all", "all")]) + except Exception as e: + # Can occur on grcov failure + logger.error("All covdir coverage report failed: {0}".format(e)) + try: # Generate all reports except the full one which we generated earlier. all_report_combinations = self.artifactsHandler.get_combinations() del all_report_combinations[("all", "all")] + reports.update(self.build_reports()) logger.info("Built all covdir reports", nb=len(reports)) - - # Upload reports on GCP - self.upload_reports(reports) - logger.info("Uploaded all covdir reports", nb=len(reports)) except Exception as e: # Can occur on grcov failure logger.error("Covdir coverage report failed: {0}".format(e)) + if len(reports) == 0: + logger.warning("No reports to upload...") + return + + # Upload reports on GCP + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) + def main() -> None: logger.info("Starting code coverage bot for cron thunderbird") From fff06d21ac967e4bdbc084ea3c93d3d49ea0e2aa Mon Sep 17 00:00:00 2001 From: Martin Giger Date: Fri, 10 Nov 2023 16:40:17 +0100 Subject: [PATCH 23/30] Correct path to Thunderbird ThirdPartyPaths.txt --- frontend/src/common.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/common.js b/frontend/src/common.js index 6222e7da4..58babcb46 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -203,7 +203,7 @@ const getThirdPartyPaths = (function() { let paths = null; return async function() { if (!paths) { - const response = await getSource("tools/rewriting/ThirdPartyPaths.txt"); + const response = await getSource("tools/lint/ThirdPartyPaths.txt"); paths = response.split("\n").filter(path => path !== ""); } From 272d20ae9bfd9e9a870d3c6201d73bbaecf29141 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Mon, 20 Nov 2023 11:14:38 -0800 Subject: [PATCH 24/30] Don't split 
ThirdPartyPaths.txt twice. --- frontend/src/common.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/common.js b/frontend/src/common.js index 58babcb46..e0da9b5d4 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -204,7 +204,7 @@ const getThirdPartyPaths = (function() { return async function() { if (!paths) { const response = await getSource("tools/lint/ThirdPartyPaths.txt"); - paths = response.split("\n").filter(path => path !== ""); + paths = response.filter(path => path !== ""); } return paths; From 2942da93f794af029ba8bfd8d2812b1f07c99390 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Wed, 22 Nov 2023 10:56:55 -0800 Subject: [PATCH 25/30] Filter out the comm directory for third party paths. --- frontend/src/common.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/src/common.js b/frontend/src/common.js index e0da9b5d4..d3f230f2f 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -204,7 +204,8 @@ const getThirdPartyPaths = (function() { return async function() { if (!paths) { const response = await getSource("tools/lint/ThirdPartyPaths.txt"); - paths = response.filter(path => path !== ""); + // Filter out the comm directory for TB + paths = response.filter(path => path !== "").map((path) => path.replace('comm/', '')); } return paths; From 0d6cb5c76efe31258e0b3e12f12855f5a3b7eb82 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Wed, 22 Nov 2023 11:09:27 -0800 Subject: [PATCH 26/30] Fix linting issue. 
--- frontend/src/common.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frontend/src/common.js b/frontend/src/common.js index d3f230f2f..110eb057f 100644 --- a/frontend/src/common.js +++ b/frontend/src/common.js @@ -205,7 +205,9 @@ const getThirdPartyPaths = (function() { if (!paths) { const response = await getSource("tools/lint/ThirdPartyPaths.txt"); // Filter out the comm directory for TB - paths = response.filter(path => path !== "").map((path) => path.replace('comm/', '')); + paths = response + .filter(path => path !== "") + .map(path => path.replace("comm/", "")); } return paths; From 1d1f5260bcc888929980559c16df3fcd4f19e3b1 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 23 Nov 2023 10:41:26 -0800 Subject: [PATCH 27/30] Check raw-log for the most recent revisions that are done building, and use that for coverage stats. --- .../hooks/cron_thunderbird.py | 94 +++++++++++++++---- 1 file changed, 78 insertions(+), 16 deletions(-) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index b2a273061..ec86c622b 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -10,6 +10,7 @@ from code_coverage_bot import commit_coverage from code_coverage_bot import config +from code_coverage_bot import taskcluster from code_coverage_bot import uploader from code_coverage_bot.cli import setup_cli from code_coverage_bot.hooks.base import Hook @@ -25,6 +26,9 @@ class CronThunderbirdHook(Hook): This cron class handles all report generation for Thunderbird's comm-central """ + # The last revision that we checked to see if it was usable (for fail exception use only) + last_revision_tested = None + def upload_reports(self, reports, zero_cov=False): """ Upload all provided covdir reports on GCP @@ -39,31 +43,90 @@ def upload_reports(self, reports, zero_cov=False): self.branch, self.revision, report, suite=suite, platform=platform ) 
- def __init__( - self, namespace, project, repository, upstream, prefix, *args, **kwargs - ): + def has_revision_been_processed_before(self, branch, revision): + """Returns True if the revision is in our storage bucket.""" + bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) + return uploader.gcp_covdir_exists(bucket, branch, revision, "all", "all") + + def is_revision_usable(self, namespace, branch, revision): + """Checks if a given revision (from branch, and namespace) is usable (tasks==completed, and exists)""" + self.last_revision_tested = revision + + # Load coverage tasks for all platforms + decision_task_id = taskcluster.get_decision_task(namespace, branch, revision) + + # No build! + if decision_task_id is None: + return False + + group = taskcluster.get_task_details(decision_task_id)["taskGroupId"] + + test_tasks = [ + task + for task in taskcluster.get_tasks_in_group(group) + if taskcluster.is_coverage_task(task["task"]) + ] + + if len(test_tasks) == 0: + return False + + # Find a task that isn't pending (this includes failed tasks btw) + for test_task in test_tasks: + status = test_task["status"]["state"] + if status not in taskcluster.FINISHED_STATUSES: + return False + + return True + + def search_for_latest_built_revision(self, namespace, branch, project, repository): + """Pulls down raw-log and goes through each changeset until we find a revision that is built (or not and return None)""" + log_response = requests.get(f"{repository}/raw-log") - tip_response = requests.get(f"{repository}/raw-rev/tip") # Yell if there's any issues try: - tip_response.raise_for_status() + log_response.raise_for_status() except HTTPError as e: - logger.error(f"Could not access raw revision for {project} tip: {e}") + logger.error(f"Could not access raw log for {project}: {e}") raise - # Node ID == Revision - revision_regex = r"^# Node ID ([\w\d]*)$" - matches = re.search(revision_regex, tip_response.text[:2048], re.MULTILINE) + # Changeset == Revision + 
revision_regex = r"^changeset:[\s]*([\w\d]*)$" + matches = re.findall(revision_regex, log_response.text[:10240], re.MULTILINE) - if len(matches.groups()) == 0: - error = "Failed to retrieve revision from tip, no match within 2048 bytes!" + if len(matches) == 0: + error = ( + "Failed to retrieve revision from raw-log, no match within 10240 bytes!" + ) logger.error(error) raise Exception(error) - # Grab that revision - revision = matches.groups()[0] + for revision in matches: + # If we hit a revision we've processed before, we don't want to process anything past that! + if self.has_revision_been_processed_before(branch, revision): + break + + # Is this revision usable (has a build/artifacts, and not a pending build) + if self.is_revision_usable(namespace, branch, revision): + return revision - logger.info(f"Using revision id {revision} from tip") + return None + + def __init__( + self, namespace, project, repository, upstream, prefix, *args, **kwargs + ): + # Assign early so we can get self.branch property working + self.repository = repository + + revision = self.search_for_latest_built_revision( + namespace, self.branch, project, repository + ) + + if revision is None: + error = f"No available revision has been found, exiting! Last revision tested: {self.last_revision_tested}." 
+ logger.error(error) + raise Exception(error) + + logger.info(f"Using revision id {revision} for coverage stats.") super().__init__( namespace, project, repository, upstream, revision, prefix, *args, **kwargs @@ -71,8 +134,7 @@ def __init__( def run(self) -> None: # Check the covdir report does not already exists - bucket = gcp.get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE]) - if uploader.gcp_covdir_exists(bucket, self.branch, self.revision, "all", "all"): + if self.has_revision_been_processed_before(self.branch, self.revision): logger.warn("Full covdir report already on GCP") # Ping the backend to ingest any reports that may have failed From 10f0e752239a58d954fe5b0a25fc9341551cce90 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Thu, 23 Nov 2023 15:21:36 -0800 Subject: [PATCH 28/30] Use the json-pushes endpoint instead of raw-log, and only check the last changeset of a given push. --- .../hooks/cron_thunderbird.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/bot/code_coverage_bot/hooks/cron_thunderbird.py b/bot/code_coverage_bot/hooks/cron_thunderbird.py index ec86c622b..09d229e66 100644 --- a/bot/code_coverage_bot/hooks/cron_thunderbird.py +++ b/bot/code_coverage_bot/hooks/cron_thunderbird.py @@ -2,8 +2,6 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-import re - import requests import structlog from requests import HTTPError @@ -80,7 +78,10 @@ def is_revision_usable(self, namespace, branch, revision): def search_for_latest_built_revision(self, namespace, branch, project, repository): """Pulls down raw-log and goes through each changeset until we find a revision that is built (or not and return None)""" - log_response = requests.get(f"{repository}/raw-log") + log_response = requests.get( + f"{repository}/json-pushes", + headers={"User-Agent": "thunderbird-code-coverage-bot"}, + ) # Yell if there's any issues try: @@ -89,18 +90,17 @@ def search_for_latest_built_revision(self, namespace, branch, project, repositor logger.error(f"Could not access raw log for {project}: {e}") raise - # Changeset == Revision - revision_regex = r"^changeset:[\s]*([\w\d]*)$" - matches = re.findall(revision_regex, log_response.text[:10240], re.MULTILINE) + log_data = log_response.json() - if len(matches) == 0: - error = ( - "Failed to retrieve revision from raw-log, no match within 10240 bytes!" - ) + if len(log_data) == 0: + error = "Failed to retrieve data from json-pushes!" logger.error(error) raise Exception(error) - for revision in matches: + # Look through each push and grab the last changeset (that's the one that builds!) + for _, push in reversed(log_data.items()): + revision = push["changesets"][-1] + # If we hit a revision we've processed before, we don't want to process anything past that! if self.has_revision_been_processed_before(branch, revision): break From cebdb0cce6815c87820075fc613611d694174cc0 Mon Sep 17 00:00:00 2001 From: Melissa Autumn Date: Mon, 15 Jan 2024 10:59:40 -0800 Subject: [PATCH 29/30] Mention the memory requirement for running bot's grcov step. 
--- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index a93e510ce..bdcaa10a6 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,8 @@ You can reach us on our Matrix instance: [#codecoverage:mozilla.org](https://cha ## Thunderbird Changes +Note: The system running the container must be supplied with at least 16 GB of memory. Otherwise you will run into out-of-memory (OOM) killer issues while grcov runs. + This fork contains some Thunderbird specific changes: * Zero coverage reports are uploaded to Google Cloud Storage, and pulled down by the backend api. From a92900ff97b0ab9f554eb4d171f27acf261cea38 Mon Sep 17 00:00:00 2001 From: Arron Atchison Date: Thu, 17 Jul 2025 14:50:36 -0700 Subject: [PATCH 30/30] Use ${{ github.sha }} to append the commit hash to the image name. The task definition should automatically get the correct name from the stage output. --- .github/workflows/aws.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/aws.yml b/.github/workflows/aws.yml index e27c58ee3..ecf42d203 100644 --- a/.github/workflows/aws.yml +++ b/.github/workflows/aws.yml @@ -52,7 +52,7 @@ jobs: id: build-backend env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: backend-latest + IMAGE_TAG: backend-${{ github.sha }} run: | # Build a docker container and # push it to ECR so that it can @@ -65,7 +65,7 @@ jobs: id: build-frontend env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: frontend-latest + IMAGE_TAG: frontend-${{ github.sha }} run: | # Build a docker container and # push it to ECR so that it can @@ -79,7 +79,7 @@ jobs: id: build-bot env: ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - IMAGE_TAG: bot-latest + IMAGE_TAG: bot-${{ github.sha }} run: | # Build a docker container and # push it to ECR so that it can