diff --git a/.github/workflows/test_integration.yml b/.github/workflows/test_integration.yml index 6f4fc00dd..4809d04ee 100644 --- a/.github/workflows/test_integration.yml +++ b/.github/workflows/test_integration.yml @@ -14,11 +14,12 @@ # Workflow configuration for S-CORE CI - Bazel Build & Test baselibs # This workflow runs Bazel build and test when triggered by specific pull request events. -name: Bazel Build some repositories +name: build latest mains on: workflow_dispatch: - push: pull_request: + schedule: + - cron: '30 2 * * *' # Every night at 02:30 UTC on main branch jobs: integration_test: runs-on: ubuntu-latest @@ -51,9 +52,21 @@ jobs: disk-cache: ${{ github.workflow }} # Share repository cache between workflows. repository-cache: true + - name: Update known good commits + run: | + echo "::group::get latest commits from module branches" + python3 tools/update_module_latest.py --output known_good.updated.json + cat known_good.updated.json + echo "::endgroup::" + echo "::group::update score_modules.MODULE.bazel" + python3 tools/update_module_from_known_good.py --known known_good.updated.json + cat score_modules.MODULE.bazel + echo "::endgroup::" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Bazel build targets run: | - ./integration_test.sh + ./integration_test.sh --known-good known_good.updated.json - name: Show disk space after build if: always() run: | diff --git a/MODULE.bazel b/MODULE.bazel index 8f2fb7e02..7cce020da 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -47,6 +47,7 @@ use_repo(pip, "pip_score_venv_test") # communication module dependencies # archive_override are not forwarded by bazel_dep, so we need to redefine it here +bazel_dep(name = "rules_boost", repo_name = "com_github_nelhage_rules_boost") archive_override( module_name = "rules_boost", strip_prefix = "rules_boost-master", @@ -54,6 +55,7 @@ archive_override( ) # git_override are not forwarded by bazel_dep, so we need to redefine it here +bazel_dep(name = "trlc") 
 git_override(
     module_name = "trlc",
     remote = "https://github.com/bmw-software-engineering/trlc.git",
diff --git a/README.md b/README.md
index ca07fd6ef..f56b3abeb 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ bazel build --config bl-x86_64-linux @score_baselibs//score/... --verbose_failur
 ### Communication
 
 ```bash
-bazel build --config bl-x86_64-linux @score_communication//score/mw/com:com --verbose_failures
+bazel build --config bl-x86_64-linux @communication//score/mw/com:com --verbose_failures
 ```
 
 ### Persistency
diff --git a/integration_test.sh b/integration_test.sh
index b35e2492c..f7aa9e6ea 100755
--- a/integration_test.sh
+++ b/integration_test.sh
@@ -1,24 +1,79 @@
 #!/usr/bin/env bash
-set -euox pipefail
+set -euo pipefail
 
 # Integration build script.
 # Captures warning counts for regression tracking.
+#
+# Usage: ./integration_test.sh [--known-good <file>]
+#   --known-good: Optional path to known_good.json file
 
 CONFIG=${CONFIG:-bl-x86_64-linux}
 LOG_DIR=${LOG_DIR:-_logs/logs}
 SUMMARY_FILE=${SUMMARY_FILE:-_logs/build_summary.md}
-mkdir -p "${LOG_DIR}" || true
+KNOWN_GOOD_FILE=""
 
+# maybe move this to known_good.json or a config file later
 declare -A BUILD_TARGET_GROUPS=(
-    [baselibs]="@score_baselibs//score/..."
+    [score_baselibs]="@score_baselibs//score/..."
     [score_communication]="@score_communication//score/mw/com:com"
-    [persistency]="@score_persistency//src/cpp/src/... @score_persistency//src/rust/..."
+    [score_persistency]="@score_persistency//src/cpp/src/... @score_persistency//src/rust/..."
     #[score_logging]="@score_logging//src/..."
     [score_orchestrator]="@score_orchestrator//src/..."
     [score_test_scenarios]="@score_test_scenarios//..."
     [score_feo]="@score_feo//..."
 )
 
+# Parse command line arguments
+while [[ $# -gt 0 ]]; do
+    case $1 in
+        --known-good)
+            KNOWN_GOOD_FILE="$2"
+            shift 2
+            ;;
+        *)
+            echo "Unknown option: $1"
+            echo "Usage: $0 [--known-good <file>]"
+            exit 1
+            ;;
+    esac
+done
+
+mkdir -p "${LOG_DIR}" || true
+
+# Function to extract commit hash from known_good.json
+get_commit_hash() {
+    local module_name=$1
+    local known_good_file=$2
+
+    if [[ -z "${known_good_file}" ]] || [[ ! -f "${known_good_file}" ]]; then
+        echo "N/A"
+        return
+    fi
+
+    # Get the script directory
+    local script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+    # Use the Python script to extract module info
+    python3 "${script_dir}/tools/get_module_info.py" "${known_good_file}" "${module_name}" "hash" 2>/dev/null || echo "N/A"
+}
+
+# Function to extract repo URL from known_good.json
+get_module_repo() {
+    local module_name=$1
+    local known_good_file=$2
+
+    if [[ -z "${known_good_file}" ]] || [[ ! -f "${known_good_file}" ]]; then
+        echo "N/A"
+        return
+    fi
+
+    # Get the script directory
+    local script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+
+    # Use the Python script to extract module repo
+    python3 "${script_dir}/tools/get_module_info.py" "${known_good_file}" "${module_name}" "repo" 2>/dev/null || echo "N/A"
+}
+
 warn_count() {
     # Grep typical compiler and Bazel warnings; adjust patterns as needed.
local file=$1 @@ -35,29 +90,41 @@ timestamp() { date '+%Y-%m-%d %H:%M:%S'; } echo "=== Integration Build Started $(timestamp) ===" | tee "${SUMMARY_FILE}" echo "Config: ${CONFIG}" | tee -a "${SUMMARY_FILE}" +if [[ -n "${KNOWN_GOOD_FILE}" ]]; then + echo "Known Good File: ${KNOWN_GOOD_FILE}" | tee -a "${SUMMARY_FILE}" +fi echo "" >> "${SUMMARY_FILE}" echo "## Build Groups Summary" >> "${SUMMARY_FILE}" echo "" >> "${SUMMARY_FILE}" # Markdown table header { - echo "| Group | Status | Duration (s) | Warnings | Deprecated refs |"; - echo "|-------|--------|--------------|----------|-----------------|"; + echo "| Group | Status | Duration (s) | Warnings | Deprecated refs | Commit/Version |"; + echo "|-------|--------|--------------|----------|-----------------|----------------|"; } >> "${SUMMARY_FILE}" overall_warn_total=0 overall_depr_total=0 +# Track if any build group failed +any_failed=0 + for group in "${!BUILD_TARGET_GROUPS[@]}"; do targets="${BUILD_TARGET_GROUPS[$group]}" log_file="${LOG_DIR}/${group}.log" + # Log build group banner only to stdout/stderr (not into summary table file) echo "--- Building group: ${group} ---" + start_ts=$(date +%s) + echo "bazel build --config "${CONFIG}" ${targets} --verbose_failures" # GitHub Actions log grouping start echo "::group::Bazel build (${group})" - start_ts=$(date +%s) set +e bazel build --config "${CONFIG}" ${targets} --verbose_failures 2>&1 | tee "$log_file" build_status=${PIPESTATUS[0]} + # Track if any build group failed + if [[ ${build_status} -ne 0 ]]; then + any_failed=1 + fi set -e echo "::endgroup::" # End Bazel build group end_ts=$(date +%s) @@ -72,16 +139,37 @@ for group in "${!BUILD_TARGET_GROUPS[@]}"; do else status_symbol="❌(${build_status})" fi - echo "| ${group} | ${status_symbol} | ${duration} | ${w_count} | ${d_count} |" | tee -a "${SUMMARY_FILE}" + + # Get commit hash/version for this group (group name is the module name) + commit_hash=$(get_commit_hash "${group}" "${KNOWN_GOOD_FILE}") + 
repo=$(get_module_repo "${group}" "${KNOWN_GOOD_FILE}") + + # Truncate commit hash for display (first 8 chars) + if [[ "${commit_hash}" != "N/A" ]] && [[ ${#commit_hash} -gt 8 ]]; then + commit_hash_display="${commit_hash:0:8}" + else + commit_hash_display="${commit_hash}" + fi + + # Only add link if KNOWN_GOOD_FILE is set + if [[ -n "${KNOWN_GOOD_FILE}" ]]; then + commit_version_cell="[${commit_hash_display}](${repo}/tree/${commit_hash})" + else + commit_version_cell="${commit_hash_display}" + fi + + echo "| ${group} | ${status_symbol} | ${duration} | ${w_count} | ${d_count} | ${commit_version_cell} |" | tee -a "${SUMMARY_FILE}" done # Append aggregate totals row to summary table -echo "| TOTAL | | | ${overall_warn_total} | ${overall_depr_total} |" >> "${SUMMARY_FILE}" - -# Display the full build summary explicitly at the end +echo "| TOTAL | | | ${overall_warn_total} | ${overall_depr_total} | |" >> "${SUMMARY_FILE}" echo '::group::Build Summary' echo '=== Build Summary (echo) ===' cat "${SUMMARY_FILE}" || echo "(Could not read summary file ${SUMMARY_FILE})" echo '::endgroup::' -exit 0 \ No newline at end of file +# Report to GitHub Actions if any build group failed +if [[ ${any_failed} -eq 1 ]]; then + echo "::error::One or more build groups failed. See summary above." 
+ exit 1 +fi diff --git a/known_good.json b/known_good.json new file mode 100644 index 000000000..f0df662a8 --- /dev/null +++ b/known_good.json @@ -0,0 +1,55 @@ +{ + "timestamp": "2025-08-13T12:55:10Z", + "modules": { + "score_baselibs": { + "version": "0.1.3", + "repo": "https://github.com/eclipse-score/baselibs.git", + "branch": "s_core_release_v0_5_0" + }, + "score_communication": { + "version": "0.1.1", + "repo": "https://github.com/eclipse-score/communication.git", + "branch": "s_core_release_v0_5_0" + }, + "score_persistency": { + "version": "0.2.1", + "repo": "https://github.com/eclipse-score/persistency.git" + }, + "score_orchestrator": { + "version": "0.0.3", + "repo": "https://github.com/eclipse-score/orchestrator.git" + }, + "score_tooling": { + "version": "1.0.2", + "repo": "https://github.com/eclipse-score/tooling.git" + }, + "score_platform": { + "hash": "a9cf44be1342f3c62111de2249eb3132f5ab88da", + "repo": "https://github.com/eclipse-score/score.git" + }, + "score_bazel_platforms": { + "version": "0.0.2", + "repo": "https://github.com/eclipse-score/bazel_platforms.git" + }, + "score_test_scenarios": { + "version": "0.3.0", + "repo": "https://github.com/eclipse-score/testing_tools.git" + }, + "score_docs_as_code": { + "version": "2.0.1", + "repo": "https://github.com/eclipse-score/docs-as-code.git" + }, + "score_process": { + "version": "1.3.1", + "repo": "https://github.com/eclipse-score/process_description.git" + }, + "score_feo": { + "version": "1.0.2", + "repo": "https://github.com/eclipse-score/feo.git", + "branch": "candidate_v0.5" + } + }, + "manifest_sha256": "4c9b7f...", + "suite": "full", + "duration_s": 742 +} \ No newline at end of file diff --git a/tools/get_module_info.py b/tools/get_module_info.py new file mode 100644 index 000000000..ac463ef1f --- /dev/null +++ b/tools/get_module_info.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +"""Extract module information from known_good.json.""" + +import json +import sys +from typing import Dict, 
Any
+
+
+def load_module_data(known_good_file: str, module_name: str) -> Dict[str, Any]:
+    """
+    Load module data from known_good.json.
+
+    Args:
+        known_good_file: Path to the known_good.json file
+        module_name: Name of the module to look up
+
+    Returns:
+        Dictionary with module data, or empty dict if not found
+    """
+    try:
+        with open(known_good_file, 'r') as f:
+            data = json.load(f)
+        modules = data.get('modules', {})
+        return modules.get(module_name, {})
+    except Exception:
+        return {}
+
+
+def get_module_field(module_data: Dict[str, Any], field: str = 'hash') -> str:
+    """
+    Extract a specific field from module data.
+
+    Args:
+        module_data: Dictionary with module information
+        field: Field to extract ('hash', 'version', 'repo', or 'all')
+
+    Returns:
+        Requested field value, or 'N/A' if not found
+        For 'hash': the full commit hash (display truncation is done by the caller)
+        For 'all': returns hash/version (prefers hash, falls back to version)
+    """
+    if not module_data:
+        return 'N/A'
+
+    if field == 'repo':
+        repo = module_data.get('repo', 'N/A')
+        # Remove .git suffix if present
+        if repo.endswith('.git'):
+            repo = repo[:-4]
+        return repo
+    elif field == 'version':
+        return module_data.get('version', 'N/A')
+    elif field == 'hash':
+        hash_val = module_data.get('hash', 'N/A')
+        return hash_val
+    else:  # field == 'all' or default
+        hash_val = module_data.get('hash', module_data.get('version', 'N/A'))
+        return hash_val
+
+
+if __name__ == '__main__':
+    if len(sys.argv) < 3 or len(sys.argv) > 4:
+        print('Usage: get_module_info.py <known_good_file> <module_name> [field]')
+        print('  field: hash (default), version, repo, or all')
+        print('N/A')
+        sys.exit(1)
+
+    known_good_file = sys.argv[1]
+    module_name = sys.argv[2]
+    field = sys.argv[3] if len(sys.argv) == 4 else 'all'
+
+    module_data = load_module_data(known_good_file, module_name)
+    result = get_module_field(module_data, field)
+    print(result)
diff --git a/tools/requirements.txt b/tools/requirements.txt
new file mode 100644
index 000000000..1c871cd93
--- /dev/null
+++ 
b/tools/requirements.txt @@ -0,0 +1 @@ +PyGithub>=2.1.1 diff --git a/tools/update_module_from_known_good.py b/tools/update_module_from_known_good.py new file mode 100644 index 000000000..67820ecf0 --- /dev/null +++ b/tools/update_module_from_known_good.py @@ -0,0 +1,198 @@ +#!/usr/bin/env python3 +""" +Read a known_good.json file and generate a score_modules.MODULE.bazel file +with `bazel_dep` and `git_override` calls for each module in the JSON. + +Usage: + python3 tools/update_module_from_known_good.py \ + --known known_good.json \ + --output score_modules.MODULE.bazel + +The generated score_modules.MODULE.bazel file is included by MODULE.bazel. +""" +import argparse +import json +import os +import re +from datetime import datetime +import logging +from typing import Dict, List, Any, Optional + + +def load_known_good(path: str) -> Dict[str, Any]: + """Load and parse the known_good.json file.""" + with open(path, "r", encoding="utf-8") as f: + data = json.load(f) + + # Expect a single JSON object containing a "modules" dict/object + if isinstance(data, dict) and isinstance(data.get("modules"), dict): + return data + raise SystemExit( + f"Invalid known_good.json at {path} (expected object with 'modules' dict)" + ) + + +def generate_git_override_blocks(modules_dict: Dict[str, Any], repo_commit_dict: Dict[str, str]) -> List[str]: + """Generate bazel_dep and git_override blocks for each module.""" + blocks = [] + + for name, module in modules_dict.items(): + repo = module.get("repo") + commit = module.get("hash") or module.get("commit") + patches = module.get("patches", []) + + # Allow overriding specific repos via command line + if repo in repo_commit_dict: + commit = repo_commit_dict[repo] + + # Check if module has a version, use different logic + version = module.get("version") + patches_lines = "" + if patches: + patches_lines = " patches = [\n" + for patch in patches: + patches_lines += f' "{patch}",\n' + patches_lines += " ],\n patch_strip = 1,\n" + + if 
version: + # If version is provided, use bazel_dep with single_version_override + block = ( + f'bazel_dep(name = "{name}")\n' + 'single_version_override(\n' + f' module_name = "{name}",\n' + f' version = "{version}",\n' + f'{patches_lines}' + ')\n' + ) + else: + if not repo or not commit: + logging.warning("Skipping module %s with missing repo or commit: repo=%s, commit=%s", name, repo, commit) + continue + + # Validate commit hash format (7-40 hex characters) + if not re.match(r'^[a-fA-F0-9]{7,40}$', commit): + logging.warning("Skipping module %s with invalid commit hash: %s", name, commit) + continue + + # If no version, use bazel_dep with git_override + + block = ( + f'bazel_dep(name = "{name}")\n' + 'git_override(\n' + f' module_name = "{name}",\n' + f' remote = "{repo}",\n' + f' commit = "{commit}",\n' + f'{patches_lines}' + ')\n' + ) + + blocks.append(block) + + return blocks + + +def generate_file_content(modules: Dict[str, Any], repo_commit_dict: Dict[str, str], timestamp: Optional[str] = None) -> str: + """Generate the complete content for score_modules.MODULE.bazel.""" + # License header assembled with parenthesis grouping (no indentation preserved in output). 
+ header = ( + "# *******************************************************************************\n" + "# Copyright (c) 2025 Contributors to the Eclipse Foundation\n" + "#\n" + "# See the NOTICE file(s) distributed with this work for additional\n" + "# information regarding copyright ownership.\n" + "#\n" + "# This program and the accompanying materials are made available under the\n" + "# terms of the Apache License Version 2.0 which is available at\n" + "# https://www.apache.org/licenses/LICENSE-2.0\n" + "#\n" + "# SPDX-License-Identifier: Apache-2.0\n" + "# *******************************************************************************\n" + "\n" + ) + + if timestamp: + header += ( + f"# Generated from known_good.json at {timestamp}\n" + "# Do not edit manually - use tools/update_module_from_known_good.py\n" + "\n" + ) + + blocks = generate_git_override_blocks(modules, repo_commit_dict) + + if not blocks: + raise SystemExit("No valid modules to generate git_override blocks") + + return header + "\n".join(blocks) + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Generate score_modules.MODULE.bazel from known_good.json" + ) + parser.add_argument( + "--known", + default="known_good.json", + help="Path to known_good.json (default: known_good.json)" + ) + parser.add_argument( + "--output", + default="score_modules.MODULE.bazel", + help="Output file path (default: score_modules.MODULE.bazel)" + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Print generated content instead of writing to file" + ) + parser.add_argument( + "--repo-override", + action="append", + help="Override commit for a specific repo (format: @)" + ) + + args = parser.parse_args() + + known_path = os.path.abspath(args.known) + output_path = os.path.abspath(args.output) + + if not os.path.exists(known_path): + raise SystemExit(f"known_good.json not found at {known_path}") + + # Parse repo overrides + repo_commit_dict = {} + if args.repo_override: + 
+        repo_pattern = re.compile(r'https://[a-zA-Z0-9.-]+/[a-zA-Z0-9._/-]+\.git@[a-fA-F0-9]{7,40}$')
+        for entry in args.repo_override:
+            if not repo_pattern.match(entry):
+                raise SystemExit(
+                    f"Invalid --repo-override format: {entry}\n"
+                    "Expected format: https://github.com/org/repo.git@<commit-hash>"
+                )
+            repo_url, commit_hash = entry.split("@", 1)
+            repo_commit_dict[repo_url] = commit_hash
+
+    # Load known_good.json
+    data = load_known_good(known_path)
+    modules = data.get("modules") or {}
+
+    if not modules:
+        raise SystemExit("No modules found in known_good.json")
+
+    # Generate file content
+    timestamp = data.get("timestamp") or datetime.now().isoformat()
+    content = generate_file_content(modules, repo_commit_dict, timestamp)
+
+    if args.dry_run:
+        print(f"Dry run: would write to {output_path}\n")
+        print("---- BEGIN GENERATED CONTENT ----")
+        print(content)
+        print("---- END GENERATED CONTENT ----")
+        print(f"\nGenerated {len(modules)} git_override entries")
+    else:
+        with open(output_path, "w", encoding="utf-8") as f:
+            f.write(content)
+        print(f"Generated {output_path} with {len(modules)} git_override entries")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/update_module_latest.py b/tools/update_module_latest.py
new file mode 100644
index 000000000..77fdee311
--- /dev/null
+++ b/tools/update_module_latest.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python3
+"""Update module commit hashes to latest on a given branch.
+
+Reads a known_good.json file containing a list of modules with keys:
+    name, hash, repo
+
+For each module's repo (expected to be a GitHub HTTPS URL), queries the GitHub API
+for the latest commit hash on the provided branch (default: main) and prints a
+summary. Optionally writes out an updated JSON file with refreshed hashes.
+ +Usage: + python tools/update_module_latest.py \ + --known-good score_reference_integration/known_good.json \ + [--branch main] [--output updated_known_good.json] + +Environment: + Optionally set GITHUB_TOKEN to increase rate limits / access private repos. + +Exit codes: + 0 success + 2 partial failure (at least one repo failed) + 3 fatal failure (e.g., cannot read JSON) +""" + +from __future__ import annotations + +import argparse +import shutil +import subprocess +import datetime as dt +import json +import os +import sys +from dataclasses import dataclass +from urllib.parse import urlparse + +try: + from github import Github, GithubException + HAS_PYGITHUB = True +except ImportError: + HAS_PYGITHUB = False + Github = None + GithubException = None + + +@dataclass +class Module: + name: str + hash: str + repo: str + version: str | None = None + patches: list[str] | None = None + branch: str = "main" + + @property + def owner_repo(self) -> str: + """Return owner/repo part extracted from HTTPS GitHub URL.""" + # Examples: + # https://github.com/eclipse-score/logging.git -> eclipse-score/logging + parsed = urlparse(self.repo) + if parsed.netloc != "github.com": + raise ValueError(f"Not a GitHub URL: {self.repo}") + + # Extract path, remove leading slash and .git suffix + path = parsed.path.lstrip("/").removesuffix(".git") + + # Split and validate owner/repo format + parts = path.split("/", 2) # Split max 2 times to get owner and repo + if len(parts) < 2 or not parts[0] or not parts[1]: + raise ValueError(f"Cannot parse owner/repo from: {self.repo}") + + return f"{parts[0]}/{parts[1]}" + + +def fetch_latest_commit(owner_repo: str, branch: str, token: str | None) -> str: + """Fetch latest commit sha for given owner_repo & branch using PyGithub.""" + if not HAS_PYGITHUB: + raise RuntimeError("PyGithub not installed. 
Install it with: pip install PyGithub") + try: + gh = Github(token) if token else Github() + repo = gh.get_repo(owner_repo) + branch_obj = repo.get_branch(branch) + return branch_obj.commit.sha + except GithubException as e: + raise RuntimeError(f"GitHub API error for {owner_repo}:{branch} - {e.status}: {e.data.get('message', str(e))}") from e + except Exception as e: + raise RuntimeError(f"Error fetching {owner_repo}:{branch} - {e}") from e + + +def fetch_latest_commit_gh(owner_repo: str, branch: str) -> str: + """Fetch latest commit using GitHub CLI 'gh' if installed. + + Uses: gh api repos/{owner_repo}/branches/{branch} --jq .commit.sha + Raises RuntimeError on failure. + """ + if not shutil.which("gh"): + raise RuntimeError("'gh' CLI not found in PATH") + cmd = [ + "gh", + "api", + f"repos/{owner_repo}/branches/{branch}", + "--jq", + ".commit.sha", + ] + try: + res = subprocess.run(cmd, check=True, capture_output=True, text=True, timeout=30) + except subprocess.CalledProcessError as e: + raise RuntimeError(f"gh api failed: {e.stderr.strip() or e}") from e + sha = res.stdout.strip() + if not sha: + raise RuntimeError("Empty sha returned by gh") + return sha + + +def load_known_good(path: str) -> dict: + with open(path, "r", encoding="utf-8") as f: + return json.load(f) + + +def write_known_good(path: str, original: dict, modules: list[Module]) -> None: + out = dict(original) # shallow copy + out["timestamp"] = dt.datetime.utcnow().replace(microsecond=0).isoformat() + "Z" + out["modules"] = {} + for m in modules: + mod_dict = {"repo": m.repo, "hash": m.hash} + if m.patches: + mod_dict["patches"] = m.patches + if m.branch: + mod_dict["branch"] = m.branch + out["modules"][m.name] = mod_dict + with open(path, "w", encoding="utf-8") as f: + json.dump(out, f, indent=4, sort_keys=False) + f.write("\n") + + +def parse_args(argv: list[str]) -> argparse.Namespace: + p = argparse.ArgumentParser(description="Update module hashes to latest commit on branch") + 
p.add_argument( + "--known-good", + default="known_good.json", + help="Path to known_good.json file (default: known_good.json in CWD)", + ) + p.add_argument("--branch", default="main", help="Git branch to fetch latest commits from (default: main)") + p.add_argument("--output", help="Optional output path to write updated JSON") + p.add_argument("--fail-fast", action="store_true", help="Stop on first failure instead of continuing") + p.add_argument("--no-gh", action="store_true", help="Disable GitHub CLI usage even if installed; fall back to HTTP API; GITHUB_TOKEN has to be known in the environment") + return p.parse_args(argv) + + +def main(argv: list[str]) -> int: + args = parse_args(argv) + try: + data = load_known_good(args.known_good) + except OSError as e: + print(f"ERROR: Cannot read known_good file: {e}", file=sys.stderr) + return 3 + except json.JSONDecodeError as e: + print(f"ERROR: Invalid JSON: {e}", file=sys.stderr) + return 3 + + modules_raw = data.get("modules", {}) + modules: list[Module] = [] + for name, m in modules_raw.items(): + try: + version = m.get("version") + hash_val = m.get("hash", "") + patches = m.get("patches") + repo = m.get("repo") + branch = m.get("branch") + if not repo: + print(f"WARNING: skipping module {name} with missing repo", file=sys.stderr) + continue + modules.append(Module( + name=name, + hash=hash_val, + repo=repo, + version=version, + patches=patches, + branch=branch + )) + except KeyError as e: + print(f"WARNING: skipping module {name} missing key {e}: {m}", file=sys.stderr) + if not modules: + print("ERROR: No modules found to update.", file=sys.stderr) + return 3 + + token = os.environ.get("GITHUB_TOKEN") + failures = 0 + updated: list[Module] = [] + # Default: use gh if available unless --no-gh specified + use_gh = (not args.no_gh) and shutil.which("gh") is not None + + # If PyGithub is not available and gh CLI is not available, error out + if not use_gh and not HAS_PYGITHUB: + print("ERROR: Neither 'gh' CLI nor 
PyGithub library found.", file=sys.stderr) + print("Please install PyGithub (pip install PyGithub) or install GitHub CLI.", file=sys.stderr) + return 3 + + if not args.no_gh and not use_gh: + print("INFO: 'gh' CLI not found; using direct GitHub API", file=sys.stderr) + if args.no_gh and shutil.which("gh") is not None: + print("INFO: --no-gh specified; ignoring installed 'gh' CLI", file=sys.stderr) + + for mod in modules: + try: + # Use module-specific branch if available, otherwise use command-line branch + branch = mod.branch if mod.branch else args.branch + if use_gh: + latest = fetch_latest_commit_gh(mod.owner_repo, branch) + else: + latest = fetch_latest_commit(mod.owner_repo, branch, token) + updated.append(Module(name=mod.name, hash=latest, repo=mod.repo, version=mod.version, patches=mod.patches, branch=mod.branch)) + + # Display format: if version exists, show "version -> hash", otherwise "hash -> hash" + if mod.version: + print(f"{mod.name}: {mod.version} -> {latest[:8]} (branch {branch})") + else: + print(f"{mod.name}: {mod.hash[:8]} -> {latest[:8]} (branch {branch})") + except Exception as e: # noqa: BLE001 + failures += 1 + print(f"ERROR {mod.name}: {e}", file=sys.stderr) + if args.fail_fast: + break + # Preserve old hash if continuing + updated.append(mod) + + if args.output and updated: + try: + write_known_good(args.output, data, updated) + print(f"Updated JSON written to {args.output}") + except OSError as e: + print(f"ERROR: Failed writing output file: {e}", file=sys.stderr) + return 3 + + if failures: + print(f"Completed with {failures} failure(s).", file=sys.stderr) + return 2 + return 0 + + +if __name__ == "__main__": # pragma: no cover + sys.exit(main(sys.argv[1:])) +