diff --git a/.github/workflows/tooling-unit-tests.yml b/.github/workflows/tooling-unit-tests.yml
index 333b4ce024..490d399e8b 100644
--- a/.github/workflows/tooling-unit-tests.yml
+++ b/.github/workflows/tooling-unit-tests.yml
@@ -96,3 +96,22 @@ jobs:
       - name: Run PyTest
         run: |
           pytest scripts/guideline_recategorization/recategorize_test.py
+
+  release-tests:
+    name: Run release tests
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Install Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.9"
+
+      - name: Install Python dependencies
+        run: pip install -r scripts/release/requirements.txt
+
+      - name: Run PyTest
+        run: |
+          pytest scripts/release/update_release_assets_test.py
\ No newline at end of file
diff --git a/.github/workflows/update-release.yml b/.github/workflows/update-release.yml
index 9a7d95c846..7a2ae00797 100644
--- a/.github/workflows/update-release.yml
+++ b/.github/workflows/update-release.yml
@@ -54,13 +54,12 @@ jobs:
           GITHUB_TOKEN: ${{ github.token }}
           RELEASE_ENGINEERING_TOKEN: ${{ steps.generate-token.outputs.token }}
         run: |
-          python scripts/release/update-release-assets.py \
+          python scripts/release/update_release_assets.py \
             --head-sha $HEAD_SHA \
             --layout scripts/release/release-layout.yml \
             --repo "$GITHUB_REPOSITORY" \
             --github-token "$GITHUB_REPOSITORY:$GITHUB_TOKEN" "github/codeql-coding-standards-release-engineering:$RELEASE_ENGINEERING_TOKEN" \
-            --skip-checkrun "release-status" \
-            --skip-checks
+            --skip-checkrun "release-status"
 
       - name: Update release notes
         env:
diff --git a/scripts/release/release-layout.yml b/scripts/release/release-layout.yml
index 3ffc3ba0de..5e496120f2 100644
--- a/scripts/release/release-layout.yml
+++ b/scripts/release/release-layout.yml
@@ -20,4 +20,6 @@ layout:
   - file: docs/user_manual.md
   checksums.txt:
   - shell: |
-      sha256sum ./* > checksums.txt
\ No newline at end of file
+      sha256sum ${{ layout.root }}/* > checksums.txt
+      # Remove the layout root from the paths in checksums.txt
+      sed -i -e "s|${{ layout.root }}/||g" checksums.txt
\ No newline at end of file
diff --git a/scripts/release/requirements.txt b/scripts/release/requirements.txt
index 79ccbcefbe..63874950c1 100644
--- a/scripts/release/requirements.txt
+++ b/scripts/release/requirements.txt
@@ -1,4 +1,5 @@
 semantic-version==2.10.0
 PyGithub==1.59.1
 PyYAML==6.0.1
-GitPython==3.1.36
\ No newline at end of file
+GitPython==3.1.36
+pytest==7.4.3
\ No newline at end of file
diff --git a/scripts/release/test-data/release-layout.yml b/scripts/release/test-data/release-layout.yml
new file mode 100644
index 0000000000..9c6850fd5d
--- /dev/null
+++ b/scripts/release/test-data/release-layout.yml
@@ -0,0 +1,19 @@
+version: 0.1.0
+
+layout:
+  hello-world.txt:
+  - shell: |
+      echo "hello world!" > hello-world.txt
+  hello-world.zip:
+  - shell: |
+      echo "hello!" > hello.txt
+      echo "world!" > world.txt
+      # reset the creation and modification times to a fixed value
+      touch -a -m -t 197001010000.00 hello.txt world.txt
+  checksums.txt:
+  - shell: |
+      shasum -a 256 ${{ layout.root }}/* > checksums.txt
+      # Remove the layout root from the checksums.txt
+      # We don't use inplace because of BSD vs GNU shenanigans
+      sed -e "s|${{ layout.root }}/||g" checksums.txt > checksums-rewritten.txt
+      mv checksums-rewritten.txt checksums.txt
\ No newline at end of file
diff --git a/scripts/release/update-release-assets.py b/scripts/release/update_release_assets.py
similarity index 93%
rename from scripts/release/update-release-assets.py
rename to scripts/release/update_release_assets.py
index 79b06cbcfe..1beb543c77 100644
--- a/scripts/release/update-release-assets.py
+++ b/scripts/release/update_release_assets.py
@@ -1,5 +1,5 @@
 from __future__ import annotations # This enables postponed evaluation of type annotations. Required for typing.TYPE_CHECKING. See https://peps.python.org/pep-0563/
-from typing import TYPE_CHECKING, List, Union, cast, Dict, Any
+from typing import TYPE_CHECKING, List, Union, cast, Dict, Any, TypeVar, Callable, Sequence, Optional
 import shutil
 from tempfile import TemporaryDirectory
 import subprocess
@@ -12,7 +12,7 @@
 
 if TYPE_CHECKING:
     from github import WorkflowRun, Repository
-    
+
 script_path = Path(__file__).resolve()
 
 root_path = script_path.parent.parent.parent
@@ -30,7 +30,7 @@ def get_check_runs(self: Repository.Repository, ref: str, **kwargs: str) -> Pagi
             f"{self.url}/commits/{ref}/check-runs",
             firstParams=None,
             list_item="check_runs")
-    
+
 Repository.Repository = MyRepository
 
 from github import WorkflowRun, Artifact
@@ -51,7 +51,7 @@ def download_logs(self, path: Path) -> None:
         if self._requester._Requester__auth is not None: # type: ignore
             headers["Authorization"] = f"{self._requester._Requester__auth.token_type} {self._requester._Requester__auth.token}" # type: ignore
         headers["User-Agent"] = self._requester._Requester__userAgent # type: ignore
-        
+
         resp = requests.get(url, headers=headers, allow_redirects=True)
 
         if resp.status_code != 200:
@@ -70,7 +70,7 @@ def download_artifacts(self, path: Path) -> None:
             if self._requester._Requester__auth is not None: # type: ignore
                 headers["Authorization"] = f"{self._requester._Requester__auth.token_type} {self._requester._Requester__auth.token}" # type: ignore
             headers["User-Agent"] = self._requester._Requester__userAgent # type: ignore
-            
+
             resp = requests.get(artifact.archive_download_url, headers=headers, allow_redirects=True)
 
             if resp.status_code != 200:
@@ -93,7 +93,7 @@ def download_artifact(self, name: str, path: Path) -> None:
         if self._requester._Requester__auth is not None: # type: ignore
             headers["Authorization"] = f"{self._requester._Requester__auth.token_type} {self._requester._Requester__auth.token}" # type: ignore
         headers["User-Agent"] = self._requester._Requester__userAgent # type: ignore
-        
+
         resp = requests.get(artifact.archive_download_url, headers=headers, allow_redirects=True)
 
         if resp.status_code != 200:
@@ -101,7 +101,7 @@ def download_artifact(self, name: str, path: Path) -> None:
 
         with (path / f"{artifact.name}.zip").open("wb") as f:
             f.write(resp.content)
-    
+
 
 WorkflowRun.WorkflowRun = MyWorkflowRun
 
@@ -124,12 +124,16 @@ def make(self, directory: Path, workflow_runs: List[WorkflowRun.WorkflowRun]) ->
             elif action_type == "workflow-artifact":
                 actions.append(WorkflowArtifactAction(workflow_runs, **cast(Dict[str, Any], action_args)))
             elif action_type == "shell":
-                actions.append(ShellAction(action_args))
+                modifiers : List[Callable[[str], str]] = [
+                    lambda cmd: re.sub(pattern=r"\${{\s*coding-standards\.root\s*}}", repl=str(root_path), string=cmd),
+                    lambda cmd: re.sub(pattern=r"\${{\s*layout\.root\s*}}", repl=str(directory), string=cmd)
+                ]
+                actions.append(ShellAction(action_args, modifiers=modifiers))
             elif action_type == "file":
                 actions.append(FileAction(action_args))
             else:
                 raise Exception(f"Unknown action type {action_type}")
-        
+
         artifacts.append(ReleaseArtifact(artifact, actions, self.skip_checks))
 
         for artifact in artifacts:
@@ -153,7 +157,7 @@ def run(self) -> List[Path]:
             print(f"Downloading logs for {workflow_run.name}")
             workflow_run.download_logs(Path(self.temp_workdir.name)) # type: ignore
         return list(map(Path, Path(self.temp_workdir.name).glob("**/*")))
-    
+
 
 class WorkflowArtifactAction():
     def __init__(self, workflow_runs: List[WorkflowRun.WorkflowRun], **kwargs: str) -> None:
@@ -176,17 +180,29 @@ def run(self) -> List[Path]:
             print(f"Downloading artifacts for {workflow_run.name} to {self.temp_workdir.name}")
             workflow_run.download_artifacts(Path(self.temp_workdir.name)) # type: ignore
         return list(map(Path, Path(self.temp_workdir.name).glob("**/*")))
-    
+
 class ShellAction():
-    def __init__(self, command: str) -> None:
+    def __init__(self, command: str, **kwargs: Any) -> None:
         self.command = command.strip()
         self.temp_workdir = TemporaryDirectory()
+        self.options = kwargs
+
+    def _rewrite_command(self) -> str:
+        E = TypeVar("E")
+        R = TypeVar("R")
+        def lfold(fn: Callable[[R, E], R], lst: Sequence[E], init: R) -> R:
+            return lfold(fn, lst[1:], fn(init, lst[0])) if lst else init
+        if 'modifiers' in self.options:
+            return lfold(lambda acc, x: x(acc), self.options['modifiers'], self.command)
+        else:
+            return self.command
 
     def run(self) -> List[Path]:
-        concrete_command = re.sub(pattern=r"\${{\s*coding-standards\.root\s*}}", repl=str(root_path), string=self.command)
+        #concrete_command = re.sub(pattern=r"\${{\s*coding-standards\.root\s*}}", repl=str(root_path), string=self.command)
+        concrete_command = self._rewrite_command()
         subprocess.run(concrete_command, cwd=self.temp_workdir.name, check=True, shell=True)
         return list(map(Path, Path(self.temp_workdir.name).glob("**/*")))
-    
+
 class FileAction():
     def __init__(self, path: Path) -> None:
         self.path = path
@@ -200,7 +216,7 @@ def __init__(self, name: str, actions: List[Union[WorkflowLogAction, WorkflowArt
         self.actions = actions
         self.allow_no_files = allow_no_files
 
-    def make(self, directory: Path) -> Path:
+    def make(self, directory: Path) -> Optional[Path]:
         files: list[Path] = [file for action in self.actions for file in action.run()]
         if len(files) == 0:
             if not self.allow_no_files:
@@ -212,8 +228,8 @@ def make(self, directory: Path) -> Path:
             extension = "".join(self.name.suffixes)[1:]
             if not extension in ["zip", "tar", "tar.gz", "tar.bz2", "tar.xz"]:
                 raise Exception(f"Artifact {self.name} is not a support archive file, but has multiple files associated with it!")
-            
-            ext_format_map = { 
+
+            ext_format_map = {
                 "zip": "zip",
                 "tar": "tar",
                 "tar.gz": "gztar",
@@ -225,7 +241,7 @@ def make(self, directory: Path) -> Path:
                 temp_dir_path = Path(temp_dir)
                 for file in files:
                     shutil.copy(file, temp_dir_path / file.name)
-                
+
                 return Path(shutil.make_archive(str(directory / self.name.with_suffix("")), ext_format_map[extension], root_dir=temp_dir_path))
 
 def main(args: 'argparse.Namespace') -> int:
@@ -248,13 +264,13 @@ def main(args: 'argparse.Namespace') -> int:
     if len(pull_candidates) != 1:
         print(f"Error: expected exactly one PR for SHA {args.head_sha}, but found {len(pull_candidates)}", file=sys.stderr)
         return 1
-    
+
     pull_request = pull_candidates[0]
 
     if pull_request.state != "open":
         print(f"Error: PR {pull_request.url} is not open", file=sys.stderr)
         return 1
-    
+
     print(f"Found PR {pull_request.url} based on {pull_request.base.ref}")
 
     rc_branch_regex = r"^rc/(?P.*)$"
@@ -286,7 +302,7 @@ def main(args: 'argparse.Namespace') -> int:
 
     action_workflow_run_url_regex = r"^https://(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/actions/runs/(?P\d+)$"
     action_workflow_job_run_url_regex = r"^https://(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/actions/runs/(?P\d+)/job/(?P\d+)$"
-    
+
     workflow_runs: List[WorkflowRun.WorkflowRun] = []
     for check_run in check_runs: # type: ignore
         check_run = cast(CheckRun.CheckRun, check_run)
@@ -306,7 +322,7 @@ def main(args: 'argparse.Namespace') -> int:
         else:
             print(f"Unable to handle checkrun {check_run.name} with id {check_run.id} with {check_run.details_url}")
             return 1
-    
+
     print("Filtering workflow runs to only include the latest run for each workflow.")
     workflow_runs_per_id: Dict[int, WorkflowRun.WorkflowRun] = {}
     for workflow_run in workflow_runs:
diff --git a/scripts/release/update_release_assets_test.py b/scripts/release/update_release_assets_test.py
new file mode 100644
index 0000000000..35e5baf0bb
--- /dev/null
+++ b/scripts/release/update_release_assets_test.py
@@ -0,0 +1,30 @@
+from pathlib import Path
+from tempfile import TemporaryDirectory
+import yaml
+from update_release_assets import ReleaseLayout
+
+SCRIPT_PATH = Path(__file__)
+TEST_DIR = SCRIPT_PATH.parent / 'test-data'
+
+def test_release_layout():
+    spec = TEST_DIR / 'release-layout.yml'
+    release_layout = ReleaseLayout(spec)
+    with TemporaryDirectory() as tmp_dir:
+        tmp_path = Path(tmp_dir)
+        release_layout.make(tmp_path, [])
+
+        for artifact in yaml.safe_load(spec.read_text())['layout'].keys():
+            artifact_path = tmp_path / artifact
+            assert artifact_path.is_file()
+
+            if artifact == "hello-world.txt":
+                content = artifact_path.read_text()
+                assert content == "hello world!\n"
+
+            if artifact == "checksums.txt":
+                content = artifact_path.read_text()
+                # The hash of the hello-world.txt is deterministic, so we can assert it here.
+                assert "ecf701f727d9e2d77c4aa49ac6fbbcc997278aca010bddeeb961c10cf54d435a  hello-world.txt" in content
+                # The hash of the hello-world.zip is not deterministic, so we can't assert its hash.
+                assert "hello-world.zip" in content
+