import * as fs from 'fs';
+import { Octokit } from "@octokit/rest";
+import { Git } from './git.js';
+import path from 'path';
+import Axios from 'axios';
+import extract from 'extract-zip';
+
+const octokit = new Octokit({
+  auth: process.env.GITHUB_TOKEN,
+  // log: console,
+});
+
+const [_, owner, repo] = (await Git.repository).split('/');
+const defaultArgs = { owner: owner, repo: repo }
+
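For context, the `[_, owner, repo]` destructuring assumes that `Git.repository` resolves to a slash-separated string whose second and third segments are the owner and repository name; the actual implementation lives in `./git.js` and is not part of this diff. A minimal sketch of that assumption, purely illustrative:

```ts
// Hypothetical value, only to illustrate the shape the destructuring expects.
const repository = 'github.com/acme/widgets';
const [host, exampleOwner, exampleRepo] = repository.split('/');
// host === 'github.com', exampleOwner === 'acme', exampleRepo === 'widgets'
```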
+export function downloadFile(url: string, path: string) {
+  const writer = fs.createWriteStream(path);
+  return Axios({
+    method: 'get',
+    url: url,
+    responseType: 'stream',
+  }).then(response => {
+    return new Promise((resolve, reject) => {
+      response.data.pipe(writer);
+      let error: Error;
+      writer.on('error', err => {
+        error = err;
+        writer.close();
+        reject(err);
+      });
+      writer.on('close', () => {
+        if (!error) resolve(true);
+      });
+    });
+  });
+}
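A minimal usage sketch for the `downloadFile` helper above; the URL and target path are placeholders. The returned promise resolves only once the write stream has closed, so the file is fully flushed before anything reads it:

```ts
// Illustrative only: stream a (hypothetical) archive to a temporary location.
await downloadFile(
  'https://example.com/archive.zip', // placeholder URL
  '/tmp/archive.zip',                // placeholder target path
);
```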

export const GitHub = {
  writeOutput(name: string, value: any): void {
    if (typeof process.env.GITHUB_OUTPUT == 'string' && process.env.GITHUB_OUTPUT.length > 0) {
      fs.appendFileSync(process.env.GITHUB_OUTPUT, `${name}=${value}\n`);
    }
-    console.log(`Output ${name}`, value);
+    console.log(`Output ${name} = ${value}`);
  },
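`writeOutput` appends `name=value` to the file that `GITHUB_OUTPUT` points at, which is the mechanism GitHub Actions uses to expose step outputs; when the variable is unset (e.g. running locally) it only logs. A hedged usage sketch with a made-up output name:

```ts
// Hypothetical output; if this script runs inside a workflow step with id "build",
// later steps could read it as ${{ steps.build.outputs.version }}.
GitHub.writeOutput('version', '1.2.3');
```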

-  downloadPreviousArtifact(branch: string, targetDir: string, artifactName: string): void {
-    fs.mkdirSync(targetDir, { recursive: true });
+  downloadPreviousArtifact(branch: string, targetDir: string, artifactName: string): Promise<void> {
+    return (async () => {
+      fs.mkdirSync(targetDir, { recursive: true });
+
+      const workflow = (await octokit.actions.listRepoWorkflows(defaultArgs))
+        .data.workflows.find((w) => w.name == process.env.GITHUB_WORKFLOW);
+      if (workflow == undefined) {
+        console.log(`Skipping previous artifact '${artifactName}' download for branch '${branch}' - not running in CI`);
+        return;
+      }
+      console.log(`Trying to download previous artifact '${artifactName}' for branch '${branch}'`);
+
+      const workflowRuns = await octokit.actions.listWorkflowRuns({
+        ...defaultArgs,
+        workflow_id: workflow.id,
+        branch: branch,
+        status: 'success',
+      });
+
+      if (workflowRuns.data.total_count == 0) {
+        console.warn(`Couldn't find any successful run for workflow '${workflow.name}'`);
+        return;
+      }
+
+      const artifact = (await octokit.actions.listWorkflowRunArtifacts({
+        ...defaultArgs,
+        run_id: workflowRuns.data.workflow_runs[0].id,
+      })).data.artifacts.find((it) => it.name == artifactName);

-    // if (workflow == null) {
-    //   println("Skipping previous artifact '$artifactName' download for branch '$branch' - not running in CI")
-    //   return
-    // }
-    console.log(`Trying to download previous artifact '${artifactName}' for branch '${branch}'`)
+      if (artifact == undefined) {
+        console.warn(`Couldn't find any artifact matching ${artifactName}`);
+        return;
+      }

-    // val run = workflow!!.listRuns()
-    //   .firstOrNull { it.headBranch == branch && it.conclusion == GHWorkflowRun.Conclusion.SUCCESS }
-    // if (run == null) {
-    //   println("Couldn't find any successful run workflow ${workflow!!.name}")
-    //   return
-    // }
+      console.log(`Downloading artifact ${artifact.archive_download_url} and extracting to ${targetDir}`);

-    // val artifact = run.listArtifacts().firstOrNull { it.name == artifactName }
-    // if (artifact == null) {
-    //   println("Couldn't find any artifact matching $artifactName")
-    //   return
-    // }
+      const tempFilePath = path.resolve(targetDir, '../tmp-artifacts.zip');
+      if (fs.existsSync(tempFilePath)) {
+        fs.unlinkSync(tempFilePath);
+      }

-    // println("Downloading artifact ${artifact.archiveDownloadUrl} and extracting to $targetDir")
-    // artifact.download {
-    //   val zipStream = ZipInputStream(it)
-    //   var entry: ZipEntry?
-    //   // while there are entries I process them
-    //   while (true) {
-    //     entry = zipStream.nextEntry
-    //     if (entry == null) {
-    //       break
-    //     }
-    //     if (entry.isDirectory) {
-    //       Path.of(entry.name).createDirectories()
-    //     } else {
-    //       println("Extracting ${entry.name}")
-    //       val outFile = FileOutputStream(targetDir.resolve(entry.name).toFile())
-    //       while (zipStream.available() > 0) {
-    //         val c = zipStream.read()
-    //         if (c > 0) {
-    //           outFile.write(c)
-    //         } else {
-    //           break
-    //         }
-    //       }
-    //       outFile.close()
-    //     }
-    //   }
-    // }
+      try {
+        await downloadFile(artifact.archive_download_url, tempFilePath);
+        await extract(tempFilePath, { dir: targetDir });
+      } finally {
+        fs.unlinkSync(tempFilePath);
+      }
+    })();
  },
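A usage sketch for the reworked `downloadPreviousArtifact`; the branch, directory, and artifact name below are placeholders. Since `extract-zip` expects an absolute extraction directory, the example resolves the path first:

```ts
// Illustrative call: fetch the artifact produced by the most recent successful
// run of this workflow on 'main' (placeholder) and unpack it locally.
await GitHub.downloadPreviousArtifact(
  'main',                          // placeholder branch
  path.resolve('previous-build'),  // placeholder target directory (absolute)
  'build-output',                  // placeholder artifact name
);
```

Note that the call only does real work when `GITHUB_WORKFLOW` matches one of the repository's workflows, i.e. when the script actually runs inside CI; otherwise it logs a message and returns early.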

  // fun addOrUpdateComment(commentBuilder: PrCommentBuilder) {