Skip to content

Commit 9c5c5ea

Browse files
committed
ci: collect sdk metrics
1 parent 52d02cb commit 9c5c5ea

File tree

5 files changed

+343
-52
lines changed

5 files changed

+343
-52
lines changed

.github/workflows/metrics.yml

Lines changed: 81 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,81 @@
1+
name: Collect SDK metrics
2+
on:
3+
push:
4+
paths:
5+
- .github/workflows/metrics.yml
6+
- packages/**
7+
- patches/**
8+
- lerna.json
9+
- package.json
10+
- tsconfig.json
11+
- yarn.lock
12+
branches-ignore:
13+
- deps/**
14+
- dependabot/**
15+
tags-ignore: ['**']
16+
17+
env:
18+
CACHED_DEPENDENCY_PATHS: |
19+
${{ github.workspace }}/node_modules
20+
${{ github.workspace }}/packages/*/node_modules
21+
~/.cache/ms-playwright/
22+
~/.cache/mongodb-binaries/
23+
24+
jobs:
25+
cancel-previous-workflow:
26+
runs-on: ubuntu-latest
27+
steps:
28+
- name: Cancel Previous Runs
29+
uses: styfle/cancel-workflow-action@b173b6ec0100793626c2d9e6b90435061f4fc3e5 # pin@0.11.0
30+
with:
31+
access_token: ${{ github.token }}
32+
33+
replay:
34+
name: Replay SDK metrics
35+
runs-on: ubuntu-20.04
36+
37+
steps:
38+
- uses: actions/checkout@v3
39+
40+
- name: Set up Node
41+
uses: volta-cli/action@v4
42+
43+
- name: Compute dependency cache key
44+
id: compute_lockfile_hash
45+
# we use a hash of yarn.lock as our cache key, because if it hasn't changed, our dependencies haven't changed,
46+
# so no need to reinstall them
47+
run: echo "hash=${{ hashFiles('yarn.lock') }}" >> "$GITHUB_OUTPUT"
48+
49+
- name: Check dependency cache
50+
uses: actions/cache@v3
51+
id: cache_dependencies
52+
with:
53+
path: ${{ env.CACHED_DEPENDENCY_PATHS }}
54+
key: ${{ steps.compute_lockfile_hash.outputs.hash }}
55+
56+
- name: Install dependencies
57+
if: steps.cache_dependencies.outputs.cache-hit == ''
58+
run: yarn install --ignore-engines --frozen-lockfile
59+
60+
- name: Build
61+
run: |
62+
yarn install --ignore-engines --frozen-lockfile
63+
yarn deps
64+
working-directory: packages/replay/metrics
65+
66+
- name: Collect
67+
run: yarn ci:collect
68+
working-directory: packages/replay/metrics
69+
70+
- name: Process
71+
id: process
72+
run: yarn ci:process
73+
working-directory: packages/replay/metrics
74+
env:
75+
GITHUB_TOKEN: ${{ github.token }}
76+
77+
- name: Upload results
78+
uses: actions/upload-artifact@v3
79+
with:
80+
name: ${{ steps.process.outputs.artifactName }}
81+
path: ${{ steps.process.outputs.artifactPath }}

packages/replay/metrics/configs/ci/process.ts

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -1,7 +1,7 @@
11
import path from 'path';
22
// import { AnalyzerItemMetric, ResultsAnalyzer } from '../../src/results/analyzer.js';
33
import { Result } from '../../src/results/result.js';
4-
// import { ResultsSet } from '../../src/results/results-set.js';
4+
import { ResultsSet } from '../../src/results/results-set.js';
55
import { Git } from '../../src/util/git.js';
66
import { GitHub } from '../../src/util/github.js';
77
import { latestResultFile, previousResultsDir, baselineResultsDir, artifactName } from './env.js';
@@ -15,7 +15,7 @@ GitHub.downloadPreviousArtifact(await Git.branch, previousResultsDir, artifactNa
1515
GitHub.writeOutput("artifactName", artifactName)
1616
GitHub.writeOutput("artifactPath", path.resolve(previousResultsDir));
1717

18-
// const resultsSet = new ResultsSet(outDir);
18+
const resultsSet = new ResultsSet(previousResultsDir);
1919
// const analysis = ResultsAnalyzer.analyze(latestResult, resultsSet);
2020

2121
// val prComment = PrCommentBuilder()
@@ -34,4 +34,4 @@ GitHub.writeOutput("artifactPath", path.resolve(previousResultsDir));
3434
// GitHub.addOrUpdateComment(prComment);
3535

3636
// Copy the latest test run results to the archived result dir.
37-
// await resultsSet.add(latestResultFile, true);
37+
await resultsSet.add(latestResultFile, true);

packages/replay/metrics/package.json

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -14,7 +14,10 @@
1414
"ci:process": "ts-node-esm ./configs/ci/process.ts"
1515
},
1616
"dependencies": {
17+
"@octokit/rest": "^19.0.5",
1718
"@types/node": "^18.11.17",
19+
"axios": "^1.2.2",
20+
"extract-zip": "^2.0.1",
1821
"filesize": "^10.0.6",
1922
"puppeteer": "^19.4.1",
2023
"simple-git": "^3.15.1",

packages/replay/metrics/src/util/github.ts

Lines changed: 80 additions & 46 deletions
Original file line number · Diff line number · Diff line change
@@ -1,62 +1,96 @@
11
import * as fs from 'fs';
2+
import { Octokit } from "@octokit/rest";
3+
import { Git } from './git.js';
4+
import path from 'path';
5+
import Axios from 'axios';
6+
import extract from 'extract-zip';
7+
8+
const octokit = new Octokit({
9+
auth: process.env.GITHUB_TOKEN,
10+
// log: console,
11+
});
12+
13+
const [_, owner, repo] = (await Git.repository).split('/');
14+
const defaultArgs = { owner: owner, repo: repo }
15+
16+
export function downloadFile(url: string, path: string) {
17+
const writer = fs.createWriteStream(path);
18+
return Axios({
19+
method: 'get',
20+
url: url,
21+
responseType: 'stream',
22+
}).then(response => {
23+
return new Promise((resolve, reject) => {
24+
response.data.pipe(writer);
25+
let error: Error;
26+
writer.on('error', err => {
27+
error = err;
28+
writer.close();
29+
reject(err);
30+
});
31+
writer.on('close', () => {
32+
if (!error) resolve(true);
33+
});
34+
});
35+
});
36+
}
237

338
export const GitHub = {
439
writeOutput(name: string, value: any): void {
540
if (typeof process.env.GITHUB_OUTPUT == 'string' && process.env.GITHUB_OUTPUT.length > 0) {
641
fs.appendFileSync(process.env.GITHUB_OUTPUT, `${name}=${value}\n`);
742
}
8-
console.log(`Output ${name}`, value);
43+
console.log(`Output ${name} = ${value}`);
944
},
1045

11-
downloadPreviousArtifact(branch: string, targetDir: string, artifactName: string): void {
12-
fs.mkdirSync(targetDir, { recursive: true });
46+
downloadPreviousArtifact(branch: string, targetDir: string, artifactName: string): Promise<void> {
47+
return (async () => {
48+
fs.mkdirSync(targetDir, { recursive: true });
49+
50+
const workflow = (await octokit.actions.listRepoWorkflows(defaultArgs))
51+
.data.workflows.find((w) => w.name == process.env.GITHUB_WORKFLOW);
52+
if (workflow == undefined) {
53+
console.log(`Skipping previous artifact '${artifactName}' download for branch '${branch}' - not running in CI`);
54+
return;
55+
}
56+
console.log(`Trying to download previous artifact '${artifactName}' for branch '${branch}'`);
57+
58+
const workflowRuns = await octokit.actions.listWorkflowRuns({
59+
...defaultArgs,
60+
workflow_id: workflow.id,
61+
branch: branch,
62+
status: 'success',
63+
});
64+
65+
if (workflowRuns.data.total_count == 0) {
66+
console.warn(`Couldn't find any successful run for workflow '${workflow.name}'`);
67+
return;
68+
}
69+
70+
const artifact = (await octokit.actions.listWorkflowRunArtifacts({
71+
...defaultArgs,
72+
run_id: workflowRuns.data.workflow_runs[0].id,
73+
})).data.artifacts.find((it) => it.name == artifactName);
1374

14-
// if (workflow == null) {
15-
// println("Skipping previous artifact '$artifactName' download for branch '$branch' - not running in CI")
16-
// return
17-
// }
18-
console.log(`Trying to download previous artifact '${artifactName}' for branch '${branch}'`)
75+
if (artifact == undefined) {
76+
console.warn(`Couldn't find any artifact matching ${artifactName}`);
77+
return;
78+
}
1979

20-
// val run = workflow!!.listRuns()
21-
// .firstOrNull { it.headBranch == branch && it.conclusion == GHWorkflowRun.Conclusion.SUCCESS }
22-
// if (run == null) {
23-
// println("Couldn't find any successful run workflow ${workflow!!.name}")
24-
// return
25-
// }
80+
console.log(`Downloading artifact ${artifact.archive_download_url} and extracting to $targetDir`);
2681

27-
// val artifact = run.listArtifacts().firstOrNull { it.name == artifactName }
28-
// if (artifact == null) {
29-
// println("Couldn't find any artifact matching $artifactName")
30-
// return
31-
// }
82+
const tempFilePath = path.resolve(targetDir, '../tmp-artifacts.zip');
83+
if (fs.existsSync(tempFilePath)) {
84+
fs.unlinkSync(tempFilePath);
85+
}
3286

33-
// println("Downloading artifact ${artifact.archiveDownloadUrl} and extracting to $targetDir")
34-
// artifact.download {
35-
// val zipStream = ZipInputStream(it)
36-
// var entry: ZipEntry?
37-
// // while there are entries I process them
38-
// while (true) {
39-
// entry = zipStream.nextEntry
40-
// if (entry == null) {
41-
// break
42-
// }
43-
// if (entry.isDirectory) {
44-
// Path.of(entry.name).createDirectories()
45-
// } else {
46-
// println("Extracting ${entry.name}")
47-
// val outFile = FileOutputStream(targetDir.resolve(entry.name).toFile())
48-
// while (zipStream.available() > 0) {
49-
// val c = zipStream.read()
50-
// if (c > 0) {
51-
// outFile.write(c)
52-
// } else {
53-
// break
54-
// }
55-
// }
56-
// outFile.close()
57-
// }
58-
// }
59-
// }
87+
try {
88+
await downloadFile(artifact.archive_download_url, tempFilePath);
89+
await extract(tempFilePath, { dir: targetDir });
90+
} finally {
91+
fs.unlinkSync(tempFilePath);
92+
}
93+
})();
6094
},
6195

6296
// fun addOrUpdateComment(commentBuilder: PrCommentBuilder) {

0 commit comments

Comments (0)