13 changes: 13 additions & 0 deletions .github/workflows/ci-jobs.yml
@@ -192,3 +192,16 @@ jobs:
- run: firefox --version
- name: test
run: pnpm ember test --path dist -c testem.ci-browsers.js

perf-check:
name: Perf script still works
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
- uses: ./.github/actions/setup
- name: Check that the perf script works, so we don't regress
run: RUNS='2' pnpm bench
env:
GIT_LFS_SKIP_SMUDGE: 1
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
tracerbench-testing/
*.bpkg
*.gem
*.rbc
1 change: 1 addition & 0 deletions .prettierignore
@@ -12,3 +12,4 @@ package.json
pnpm-lock.yaml
glimmer-vm/**/*.md
glimmer-vm/**/*.yaml
tracerbench-testing/
41 changes: 41 additions & 0 deletions bin/benchmark.mjs
@@ -0,0 +1,41 @@
/* eslint-disable no-console */
/* eslint-disable n/no-process-exit */

import { runBenchmark } from './benchmark/run.mjs';
import { hasFlag } from './benchmark/utils.mjs';

if (hasFlag(process.argv, '--help', '-h')) {
console.log(`
Runs tracerbench compare between origin/main and your current working tree.

NOTE: only ember-source is linked, not other packages.

Output directory:
tracerbench-testing/

Options:
--force delete cached directories before running
--reuse reuse existing apps and tarballs, if available (by default only the control app/tarball is reused)

Notes:
- This script runs \`pnpm install\` and \`node ./bin/build-for-publishing.js\` in both repos.
- build-for-publishing updates files in-place; it will modify your working tree.
- Benchmark apps are built with \`vite build\` and served using \`vite preview\`.
`);
process.exit(0);
}

const FORCE = hasFlag(process.argv, '--force');
const REUSE = hasFlag(process.argv, '--reuse');

try {
const result = await runBenchmark({
force: FORCE,
reuse: REUSE,
});

console.log(`\nWrote report: ${result.msgFile}`);
} catch (error) {
console.error(error);
process.exit(1);
}
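Note: `hasFlag` is imported from bin/benchmark/utils.mjs, which is not visible in the portion of the diff shown here. A minimal sketch of what it presumably does, assuming it only checks whether any of the given flags appears in argv:

// hypothetical sketch of hasFlag (bin/benchmark/utils.mjs is not shown in this diff)
export function hasFlag(argv, ...flags) {
  return flags.some((flag) => argv.includes(flag));
}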
39 changes: 39 additions & 0 deletions bin/benchmark/control.mjs
@@ -0,0 +1,39 @@
import { join } from 'node:path';

import fs from 'fs-extra';

import { buildEmberSource, latestTarball, run } from './utils.mjs';

const { ensureDir, pathExists } = fs;

export async function getOrBuildControlTarball({ repoRoot, controlRepoDir, controlBranchName }) {
try {
return await latestTarball(controlRepoDir);
} catch {
// fall through; rebuild
}

await run('git', ['fetch', 'origin'], { cwd: repoRoot, quiet: true });
const controlRef = (
await run('git', ['rev-parse', `origin/${controlBranchName}`], {
cwd: repoRoot,
quiet: true,
})
).stdout.trim();

if (!(await pathExists(controlRepoDir))) {
await ensureDir(controlRepoDir);
if (process.env.CI) {
// in CI, clone the control repo from GitHub
await run('git', ['clone', 'https://github.com/emberjs/ember.js.git', controlRepoDir]);
} else {
// locally, clone from the local .git directory (fast, avoids the network)
await run('git', ['clone', join(repoRoot, '.git'), controlRepoDir]);
}
} else {
await run('git', ['fetch'], { cwd: controlRepoDir, quiet: true });
}

await run('git', ['checkout', '--force', controlRef], { cwd: controlRepoDir });
await buildEmberSource(controlRepoDir);
return await latestTarball(controlRepoDir);
}
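Both control.mjs above and experiment.mjs below lean on `run`, `buildEmberSource`, and `latestTarball` from bin/benchmark/utils.mjs, which is not included in the portion of the diff shown here. A rough sketch of what those helpers could look like, assuming buildEmberSource runs the steps named in the --help text (pnpm install, build-for-publishing) plus an assumed `pnpm pack` step, and latestTarball simply picks the newest .tgz in the repo directory:

// hypothetical sketches only; the real utils.mjs is not part of the diff shown here
import { execFile } from 'node:child_process';
import { readdir, stat } from 'node:fs/promises';
import { join } from 'node:path';

// run a command, resolving with its stdout/stderr ({ quiet } suppresses echoing)
export function run(cmd, args, { cwd = process.cwd(), quiet = false } = {}) {
  return new Promise((resolve, reject) => {
    execFile(cmd, args, { cwd, maxBuffer: 64 * 1024 * 1024 }, (error, stdout, stderr) => {
      if (!quiet) console.log(stdout);
      if (error) return reject(error);
      resolve({ stdout, stderr });
    });
  });
}

// install, build for publishing, then pack ember-source (the pack step is an assumption)
export async function buildEmberSource(repoDir) {
  await run('pnpm', ['install'], { cwd: repoDir });
  await run('node', ['./bin/build-for-publishing.js'], { cwd: repoDir });
  await run('pnpm', ['pack'], { cwd: repoDir });
}

// newest *.tgz in the repo dir; throws when none exists so callers can fall back to a rebuild
export async function latestTarball(repoDir) {
  const tarballs = (await readdir(repoDir)).filter((name) => name.endsWith('.tgz'));
  if (tarballs.length === 0) {
    throw new Error(`no ember-source tarball found in ${repoDir}`);
  }
  const withTimes = await Promise.all(
    tarballs.map(async (name) => {
      const file = join(repoDir, name);
      return { file, mtime: (await stat(file)).mtimeMs };
    })
  );
  withTimes.sort((a, b) => b.mtime - a.mtime);
  return withTimes[0].file;
}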
14 changes: 14 additions & 0 deletions bin/benchmark/experiment.mjs
@@ -0,0 +1,14 @@
import { latestTarball, buildEmberSource } from './utils.mjs';

export async function buildExperimentTarball({ repoDir, reuse = false }) {
if (reuse) {
try {
return await latestTarball(repoDir);
} catch {
// fall through; rebuild
}
}

await buildEmberSource(repoDir);
return await latestTarball(repoDir);
}
201 changes: 201 additions & 0 deletions bin/benchmark/run.mjs
@@ -0,0 +1,201 @@
/* eslint-disable no-console */
import { join } from 'node:path';
import { killPortProcess } from 'kill-port-process';

import fs from 'fs-extra';

import { getOrBuildControlTarball } from './control.mjs';
import { buildExperimentTarball } from './experiment.mjs';
import { run, prepareApp, sleep, startVitePreview, lsof, REPO_ROOT, BENCH_ROOT } from './utils.mjs';

const { ensureDir, remove, writeFile } = fs;

function buildMarkersString(markers) {
return markers.flatMap((marker) => [`${marker}Start`, `${marker}End`]).join(',');
}

// Default configuration for runBenchmark
const DEFAULT_CONTROL_BRANCH_NAME = 'main';
const DEFAULT_CONTROL_APP_FROM_MAIN = false;
const DEFAULT_CONTROL_PORT = 4500;
const DEFAULT_EXPERIMENT_PORT = 4501;
const DEFAULT_FIDELITY = process.env['RUNS'] || '20';
const DEFAULT_THROTTLE = '1';
const DEFAULT_REGRESSION_THRESHOLD = '25';
const DEFAULT_SAMPLE_TIMEOUT = '60';
const DEFAULT_MARKERS = [
// Copied from glimmer-vm/bin/setup-bench.mts (krausest benchmark)
'render',
'render1000Items1',
'clearItems1',
'render1000Items2',
'clearItems2',
'render5000Items1',
'clearManyItems1',
'render5000Items2',
'clearManyItems2',
'render1000Items3',
'append1000Items1',
'append1000Items2',
'updateEvery10thItem1',
'updateEvery10thItem2',
'selectFirstRow1',
'selectSecondRow1',
'removeFirstRow1',
'removeSecondRow1',
'swapRows1',
'swapRows2',
'clearItems4',
];

export async function runBenchmark({ force = false, reuse = false } = {}) {
// Use config constants directly; no local re-assignment

await ensureDir(BENCH_ROOT);

const CONTROL_DIRS = {
repo: join(BENCH_ROOT, 'ember-source-control'),
app: join(BENCH_ROOT, 'control'),
};
const EXPERIMENT_DIRS = {
app: join(BENCH_ROOT, 'experiment'),
repo: REPO_ROOT,
};

const controlUrl = `http://127.0.0.1:${DEFAULT_CONTROL_PORT}`;
const experimentUrl = `http://127.0.0.1:${DEFAULT_EXPERIMENT_PORT}`;
const markersString = buildMarkersString(DEFAULT_MARKERS);

if (force) {
await killPortProcess([DEFAULT_CONTROL_PORT, DEFAULT_EXPERIMENT_PORT]);
await remove(CONTROL_DIRS.repo);
await remove(CONTROL_DIRS.app);
await remove(EXPERIMENT_DIRS.app);
}

await ensureDir(BENCH_ROOT);
await ensureDir(EXPERIMENT_DIRS.app);
await ensureDir(CONTROL_DIRS.app);

const controlTarball = await getOrBuildControlTarball({
repoRoot: REPO_ROOT,
controlRepoDir: CONTROL_DIRS.repo,
controlBranchName: DEFAULT_CONTROL_BRANCH_NAME,
});

const experimentTarball = await buildExperimentTarball({
repoDir: EXPERIMENT_DIRS.repo,
reuse,
});

const experimentAppSource = join(REPO_ROOT, 'smoke-tests/benchmark-app');
const controlAppSource = DEFAULT_CONTROL_APP_FROM_MAIN
? join(CONTROL_DIRS.repo, 'smoke-tests/benchmark-app')
: experimentAppSource;

await Promise.all([
prepareApp({
sourceAppDir: controlAppSource,
destAppDir: CONTROL_DIRS.app,
emberSourceTarball: controlTarball,
reuse,
}),
prepareApp({
sourceAppDir: experimentAppSource,
destAppDir: EXPERIMENT_DIRS.app,
emberSourceTarball: experimentTarball,
reuse,
}),
]);

// These will error if the ports are already occupied (--strict-port)
startVitePreview({ appDir: CONTROL_DIRS.app, port: DEFAULT_CONTROL_PORT });
startVitePreview({
appDir: EXPERIMENT_DIRS.app,
port: DEFAULT_EXPERIMENT_PORT,
});

async function cleanup() {
console.log(`\n\tCleaning up servers...`);

await killPortProcess([DEFAULT_CONTROL_PORT, DEFAULT_EXPERIMENT_PORT]);
}

process.on('exit', cleanup);
process.on('SIGINT', async () => {
// wait for the servers to be killed before exiting
await cleanup();
// eslint-disable-next-line n/no-process-exit
process.exit(1);
});

// give servers a moment to start
await sleep(5000);

/**
* We need to make sure both servers are running before starting the benchmark.
*/
let controlLsof = await lsof(DEFAULT_CONTROL_PORT);
let experimentLsof = await lsof(DEFAULT_EXPERIMENT_PORT);

if (!controlLsof || !experimentLsof) {
throw new Error(
`One of the servers failed to start. Control server lsof:\n${controlLsof}\n\nExperiment server lsof:\n${experimentLsof}`
);
}

const tracerbenchBin = join(REPO_ROOT, 'node_modules/tracerbench/bin/run');

const args = [
'--single-threaded-gc',
tracerbenchBin,
'compare',
'--regressionThreshold',
DEFAULT_REGRESSION_THRESHOLD,
'--sampleTimeout',
DEFAULT_SAMPLE_TIMEOUT,
'--fidelity',
DEFAULT_FIDELITY,
'--controlURL',
controlUrl,
'--experimentURL',
experimentUrl,
'--report',
'--headless',
'--cpuThrottleRate',
DEFAULT_THROTTLE,
'--markers',
markersString,
'--debug',
'--browserArgs',
`"--incognito,--disable-gpu,--mute-audio,--log-level=3,--headless=new"`,
];

const output = await run('node', args, { cwd: EXPERIMENT_DIRS.app });
const msgFile = join(BENCH_ROOT, 'msg.txt');

if (!process.env.CI) {
await writeFile(
msgFile,
output.stdout.split('Benchmark Results Summary').pop() ?? output.stdout,
'utf8'
);
}

await cleanup();

return {
benchRoot: BENCH_ROOT,
msgFile,
controlUrl,
experimentUrl,
};
}
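`startVitePreview` and `lsof` also come from the unshown utils.mjs. Given the --strict-port behavior referenced in the comment above and the lsof output embedded in the error message, they plausibly look something like this (a sketch under those assumptions, not the actual implementation):

// hypothetical sketches of startVitePreview and lsof (utils.mjs is not shown in this diff)
import { spawn, execFile } from 'node:child_process';

// start `vite preview` for a built app; --strict-port makes vite fail instead of
// silently picking another port, which the startup check above relies on
export function startVitePreview({ appDir, port }) {
  return spawn('pnpm', ['vite', 'preview', '--port', String(port), '--strict-port'], {
    cwd: appDir,
    stdio: 'inherit',
  });
}

// resolves with lsof's listing of listeners on the port, or '' when nothing is listening
export function lsof(port) {
  return new Promise((resolve) => {
    execFile('lsof', ['-i', `:${port}`, '-sTCP:LISTEN'], (error, stdout) => {
      resolve(error ? '' : stdout);
    });
  });
}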