Log failed tests to file when running tests locally (#2809)
The issue suggested showing a pop-up, but this wasn't optimal: it either
blocked the tests from terminating or closed immediately on its own. I also
don't want to clobber the clipboard all the time.

Let's log to a file instead, since that seems like the best of the options.

Fixes #2805

---------

Co-authored-by: Phil Cohen <phillip@phillip.io>
AndreasArvidsson and phillco authored Feb 1, 2025
1 parent 1273b1c commit f23ffc5
Showing 4 changed files with 50 additions and 8 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -45,6 +45,8 @@ next-env.d.ts
 
 # test subset config
 packages/test-harness/testSubsetGrep.properties
+packages/test-harness/failedTests.properties
+
 
 # cursorless-neovim
 cursorless.nvim/node/cursorless-neovim
4 changes: 4 additions & 0 deletions .vscode/launch.json
@@ -31,6 +31,7 @@
   "request": "launch",
   "env": {
     "CURSORLESS_MODE": "test",
+    "CURSORLESS_LOG_FAILED": "true",
     "CURSORLESS_REPO_ROOT": "${workspaceFolder}"
   },
   "args": [
@@ -52,6 +53,7 @@
   "env": {
     "CURSORLESS_MODE": "test",
     "CURSORLESS_RUN_TEST_SUBSET": "true",
+    "CURSORLESS_LOG_FAILED": "true",
     "CURSORLESS_REPO_ROOT": "${workspaceFolder}"
   },
   "args": [
@@ -136,6 +138,7 @@
   "program": "${workspaceFolder}/packages/test-harness/dist/runTalonTests.cjs",
   "env": {
     "CURSORLESS_MODE": "test",
+    "CURSORLESS_LOG_FAILED": "true",
     "CURSORLESS_REPO_ROOT": "${workspaceFolder}"
   },
   "outFiles": ["${workspaceFolder}/**/out/**/*.js"],
@@ -171,6 +174,7 @@
   "program": "${workspaceFolder}/packages/test-harness/dist/runTalonJsTests.cjs",
   "env": {
     "CURSORLESS_MODE": "test",
+    "CURSORLESS_LOG_FAILED": "true",
     "CURSORLESS_REPO_ROOT": "${workspaceFolder}"
   },
   "outFiles": ["${workspaceFolder}/**/out/**/*.js"],
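These launch configurations only add an environment variable. As a point of reference, here is a hypothetical sketch (not part of this commit; the helper name and entry-point argument are made up) of launching a test-harness entry point from a custom Node script with the same environment:

import { fork } from "node:child_process";

// Hypothetical helper, not part of this commit: run a test-harness entry point
// with the same environment the debug configurations above set up.
function runWithFailureLogging(harnessEntryPoint: string, repoRoot: string) {
  return fork(harnessEntryPoint, {
    env: {
      ...process.env,
      CURSORLESS_MODE: "test",
      // Opt in to writing failed test names to failedTests.properties
      CURSORLESS_LOG_FAILED: "true",
      CURSORLESS_REPO_ROOT: repoRoot,
    },
  });
}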
30 changes: 22 additions & 8 deletions packages/test-harness/src/runAllTests.ts
@@ -1,8 +1,13 @@
-import Mocha from "mocha";
-import * as path from "node:path";
 import { getCursorlessRepoRoot } from "@cursorless/node-common";
-import { runTestSubset, testSubsetGrepString } from "./testSubset";
 import { glob } from "glob";
+import Mocha from "mocha";
+import * as path from "node:path";
+import {
+  logFailedTests,
+  runTestSubset,
+  shouldLogFailedTests,
+  testSubsetGrepString,
+} from "./testSubset";
 
 /**
  * Type of test to run, eg unit, vscode, talon
@@ -24,7 +29,7 @@ export enum TestType {
   neovim,
 }
 
-export function runAllTests(...types: TestType[]) {
+export function runAllTests(...types: TestType[]): Promise<void> {
   return runTestsInDir(
     path.join(getCursorlessRepoRoot(), "packages"),
     (files) =>
@@ -68,14 +73,23 @@ async function runTestsInDir(
 
   try {
     // Run the mocha test
-    await new Promise<void>((c, e) => {
-      mocha.run((failures) => {
+    await new Promise<void>((resolve, reject) => {
+      const failedTests: string[] = [];
+
+      const runner = mocha.run((failures) => {
         if (failures > 0) {
-          e(new Error(`${failures} tests failed.`));
+          if (shouldLogFailedTests()) {
+            logFailedTests(failedTests);
+          }
+          reject(`${failures} tests failed.`);
         } else {
-          c();
+          resolve();
         }
       });
+
+      if (shouldLogFailedTests()) {
+        runner.on("fail", (test) => failedTests.push(test.fullTitle()));
+      }
     });
   } catch (err) {
     console.error(err);
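The change above relies on Mocha's programmatic API: mocha.run() returns a Runner, an EventEmitter that emits a "fail" event for each failing test, and test.fullTitle() gives the suite-qualified name. Here is a minimal, self-contained sketch of that pattern (illustrative only; file discovery and the shouldLogFailedTests gate from the real harness are omitted):

import Mocha from "mocha";

// Run the given spec files and resolve with the full titles of any failing tests.
async function collectFailedTests(files: string[]): Promise<string[]> {
  const mocha = new Mocha();
  files.forEach((file) => mocha.addFile(file));

  const failedTests: string[] = [];

  await new Promise<void>((resolve) => {
    // mocha.run() starts the run and returns a Runner; the callback fires once
    // the run has finished.
    const runner = mocha.run(() => resolve());

    // "fail" fires once per failing test; fullTitle() includes the suite path.
    runner.on("fail", (test) => failedTests.push(test.fullTitle()));
  });

  return failedTests;
}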
22 changes: 22 additions & 0 deletions packages/test-harness/src/testSubset.ts
@@ -29,6 +29,20 @@ export function testSubsetFilePath() {
   );
 }
 
+function testFailedFilePath() {
+  return path.join(
+    getCursorlessRepoRoot(),
+    "packages",
+    "test-harness",
+    "failedTests.properties",
+  );
+}
+
+export function logFailedTests(testNames: string[]) {
+  const lines = [`${testNames.length} failed tests`, "", ...testNames];
+  fs.writeFileSync(testFailedFilePath(), lines.join("\n"));
+}
+
 /**
  * Determine whether we should run just the subset of the tests specified by
  * {@link TEST_SUBSET_GREP_STRING}.
@@ -37,3 +51,11 @@ export function testSubsetFilePath() {
 export function runTestSubset() {
   return process.env.CURSORLESS_RUN_TEST_SUBSET === "true";
 }
+
+/**
+ * Determine whether we should log the failed tests to a file. This makes it easier to put them in `testSubsetGrep.properties` for faster iterating.
+ * @returns `true` if we should log failed tests to `packages/test-harness/failedTests.properties`
+ */
+export function shouldLogFailedTests() {
+  return process.env.CURSORLESS_LOG_FAILED === "true";
+}
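Concretely, logFailedTests writes a short header followed by one full test title per line, which can then be pasted into testSubsetGrep.properties to rerun just the failures. A small illustration of the file format (the test titles here are made up):

// Illustration only: reproduces the format logFailedTests writes to
// packages/test-harness/failedTests.properties, using made-up test titles.
const testNames = ["recorded actions bringArgMadeAfterLook", "unit tokenizer"];
const lines = [`${testNames.length} failed tests`, "", ...testNames];
console.log(lines.join("\n"));
// 2 failed tests
//
// recorded actions bringArgMadeAfterLook
// unit tokenizer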
