feat(core): rust task hasher (#19617)

Jonathan Cammisuli, 2023-11-21 08:55:41 -05:00, committed by GitHub
parent 4e1289b50b
commit 5d82a2aab2
69 changed files with 3345 additions and 1181 deletions

Cargo.lock (generated)

@@ -293,6 +293,20 @@ dependencies = [
"syn 2.0.15",
]
[[package]]
name = "dashmap"
version = "5.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
dependencies = [
"cfg-if",
"hashbrown",
"lock_api",
"once_cell",
"parking_lot_core",
"rayon",
]
[[package]]
name = "difflib"
version = "0.4.0"
@@ -930,6 +944,12 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
[[package]]
name = "json_comments"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41ee439ee368ba4a77ac70d04f14015415af8600d6c894dc1f11bd79758c57d5"
[[package]]
name = "kqueue"
version = "1.0.7"
@@ -1053,9 +1073,9 @@ checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f"
[[package]]
name = "lock_api"
version = "0.4.9"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45"
dependencies = [
"autocfg",
"scopeguard",
@@ -1343,6 +1363,7 @@ dependencies = [
"assert_fs",
"colored",
"crossbeam-channel",
"dashmap",
"fs_extra",
"globset",
"hashbrown",
@@ -1357,6 +1378,8 @@ dependencies = [
"parking_lot",
"rayon",
"regex",
"serde",
"serde_json",
"swc_common",
"swc_ecma_ast",
"swc_ecma_dep_graph",
@@ -1366,6 +1389,7 @@ dependencies = [
"tokio",
"tracing",
"tracing-subscriber",
"tsconfig",
"walkdir",
"watchexec",
"watchexec-events",
@@ -1398,15 +1422,15 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.9.7"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521"
checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
dependencies = [
"cfg-if",
"libc",
"redox_syscall 0.2.16",
"redox_syscall 0.4.1",
"smallvec",
"windows-sys 0.45.0",
"windows-targets 0.48.0",
]
[[package]]
@@ -1652,6 +1676,15 @@ dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_users"
version = "0.4.3"
@@ -1727,6 +1760,12 @@ dependencies = [
"windows-sys 0.45.0",
]
[[package]]
name = "ryu"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
[[package]]
name = "same-file"
version = "1.0.6"
@@ -1774,6 +1813,17 @@ dependencies = [
"syn 1.0.107",
]
[[package]]
name = "serde_json"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "sha1_smol"
version = "1.0.0"
@@ -2284,6 +2334,19 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "tsconfig"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c89ed286b13fd7e343eb628d8511fb4fdc99053acccb2263897e0d89526462b"
dependencies = [
"json_comments",
"regex",
"serde",
"serde_json",
"thiserror",
]
[[package]]
name = "typed-arena"
version = "2.0.2"


@@ -1,5 +1,6 @@
[workspace]
resolver = "2"
members = [
'packages/nx'
]


@@ -44,7 +44,7 @@ export default async function run(
const nodes = {};
const hashes = [] as string[];
for (const d of Object.keys(res.details.nodes)) {
for (const d of Object.keys(res.details.nodes).sort()) {
if (d.indexOf('$fileset') === -1) {
nodes[d] = res.details.nodes[d];
hashes.push(res.details.nodes[d]);
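The sort added above makes the emitted node list and hash order deterministic. A minimal illustration of why, not taken from the diff:

// Key order in a plain object follows insertion order, so output derived
// from unsorted keys can vary between runs; sorting removes that variance.
const details: Record<string, string> = { b: 'hash-b', a: 'hash-a' };
const stable = Object.keys(details).sort().map((k) => details[k]);
// stable is always ['hash-a', 'hash-b']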


@@ -1,9 +1,5 @@
import { join } from 'path';
import { readNxJson } from 'nx/src/config/configuration';
import {
getTargetInputs,
filterUsingGlobPatterns,
} from 'nx/src/hasher/task-hasher';
import {
type ProjectGraph,
type ProjectGraphProjectNode,
@@ -15,6 +11,10 @@ import {
import { fileExists } from 'nx/src/utils/fileutils';
import { fileDataDepTarget } from 'nx/src/config/project-graph';
import { readTsConfig } from './typescript/ts-config';
import {
filterUsingGlobPatterns,
getTargetInputs,
} from 'nx/src/hasher/task-hasher';
/**
* Finds all npm dependencies and their expected versions for a given project.


@@ -7,6 +7,7 @@ edition = '2021'
anyhow = "1.0.71"
colored = "2"
crossbeam-channel = '0.5'
dashmap = { version = "5.5.3", features= ["rayon"] }
fs_extra = "1.3.0"
globset = "0.4.10"
hashbrown = { version = "0.14.0", features = ["rayon"] }
@@ -24,10 +25,13 @@ napi-derive = '2.9.3'
nom = '7.1.3'
regex = "1.9.1"
rayon = "1.7.0"
serde = "1"
serde_json = "1"
thiserror = "1.0.40"
tokio = { version = "1.28.2", features = ["fs"] }
tracing = "0.1.37"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
tsconfig = "0.2"
walkdir = '2.3.3'
watchexec = "2.3.0"
watchexec-events = "1.0.0"


@@ -10,12 +10,18 @@ import {
mapTargetDefaultsToDependencies,
} from '../../tasks-runner/create-task-graph';
import { NxJsonConfiguration } from '../../config/nx-json';
import { InProcessTaskHasher } from '../../hasher/task-hasher';
import {
DaemonBasedTaskHasher,
InProcessTaskHasher,
TaskHasher,
} from '../../hasher/task-hasher';
import { hashTask } from '../../hasher/hash-task';
import { getPackageManagerCommand } from '../../utils/package-manager';
import { printAffectedDeprecationMessage } from './command-object';
import { logger, NX_PREFIX } from '../../utils/logger';
import { getTaskSpecificEnv } from '../../tasks-runner/task-env';
import { getFileMap } from '../../project-graph/build-project-graph';
import { daemonClient } from '../../daemon/client/client';
/**
* @deprecated Use showProjectsHandler, generateGraph, or affected (without the print-affected mode) instead.
@@ -72,7 +78,22 @@ async function createTasks(
nxArgs.configuration,
overrides
);
const hasher = new InProcessTaskHasher({}, [], projectGraph, nxJson, {});
let hasher: TaskHasher;
if (daemonClient.enabled()) {
hasher = new DaemonBasedTaskHasher(daemonClient, {});
} else {
const { fileMap, allWorkspaceFiles, rustReferences } = getFileMap();
hasher = new InProcessTaskHasher(
fileMap?.projectFileMap,
allWorkspaceFiles,
projectGraph,
nxJson,
rustReferences,
{}
);
}
const execCommand = getPackageManagerCommand().exec;
const tasks = Object.values(taskGraph.tasks);


@@ -31,7 +31,6 @@ import { Server } from 'net';
import { FileData } from '../../config/project-graph';
import { TaskGraph } from '../../config/task-graph';
import { daemonClient } from '../../daemon/client/client';
import { filterUsingGlobPatterns } from '../../hasher/task-hasher';
import { getRootTsConfigPath } from '../../plugins/js/utils/typescript';
import { pruneExternalNodes } from '../../project-graph/operators';
import { createProjectGraphAsync } from '../../project-graph/project-graph';
@@ -42,11 +41,13 @@ import {
import { allFileData } from '../../utils/all-file-data';
import { splitArgsIntoNxArgsAndOverrides } from '../../utils/command-line-utils';
import { NxJsonConfiguration } from '../../config/nx-json';
import { HashPlanner } from '../../native';
import { HashPlanner, transferProjectGraph } from '../../native';
import { transformProjectGraphForRust } from '../../native/transform-objects';
import { getAffectedGraphNodes } from '../affected/affected';
import { readFileMapCache } from '../../project-graph/nx-deps-cache';
import { filterUsingGlobPatterns } from '../../hasher/task-hasher';
export interface ProjectGraphClientResponse {
hash: string;
projects: ProjectGraphProjectNode[];
@@ -682,9 +683,8 @@ async function createTaskGraphClientResponse(
performance.mark('task graph generation:end');
const planner = new HashPlanner(
workspaceRoot,
nxJson,
transformProjectGraphForRust(graph)
transferProjectGraph(transformProjectGraphForRust(graph))
);
performance.mark('task hash plan generation:start');
const plans: Record<string, string[]> = {};
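For context, the planner now receives the project graph as a Rust-side external reference. A rough sketch of the wiring, assuming a ProjectGraph value `graph` is in scope (names mirror the imports above):

// transferProjectGraph converts the JS structure once and returns an
// ExternalObject handle; HashPlanner reads it without re-serializing.
const graphRef = transferProjectGraph(transformProjectGraphForRust(graph));
const planner = new HashPlanner(nxJson, graphRef);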


@@ -16,7 +16,7 @@ export async function handleHashTasks(payload: {
tasks: Task[];
taskGraph: TaskGraph;
}) {
const { projectGraph, allWorkspaceFiles, fileMap } =
const { projectGraph, allWorkspaceFiles, fileMap, rustReferences } =
await getCachedSerializedProjectGraphPromise();
const nxJson = readNxJson();
@@ -27,6 +27,7 @@ export async function handleHashTasks(payload: {
allWorkspaceFiles,
projectGraph,
nxJson,
rustReferences,
payload.runnerOptions
);
}


@@ -28,6 +28,7 @@ import {
import { workspaceRoot } from '../../utils/workspace-root';
import { notifyFileWatcherSockets } from './file-watching/file-watcher-sockets';
import { serverLogger } from './logger';
import { NxWorkspaceFilesExternals } from '../../native';
let cachedSerializedProjectGraphPromise: Promise<{
error: Error | null;
@@ -35,9 +36,14 @@ let cachedSerializedProjectGraphPromise: Promise<{
fileMap: FileMap | null;
allWorkspaceFiles: FileData[] | null;
serializedProjectGraph: string | null;
rustReferences: NxWorkspaceFilesExternals | null;
}>;
export let fileMapWithFiles:
| { fileMap: FileMap; allWorkspaceFiles: FileData[] }
| {
fileMap: FileMap;
allWorkspaceFiles: FileData[];
rustReferences: NxWorkspaceFilesExternals;
}
| undefined;
export let currentProjectFileMapCache: FileMapCache | undefined;
export let currentProjectGraph: ProjectGraph | undefined;
@@ -77,6 +83,7 @@ export async function getCachedSerializedProjectGraphPromise() {
projectGraph: null,
fileMap: null,
allWorkspaceFiles: null,
rustReferences: null,
};
}
}
@@ -169,9 +176,8 @@ async function processCollectedUpdatedAndDeletedFiles(
if (fileMapWithFiles) {
fileMapWithFiles = updateFileMap(
projects,
fileMapWithFiles.fileMap,
fileMapWithFiles.allWorkspaceFiles,
new Map(Object.entries(updatedFileHashes)),
fileMapWithFiles.rustReferences,
updatedFileHashes,
deletedFiles
);
} else {
@@ -215,6 +221,7 @@ async function processFilesAndCreateAndSerializeProjectGraph() {
error: err,
projectGraph: null,
fileMap: null,
rustReferences: null,
allWorkspaceFiles: null,
serializedProjectGraph: null,
});
@@ -243,18 +250,21 @@ async function createAndSerializeProjectGraph(
projectGraph: ProjectGraph | null;
fileMap: FileMap | null;
allWorkspaceFiles: FileData[] | null;
rustReferences: NxWorkspaceFilesExternals | null;
serializedProjectGraph: string | null;
}> {
try {
performance.mark('create-project-graph-start');
const fileMap = copyFileMap(fileMapWithFiles.fileMap);
const allWorkspaceFiles = copyFileData(fileMapWithFiles.allWorkspaceFiles);
const rustReferences = fileMapWithFiles.rustReferences;
const { projectGraph, projectFileMapCache } =
await buildProjectGraphUsingFileMap(
projects,
knownExternalNodes,
fileMap,
allWorkspaceFiles,
rustReferences,
currentProjectFileMapCache || readFileMapCache(),
true
);
@@ -283,6 +293,7 @@ async function createAndSerializeProjectGraph(
fileMap,
allWorkspaceFiles,
serializedProjectGraph,
rustReferences,
};
} catch (e) {
serverLogger.log(
@@ -294,6 +305,7 @@ async function createAndSerializeProjectGraph(
fileMap: null,
allWorkspaceFiles: null,
serializedProjectGraph: null,
rustReferences: null,
};
}
}


@@ -421,7 +421,7 @@ exports[`TaskHasher should hash tasks where the project graph has circular depen
},
"runtime": {},
},
"value": "13762475745855667295",
"value": "9112654928859037831",
}
`;
@@ -444,7 +444,7 @@ exports[`TaskHasher should hash tasks where the project graph has circular depen
},
"runtime": {},
},
"value": "11293543081462853412",
"value": "144123652661336112",
}
`;


@@ -12,6 +12,7 @@ export async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(
taskGraph: TaskGraph,
nxJson: NxJsonConfiguration
) {
performance.mark('hashMultipleTasks:start');
const tasks = Object.values(taskGraph.tasks);
const tasksWithHashers = await Promise.all(
tasks.map(async (task) => {
@@ -39,6 +40,12 @@ export async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(
tasksToHash[i].hash = hashes[i].value;
tasksToHash[i].hashDetails = hashes[i].details;
}
performance.mark('hashMultipleTasks:end');
performance.measure(
'hashMultipleTasks',
'hashMultipleTasks:start',
'hashMultipleTasks:end'
);
}
export async function hashTask(
@@ -48,6 +55,7 @@ export async function hashTask(
task: Task,
env: NodeJS.ProcessEnv
) {
performance.mark('hashSingleTask:start');
const customHasher = await getCustomHasher(task, projectGraph);
const projectsConfigurations =
readProjectsConfigurationFromProjectGraph(projectGraph);
@@ -64,4 +72,10 @@ export async function hashTask(
: hasher.hashTask(task, taskGraph, env));
task.hash = value;
task.hashDetails = details;
performance.mark('hashSingleTask:end');
performance.measure(
'hashSingleTask',
'hashSingleTask:start',
'hashSingleTask:end'
);
}
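The marks and measures added here can be observed with Node's perf_hooks; a minimal sketch, not part of this PR:

import { PerformanceObserver } from 'perf_hooks';

// Logs each measure as it is recorded, including the new
// 'hashMultipleTasks' and 'hashSingleTask' entries.
const observer = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${entry.duration.toFixed(2)}ms`);
  }
});
observer.observe({ entryTypes: ['measure'] });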


@@ -0,0 +1,641 @@
import { TempFs } from '../internal-testing-utils/temp-fs';
import { retrieveWorkspaceFiles } from '../project-graph/utils/retrieve-workspace-files';
import { NxJsonConfiguration } from '../config/nx-json';
import { createTaskGraph } from '../tasks-runner/create-task-graph';
import { NativeTaskHasherImpl } from './native-task-hasher-impl';
import { ProjectGraphBuilder } from '../project-graph/project-graph-builder';
describe('native task hasher', () => {
let tempFs: TempFs;
const packageJson = {
name: 'nrwl',
};
const tsConfigBaseJson = JSON.stringify({
compilerOptions: {
paths: {
'@nx/parent': ['libs/parent/src/index.ts'],
'@nx/child': ['libs/child/src/index.ts'],
},
},
});
const nxJson: NxJsonConfiguration = {
namedInputs: {
default: ['{projectRoot}/**/*', 'sharedGlobals'],
production: ['default'],
sharedGlobals: [],
},
targetDefaults: {
build: {
cache: true,
dependsOn: ['^build'],
inputs: ['production', '^production'],
},
},
};
beforeEach(async () => {
tempFs = new TempFs('NativeTaskHasher');
await tempFs.createFiles({
'libs/parent/src/index.ts': 'parent-content',
'libs/parent/project.json': JSON.stringify({
name: 'parent',
targets: {
build: {
executor: 'nx:run-commands',
},
},
}),
'libs/parent/filea.ts': 'filea-content',
'libs/parent/filea.spec.ts': 'test-content',
'libs/child/fileb.ts': 'child-content',
'libs/child/fileb.spec.ts': 'test-content',
'libs/child/src/index.ts': 'child-content',
'libs/child/project.json': JSON.stringify({ name: 'child' }),
'libs/unrelated/project.json': JSON.stringify({
name: 'unrelated',
targets: { build: {} },
}),
'libs/unrelated/filec.ts': 'filec-content',
'libs/tagged/project.json': JSON.stringify({
name: 'tagged',
targets: { build: {} },
tags: ['some-tag'],
}),
global1: 'global1-content',
global2: 'global2-content',
'tsconfig.base.json': tsConfigBaseJson,
// 'yarn.lock': 'content',
'package.json': JSON.stringify(packageJson),
'nx.json': JSON.stringify(nxJson),
});
});
afterEach(() => {
tempFs.cleanup();
});
it('should create a task hash', async () => {
const workspaceFiles = await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
const builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'parent',
targets: {
build: {
executor: 'nx:run-commands',
inputs: [
'default',
'^default',
{ runtime: 'echo runtime123' },
{ env: 'TESTENV' },
{ env: 'NONEXISTENTENV' },
{
input: 'default',
projects: ['unrelated', 'tag:some-tag'],
},
],
},
},
},
});
builder.addNode({
name: 'unrelated',
type: 'lib',
data: {
root: 'libs/unrelated',
targets: { build: {} },
},
});
builder.addNode({
name: 'tagged',
type: 'lib',
data: {
root: 'libs/tagged',
targets: { build: {} },
tags: ['some-tag'],
},
});
const projectGraph = builder.getUpdatedProjectGraph();
const taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent'],
['build'],
undefined,
{}
);
const hash = await new NativeTaskHasherImpl(
tempFs.tempDir,
nxJson,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: false }
).hashTasks(Object.values(taskGraph.tasks), taskGraph, {
TESTENV: 'test',
});
expect(hash).toMatchInlineSnapshot(`
[
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"env:NONEXISTENTENV": "3244421341483603138",
"env:TESTENV": "11441948532827618368",
"parent:ProjectConfiguration": "15828052557461792163",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "3244421341483603138",
"runtime:echo runtime123": "29846575039086708",
"tagged:ProjectConfiguration": "1604492097835699503",
"tagged:TsConfig": "2264969541778889434",
"tagged:{projectRoot}/**/*": "112200405683630828",
"unrelated:ProjectConfiguration": "439515135357674343",
"unrelated:TsConfig": "2264969541778889434",
"unrelated:{projectRoot}/**/*": "10505120368757496776",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "5219582320960288192",
},
"value": "2902224107680327789",
},
]
`);
});
it('should hash tasks where the project has dependencies', async () => {
const workspaceFiles = await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
const builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: { build: { executor: 'unknown' } },
},
});
builder.addNode({
name: 'child',
type: 'lib',
data: {
root: 'libs/child',
targets: { build: { executor: 'none' } },
},
});
builder.addStaticDependency('parent', 'child', 'libs/parent/filea.ts');
const projectGraph = builder.getUpdatedProjectGraph();
const taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent'],
['build'],
undefined,
{}
);
const hash = await new NativeTaskHasherImpl(
tempFs.tempDir,
nxJson,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: false }
).hashTask(taskGraph.tasks['parent:build'], taskGraph, {});
expect(hash).toMatchInlineSnapshot(`
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"child:ProjectConfiguration": "7051130583729928229",
"child:TsConfig": "2264969541778889434",
"child:{projectRoot}/**/*": "7694964870822928111",
"parent:ProjectConfiguration": "7704699416930647320",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "15295586939211629225",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "5219582320960288192",
},
"value": "18412450685244791672",
}
`);
});
it('should plan non-default filesets', async () => {
let nxJsonModified = {
namedInputs: {
prod: ['!{projectRoot}/**/*.spec.ts'],
},
} as any;
tempFs.writeFile('nx.json', JSON.stringify(nxJsonModified));
const workspaceFiles = await retrieveWorkspaceFiles(
tempFs.tempDir,
nxJsonModified
);
let builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: {
build: {
inputs: ['prod', '^prod'],
executor: 'nx:run-commands',
},
},
},
});
builder.addNode({
name: 'child',
type: 'lib',
data: {
root: 'libs/child',
namedInputs: {
prod: ['default'],
},
targets: { build: { executor: 'unknown' } },
},
});
builder.addStaticDependency('parent', 'child', 'libs/parent/filea.ts');
let projectGraph = builder.getUpdatedProjectGraph();
let taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent'],
['build'],
undefined,
{}
);
const hash = await new NativeTaskHasherImpl(
tempFs.tempDir,
nxJsonModified,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: false }
).hashTask(taskGraph.tasks['parent:build'], taskGraph, {});
expect(hash).toMatchInlineSnapshot(`
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"child:ProjectConfiguration": "2562552455862160288",
"child:TsConfig": "2264969541778889434",
"child:{projectRoot}/**/*": "7694964870822928111",
"parent:!{projectRoot}/**/*.spec.ts": "7663204892242899157",
"parent:ProjectConfiguration": "4131510303084753861",
"parent:TsConfig": "2264969541778889434",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "4641558175996703359",
},
"value": "5825507912633865657",
}
`);
});
it('should make a plan with multiple filesets of a project', async () => {
let nxJson = {
namedInputs: {
prod: ['!{projectRoot}/**/*.spec.ts'],
},
};
tempFs.writeFile('nx.json', JSON.stringify(nxJson));
const workspaceFiles = await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
let builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: {
build: {
inputs: ['prod'],
executor: 'nx:run-commands',
},
test: {
inputs: ['default'],
dependsOn: ['build'],
executor: 'nx:run-commands',
},
},
},
});
let projectGraph = builder.getUpdatedProjectGraph();
let taskGraph = createTaskGraph(
projectGraph,
{},
['parent'],
['build', 'test'],
undefined,
{}
);
const hash = await new NativeTaskHasherImpl(
tempFs.tempDir,
nxJson,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: false }
).hashTasks(Object.values(taskGraph.tasks), taskGraph, {});
expect(hash).toMatchInlineSnapshot(`
[
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"parent:!{projectRoot}/**/*.spec.ts": "7663204892242899157",
"parent:ProjectConfiguration": "8008830016795210968",
"parent:TsConfig": "2264969541778889434",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "4641558175996703359",
},
"value": "16919987205625802616",
},
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"parent:ProjectConfiguration": "8008830016795210968",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "15295586939211629225",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "4641558175996703359",
},
"value": "2732213649703581334",
},
]
`);
});
it('should be able to handle multiple filesets per project', async () => {
let nxJson = {
namedInputs: {
default: ['{projectRoot}/**/*', '{workspaceRoot}/global1'],
prod: ['!{projectRoot}/**/*.spec.ts'],
},
};
tempFs.writeFile('nx.json', JSON.stringify(nxJson));
const workspaceFiles = await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
const builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: {
test: {
inputs: ['default', '^prod'],
executor: 'nx:run-commands',
},
},
},
});
builder.addNode({
name: 'child',
type: 'lib',
data: {
root: 'libs/child',
namedInputs: {
prod: [
'!{projectRoot}/**/*.spec.ts',
'{workspaceRoot}/global2',
{ env: 'MY_TEST_HASH_ENV' },
],
},
targets: {
test: {
inputs: ['default'],
executor: 'nx:run-commands',
},
},
},
});
builder.addStaticDependency('parent', 'child', 'libs/parent/filea.ts');
let projectGraph = builder.getUpdatedProjectGraph();
let taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent'],
['test'],
undefined,
{}
);
let hash = await new NativeTaskHasherImpl(
tempFs.tempDir,
nxJson,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: false }
).hashTasks(Object.values(taskGraph.tasks), taskGraph, {
MY_TEST_HASH_ENV: 'MY_TEST_HASH_ENV_VALUE',
});
expect(hash).toMatchInlineSnapshot(`
[
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"child:!{projectRoot}/**/*.spec.ts": "13790135045935437026",
"child:ProjectConfiguration": "11541456798478268276",
"child:TsConfig": "2264969541778889434",
"env:MY_TEST_HASH_ENV": "17357374746554314488",
"parent:ProjectConfiguration": "2287392686890337925",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "15295586939211629225",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/global1": "13078141817211771580",
"{workspaceRoot}/global2": "13625885481717016690",
"{workspaceRoot}/nx.json": "10897751101872977225",
},
"value": "1217581064022758580",
},
]
`);
});
it('should be able to include only a part of the base tsconfig', async () => {
let workspaceFiles = await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
const builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: {
build: {
inputs: ['default', '^prod'],
executor: 'nx:run-commands',
},
},
},
});
let projectGraph = builder.getUpdatedProjectGraph();
let taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent'],
['build'],
undefined,
{}
);
let hash = await new NativeTaskHasherImpl(
tempFs.tempDir,
nxJson,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: true }
).hashTask(taskGraph.tasks['parent:build'], taskGraph, {});
expect(hash).toMatchInlineSnapshot(`
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"parent:ProjectConfiguration": "4131510303084753861",
"parent:TsConfig": "8661678577354855152",
"parent:{projectRoot}/**/*": "15295586939211629225",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "5219582320960288192",
},
"value": "9574395623667735815",
}
`);
});
it('should hash tasks where the project graph has circular dependencies', async () => {
const workspaceFiles = await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
const builder = new ProjectGraphBuilder(
undefined,
workspaceFiles.fileMap.projectFileMap
);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: {
build: {
executor: 'nx:run-commands',
},
},
},
});
builder.addNode({
name: 'child',
type: 'lib',
data: {
root: 'libs/child',
targets: {
build: {
executor: 'nx:run-commands',
},
},
},
});
builder.addStaticDependency('parent', 'child', 'libs/parent/filea.ts');
builder.addStaticDependency('child', 'parent', 'libs/child/fileb.ts');
let projectGraph = builder.getUpdatedProjectGraph();
const taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent', 'child'],
['build'],
undefined,
{}
);
let hasher = new NativeTaskHasherImpl(
tempFs.tempDir,
nxJson,
projectGraph,
workspaceFiles.rustReferences,
{ selectivelyHashTsConfig: false }
);
let taskHash = await hasher.hashTask(
taskGraph.tasks['parent:build'],
taskGraph,
{}
);
expect(taskHash).toMatchInlineSnapshot(`
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"child:ProjectConfiguration": "3898391056798628885",
"child:TsConfig": "2264969541778889434",
"child:{projectRoot}/**/*": "7694964870822928111",
"parent:ProjectConfiguration": "4131510303084753861",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "15295586939211629225",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "5219582320960288192",
},
"value": "3140483997697830788",
}
`);
const hashb = await hasher.hashTask(
taskGraph.tasks['child:build'],
taskGraph,
{}
);
expect(hashb).toMatchInlineSnapshot(`
{
"details": {
"AllExternalDependencies": "3244421341483603138",
"child:ProjectConfiguration": "3898391056798628885",
"child:TsConfig": "2264969541778889434",
"child:{projectRoot}/**/*": "7694964870822928111",
"parent:ProjectConfiguration": "4131510303084753861",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "15295586939211629225",
"{workspaceRoot}/.gitignore": "3244421341483603138",
"{workspaceRoot}/.nxignore": "3244421341483603138",
"{workspaceRoot}/nx.json": "5219582320960288192",
},
"value": "3140483997697830788",
}
`);
});
});


@@ -0,0 +1,87 @@
import { NxJsonConfiguration } from '../config/nx-json';
import { ProjectGraph } from '../config/project-graph';
import { Task, TaskGraph } from '../config/task-graph';
import {
ExternalObject,
FileData,
HashPlanner,
HasherOptions,
ProjectGraph as NativeProjectGraph,
transferProjectGraph,
TaskHasher,
NxWorkspaceFilesExternals,
} from '../native';
import { transformProjectGraphForRust } from '../native/transform-objects';
import { PartialHash, TaskHasherImpl } from './task-hasher';
import { readJsonFile } from '../utils/fileutils';
import { getRootTsConfigPath } from '../plugins/js/utils/typescript';
export class NativeTaskHasherImpl implements TaskHasherImpl {
hasher: TaskHasher;
planner: HashPlanner;
projectGraphRef: ExternalObject<NativeProjectGraph>;
allWorkspaceFilesRef: ExternalObject<FileData[]>;
projectFileMapRef: ExternalObject<Record<string, FileData[]>>;
options: HasherOptions | undefined;
constructor(
workspaceRoot: string,
nxJson: NxJsonConfiguration,
projectGraph: ProjectGraph,
externals: NxWorkspaceFilesExternals,
options: { selectivelyHashTsConfig: boolean }
) {
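// Convert the JS project graph once and transfer ownership to the Rust
// side; later planner/hasher calls only pass this external reference
// instead of re-serializing the whole graph across the N-API boundary.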
this.projectGraphRef = transferProjectGraph(
transformProjectGraphForRust(projectGraph)
);
this.allWorkspaceFilesRef = externals.allWorkspaceFiles;
this.projectFileMapRef = externals.projectFiles;
let tsconfig: { compilerOptions?: import('typescript').CompilerOptions } =
{};
let paths = {};
let rootTsConfigPath = getRootTsConfigPath();
if (rootTsConfigPath) {
tsconfig = readJsonFile(rootTsConfigPath);
paths = tsconfig.compilerOptions?.paths ?? {};
delete tsconfig.compilerOptions?.paths;
}
this.planner = new HashPlanner(nxJson, this.projectGraphRef);
this.hasher = new TaskHasher(
workspaceRoot,
this.projectGraphRef,
this.projectFileMapRef,
this.allWorkspaceFilesRef,
Buffer.from(JSON.stringify(tsconfig)),
paths,
options
);
}
async hashTask(
task: Task,
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv
): Promise<PartialHash> {
const plans = this.planner.getPlansReference([task.id], taskGraph);
const hashes = this.hasher.hashPlans(plans, env);
return hashes[task.id];
}
async hashTasks(
tasks: Task[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv
): Promise<PartialHash[]> {
const plans = this.planner.getPlansReference(
tasks.map((t) => t.id),
taskGraph
);
const hashes = this.hasher.hashPlans(plans, env);
return tasks.map((t) => hashes[t.id]);
}
}
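A hypothetical usage sketch, mirroring the spec above (`workspaceFiles` would come from retrieveWorkspaceFiles; all names are illustrative):

const hasherImpl = new NativeTaskHasherImpl(
  workspaceRoot,
  nxJson,
  projectGraph,
  workspaceFiles.rustReferences,
  { selectivelyHashTsConfig: false }
);
const partialHash = await hasherImpl.hashTask(
  taskGraph.tasks['parent:build'],
  taskGraph,
  process.env
);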


@@ -0,0 +1,680 @@
import { NxJsonConfiguration } from '../config/nx-json';
import {
FileData,
ProjectFileMap,
ProjectGraph,
ProjectGraphDependency,
} from '../config/project-graph';
import { createProjectRootMappings } from '../project-graph/utils/find-project-for-path';
import { Task, TaskGraph } from '../config/task-graph';
import { hashArray, hashObject } from './file-hasher';
import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils';
import { workspaceRoot } from '../utils/workspace-root';
import * as minimatch from 'minimatch';
import { join } from 'path';
import { hashFile } from '../native';
import { findAllProjectNodeDependencies } from '../utils/project-graph-utils';
import { findMatchingProjects } from '../utils/find-matching-projects';
import { exec } from 'child_process';
import {
ExpandedDepsOutput,
ExpandedInput,
ExpandedSelfInput,
expandNamedInput,
expandSingleProjectInputs,
extractPatternsFromFileSets,
filterUsingGlobPatterns,
getInputs,
getNamedInputs,
isDepsOutput,
isSelfInput,
PartialHash,
TaskHasher,
TaskHasherImpl,
} from './task-hasher';
import { hashTsConfig } from '../plugins/js/hasher/hasher';
export class NodeTaskHasherImpl implements TaskHasherImpl {
private filesetHashes: {
[taskId: string]: Promise<PartialHash>;
} = {};
private runtimeHashes: {
[runtime: string]: Promise<PartialHash>;
} = {};
private externalDependencyHashes: Map<string, PartialHash[]> = new Map<
string,
PartialHash[]
>();
private allExternalDependenciesHash: PartialHash;
private projectRootMappings = createProjectRootMappings(
this.projectGraph.nodes
);
constructor(
private readonly nxJson: NxJsonConfiguration,
private readonly legacyRuntimeInputs: { runtime: string }[],
private readonly legacyFilesetInputs: { fileset: string }[],
private readonly projectFileMap: ProjectFileMap,
private readonly allWorkspaceFiles: FileData[],
private readonly projectGraph: ProjectGraph,
private readonly options: { selectivelyHashTsConfig: boolean }
) {
// External Dependencies are all calculated up front in a deterministic order
this.calculateExternalDependencyHashes();
}
hashTasks(
tasks: Task[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv
): Promise<PartialHash[]> {
return Promise.all(tasks.map((t) => this.hashTask(t, taskGraph, env, [])));
}
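// `visited` records projects already walked on the current dependency path,
// so hashing terminates even when the project graph contains cycles.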
async hashTask(
task: Task,
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[] = []
): Promise<PartialHash> {
return Promise.resolve().then(async () => {
const { selfInputs, depsInputs, depsOutputs, projectInputs } = getInputs(
task,
this.projectGraph,
this.nxJson
);
const selfAndInputs = await this.hashSelfAndDepsInputs(
task.target.project,
task,
selfInputs,
depsInputs,
depsOutputs,
projectInputs,
taskGraph,
env,
visited
);
const target = this.hashTarget(
task.target.project,
task.target.target,
selfInputs
);
if (target) {
return this.combinePartialHashes([selfAndInputs, target]);
}
return selfAndInputs;
});
}
private async hashNamedInputForDependencies(
projectName: string,
task: Task,
namedInput: string,
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
): Promise<PartialHash> {
const projectNode = this.projectGraph.nodes[projectName];
const namedInputs = {
default: [{ fileset: '{projectRoot}/**/*' }],
...this.nxJson.namedInputs,
...projectNode.data.namedInputs,
};
const expandedInputs = expandNamedInput(namedInput, namedInputs);
const selfInputs = expandedInputs.filter(isSelfInput);
const depsOutputs = expandedInputs.filter(isDepsOutput);
const depsInputs = [{ input: namedInput, dependencies: true as true }]; // `as true` narrows to the literal type required by the deps-input shape
return this.hashSelfAndDepsInputs(
projectName,
task,
selfInputs,
depsInputs,
depsOutputs,
[],
taskGraph,
env,
visited
);
}
private async hashSelfAndDepsInputs(
projectName: string,
task: Task,
selfInputs: ExpandedSelfInput[],
depsInputs: { input: string; dependencies: true }[],
depsOutputs: ExpandedDepsOutput[],
projectInputs: { input: string; projects: string[] }[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
) {
const projectGraphDeps = this.projectGraph.dependencies[projectName] ?? [];
// we don't want random order of dependencies to change the hash
projectGraphDeps.sort((a, b) => a.target.localeCompare(b.target));
const self = await this.hashSingleProjectInputs(
projectName,
selfInputs,
env
);
const deps = await this.hashDepsInputs(
task,
depsInputs,
projectGraphDeps,
taskGraph,
env,
visited
);
const depsOut = await this.hashDepsOutputs(task, depsOutputs, taskGraph);
const projects = await this.hashProjectInputs(projectInputs, env);
return this.combinePartialHashes([
...self,
...deps,
...projects,
...depsOut,
]);
}
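// Merges partial hashes: the detail maps are unioned, and the individual
// hash values are themselves hashed into a single combined value.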
private combinePartialHashes(partialHashes: PartialHash[]): PartialHash {
if (partialHashes.length === 1) {
return partialHashes[0];
}
const details = {};
const hashValues: string[] = [];
for (const partial of partialHashes) {
hashValues.push(partial.value);
Object.assign(details, partial.details);
}
const value = hashArray(hashValues);
return { value, details };
}
private async hashDepsInputs(
task: Task,
inputs: { input: string }[],
projectGraphDeps: ProjectGraphDependency[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
): Promise<PartialHash[]> {
return (
await Promise.all(
inputs.map(async (input) => {
return await Promise.all(
projectGraphDeps.map(async (d) => {
if (visited.indexOf(d.target) > -1) {
return null;
} else {
visited.push(d.target);
if (this.projectGraph.nodes[d.target]) {
return await this.hashNamedInputForDependencies(
d.target,
task,
input.input || 'default',
taskGraph,
env,
visited
);
} else {
return this.getExternalDependencyHash(d.target);
}
}
})
);
})
)
)
.flat()
.filter((r) => !!r);
}
private async hashDepsOutputs(
task: Task,
depsOutputs: ExpandedDepsOutput[],
taskGraph: TaskGraph
): Promise<PartialHash[]> {
if (depsOutputs.length === 0) {
return [];
}
const result: PartialHash[] = [];
for (const { dependentTasksOutputFiles, transitive } of depsOutputs) {
result.push(
...(await this.hashDepOuputs(
task,
dependentTasksOutputFiles,
taskGraph,
transitive
))
);
}
return result;
}
private async hashDepOuputs(
task: Task,
dependentTasksOutputFiles: string,
taskGraph: TaskGraph,
transitive?: boolean
): Promise<PartialHash[]> {
// task has no dependencies
if (!taskGraph.dependencies[task.id]) {
return [];
}
const partialHashes: PartialHash[] = [];
for (const d of taskGraph.dependencies[task.id]) {
const childTask = taskGraph.tasks[d];
const outputs = getOutputsForTargetAndConfiguration(
childTask.target,
childTask.overrides,
this.projectGraph.nodes[childTask.target.project]
);
const { getFilesForOutputs } =
require('../native') as typeof import('../native');
const outputFiles = getFilesForOutputs(workspaceRoot, outputs);
const filteredFiles = outputFiles.filter(
(p) =>
p === dependentTasksOutputFiles ||
minimatch(p, dependentTasksOutputFiles, { dot: true })
);
const hashes: string[] = [];
for (const [, hash] of this.hashFiles(
filteredFiles.map((p) => join(workspaceRoot, p))
)) {
hashes.push(hash);
}
let hash = hashArray(hashes);
partialHashes.push({
value: hash,
details: {
[`${dependentTasksOutputFiles}:${outputs.join(',')}`]: hash,
},
});
if (transitive) {
partialHashes.push(
...(await this.hashDepOuputs(
childTask,
dependentTasksOutputFiles,
taskGraph,
transitive
))
);
}
}
return partialHashes;
}
private hashFiles(files: string[]): Map<string, string> {
const r = new Map<string, string>();
for (let f of files) {
r.set(f, hashFile(f));
}
return r;
}
private getExternalDependencyHash(externalNodeName: string) {
const combinedHash = this.combinePartialHashes(
this.externalDependencyHashes.get(externalNodeName)
);
// Set the combined hash into the hashes so it's not recalculated next time
this.externalDependencyHashes.set(externalNodeName, [combinedHash]);
return combinedHash;
}
private hashSingleExternalDependency(externalNodeName: string): PartialHash {
const node = this.projectGraph.externalNodes[externalNodeName];
if (node.data.hash) {
// we already know the hash of this dependency
return {
value: node.data.hash,
details: {
[externalNodeName]: node.data.hash,
},
};
} else {
// we take version as a hash
return {
value: node.data.version,
details: {
[externalNodeName]: node.data.version,
},
};
}
}
private hashExternalDependency(externalNodeName: string) {
const partialHashes: Set<PartialHash> = new Set<PartialHash>();
partialHashes.add(this.hashSingleExternalDependency(externalNodeName));
const deps = findAllProjectNodeDependencies(
externalNodeName,
this.projectGraph,
true
);
for (const dep of deps) {
partialHashes.add(this.hashSingleExternalDependency(dep));
}
return Array.from(partialHashes);
}
private hashTarget(
projectName: string,
targetName: string,
selfInputs: ExpandedSelfInput[]
): PartialHash {
const projectNode = this.projectGraph.nodes[projectName];
const target = projectNode.data.targets[targetName];
if (!target) {
return;
}
let hash: string;
// we can only vouch for @nx packages' executor dependencies
// if it's "run commands" or third-party we skip traversing since we have no info what this command depends on
if (
target.executor.startsWith(`@nrwl/`) ||
target.executor.startsWith(`@nx/`)
) {
const executorPackage = target.executor.split(':')[0];
const executorNodeName =
this.findExternalDependencyNodeName(executorPackage);
// This is either a local plugin or a non-existent executor
if (!executorNodeName) {
// TODO: This should not return null if it is a local plugin's executor
return null;
}
return this.getExternalDependencyHash(executorNodeName);
} else {
// use command external dependencies if available to construct the hash
const partialHashes: PartialHash[] = [];
let hasCommandExternalDependencies = false;
for (const input of selfInputs) {
if (input['externalDependencies']) {
// if we have externalDependencies with empty array we still want to override the default hash
hasCommandExternalDependencies = true;
const externalDependencies = input['externalDependencies'];
for (const dep of externalDependencies) {
const externalNodeName = this.findExternalDependencyNodeName(dep);
if (!externalNodeName) {
throw new Error(
`The externalDependency "${dep}" for "${projectName}:${targetName}" could not be found`
);
}
partialHashes.push(this.getExternalDependencyHash(externalNodeName));
}
}
}
if (hasCommandExternalDependencies) {
return this.combinePartialHashes(partialHashes);
} else {
// cache the hash of the entire external dependencies tree
if (this.allExternalDependenciesHash) {
return this.allExternalDependenciesHash;
} else {
hash = hashObject(this.projectGraph.externalNodes);
this.allExternalDependenciesHash = {
value: hash,
details: {
AllExternalDependencies: hash,
},
};
return this.allExternalDependenciesHash;
}
}
}
}
private findExternalDependencyNodeName(packageName: string): string | null {
if (this.projectGraph.externalNodes[packageName]) {
return packageName;
}
if (this.projectGraph.externalNodes[`npm:${packageName}`]) {
return `npm:${packageName}`;
}
for (const node of Object.values(this.projectGraph.externalNodes)) {
if (node.data.packageName === packageName) {
return node.name;
}
}
// not found
return null;
}
private async hashSingleProjectInputs(
projectName: string,
inputs: ExpandedInput[],
env: NodeJS.ProcessEnv
): Promise<PartialHash[]> {
const filesets = extractPatternsFromFileSets(inputs);
const projectFilesets = [];
const workspaceFilesets = [];
let invalidFilesetNoPrefix = null;
let invalidFilesetWorkspaceRootNegative = null;
for (let f of filesets) {
if (f.startsWith('{projectRoot}/') || f.startsWith('!{projectRoot}/')) {
projectFilesets.push(f);
} else if (
f.startsWith('{workspaceRoot}/') ||
f.startsWith('!{workspaceRoot}/')
) {
workspaceFilesets.push(f);
} else {
invalidFilesetNoPrefix = f;
}
}
if (invalidFilesetNoPrefix) {
throw new Error(
[
`"${invalidFilesetNoPrefix}" is an invalid fileset.`,
'All filesets have to start with either {workspaceRoot} or {projectRoot}.',
'For instance: "!{projectRoot}/**/*.spec.ts" or "{workspaceRoot}/package.json".',
`If "${invalidFilesetNoPrefix}" is a named input, make sure it is defined in, for instance, nx.json.`,
].join('\n')
);
}
if (invalidFilesetWorkspaceRootNegative) {
throw new Error(
[
`"${invalidFilesetWorkspaceRootNegative}" is an invalid fileset.`,
'It is not possible to negate filesets starting with {workspaceRoot}.',
].join('\n')
);
}
const notFilesets = inputs.filter((r) => !r['fileset']);
return Promise.all([
this.hashProjectFileset(projectName, projectFilesets),
this.hashProjectConfig(projectName),
this.hashTsConfig(projectName),
...[
...workspaceFilesets,
...this.legacyFilesetInputs.map((r) => r.fileset),
].map((fileset) => this.hashRootFileset(fileset)),
...[...notFilesets, ...this.legacyRuntimeInputs].map((r) =>
r['runtime']
? this.hashRuntime(env, r['runtime'])
: this.hashEnv(env, r['env'])
),
]);
}
private async hashProjectInputs(
projectInputs: { input: string; projects: string[] }[],
env: NodeJS.ProcessEnv
): Promise<PartialHash[]> {
const partialHashes: Promise<PartialHash[]>[] = [];
for (const input of projectInputs) {
const projects = findMatchingProjects(
input.projects,
this.projectGraph.nodes
);
for (const project of projects) {
const namedInputs = getNamedInputs(
this.nxJson,
this.projectGraph.nodes[project]
);
const expandedInput = expandSingleProjectInputs(
[{ input: input.input }],
namedInputs
);
partialHashes.push(
this.hashSingleProjectInputs(project, expandedInput, env)
);
}
}
return Promise.all(partialHashes).then((hashes) => hashes.flat());
}
private async hashRootFileset(fileset: string): Promise<PartialHash> {
const mapKey = fileset;
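// '{workspaceRoot}/' is 16 characters; strip it to get the relative glob.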
const withoutWorkspaceRoot = fileset.substring(16);
if (!this.filesetHashes[mapKey]) {
this.filesetHashes[mapKey] = new Promise(async (res) => {
const parts = [];
const matchingFile = this.allWorkspaceFiles.find(
(t) => t.file === withoutWorkspaceRoot
);
if (matchingFile) {
parts.push(matchingFile.hash);
} else {
this.allWorkspaceFiles
.filter((f) => minimatch(f.file, withoutWorkspaceRoot))
.forEach((f) => {
parts.push(f.hash);
});
}
const value = hashArray(parts);
res({
value,
details: { [mapKey]: value },
});
});
}
return this.filesetHashes[mapKey];
}
private hashProjectConfig(projectName: string): PartialHash {
const p = this.projectGraph.nodes[projectName];
const projectConfig = hashArray([
JSON.stringify({ ...p.data, files: undefined }),
]);
return {
value: projectConfig,
details: {
[`${projectName}:ProjectConfiguration`]: projectConfig,
},
};
}
private hashTsConfig(projectName: string): PartialHash {
const p = this.projectGraph.nodes[projectName];
const tsConfig = hashArray([
hashTsConfig(p, this.projectRootMappings, this.options),
]);
return {
value: tsConfig,
details: {
[`${projectName}:TsConfig`]: tsConfig,
},
};
}
private async hashProjectFileset(
projectName: string,
filesetPatterns: string[]
): Promise<PartialHash> {
const mapKey = `${projectName}:${filesetPatterns.join(',')}`;
if (!this.filesetHashes[mapKey]) {
this.filesetHashes[mapKey] = new Promise(async (res) => {
const p = this.projectGraph.nodes[projectName];
const filteredFiles = filterUsingGlobPatterns(
p.data.root,
this.projectFileMap[projectName] || [],
filesetPatterns
);
const files: string[] = [];
for (const { file, hash } of filteredFiles) {
files.push(file, hash);
}
const value = hashArray(files);
res({
value,
details: { [mapKey]: value },
});
});
}
return this.filesetHashes[mapKey];
}
private async hashRuntime(
env: NodeJS.ProcessEnv,
runtime: string
): Promise<PartialHash> {
const env_key = JSON.stringify(env);
const mapKey = `runtime:${runtime}-${env_key}`;
if (!this.runtimeHashes[mapKey]) {
this.runtimeHashes[mapKey] = new Promise((res, rej) => {
exec(
runtime,
{
windowsHide: true,
cwd: workspaceRoot,
env,
},
(err, stdout, stderr) => {
if (err) {
rej(
new Error(
`Nx failed to execute {runtime: '${runtime}'}. ${err}.`
)
);
} else {
const value = hashArray([`${stdout}${stderr}`.trim()]);
res({
details: { [`runtime:${runtime}`]: value },
value,
});
}
}
);
});
}
return this.runtimeHashes[mapKey];
}
private async hashEnv(
env: NodeJS.ProcessEnv,
envVarName: string
): Promise<PartialHash> {
const value = hashArray([env[envVarName] ?? '']);
return {
details: { [`env:${envVarName}`]: value },
value,
};
}
private calculateExternalDependencyHashes() {
const keys = Object.keys(this.projectGraph.externalNodes);
for (const externalNodeName of keys) {
this.externalDependencyHashes.set(
externalNodeName,
this.hashExternalDependency(externalNodeName)
);
}
}
}


@@ -1,5 +1,6 @@
// This must come before the Hasher import
import { TempFs } from '../internal-testing-utils/temp-fs';
let tempFs = new TempFs('TaskHasher');
import { DependencyType } from '../config/project-graph';
@@ -10,6 +11,7 @@ import {
} from './task-hasher';
describe('TaskHasher', () => {
process.env.NX_NATIVE_TASK_HASHER = 'false';
const packageJson = {
name: 'nrwl',
};
@@ -107,6 +109,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{
runtimeCacheInputs: ['echo runtime456'],
}
@@ -175,6 +178,7 @@ describe('TaskHasher', () => {
},
},
{} as any,
null,
{}
);
@@ -261,6 +265,7 @@ describe('TaskHasher', () => {
prod: ['!{projectRoot}/**/*.spec.ts'],
},
} as any,
null,
{}
);
@@ -337,6 +342,7 @@ describe('TaskHasher', () => {
prod: ['!{projectRoot}/**/*.spec.ts'],
},
} as any,
null,
{}
);
@@ -441,6 +447,7 @@ describe('TaskHasher', () => {
prod: ['!{projectRoot}/**/*.spec.ts'],
},
} as any,
null,
{}
);
@@ -542,6 +549,7 @@ describe('TaskHasher', () => {
},
},
} as any,
null,
{}
);
@@ -601,6 +609,7 @@ describe('TaskHasher', () => {
},
{ npmScope: 'nrwl' } as any,
null,
{
runtimeCacheInputs: ['echo runtime123', 'echo runtime456'],
selectivelyHashTsConfig: true,
@@ -666,6 +675,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -740,6 +750,7 @@ describe('TaskHasher', () => {
},
},
{} as any,
null,
{
runtimeCacheInputs: ['boom'],
}
@@ -813,6 +824,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -880,6 +892,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -938,6 +951,7 @@ describe('TaskHasher', () => {
dependencies: {},
},
{} as any,
null,
{}
);
@@ -1024,6 +1038,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
}
@@ -1176,6 +1191,7 @@ describe('TaskHasher', () => {
},
},
{} as any,
null,
{}
);
@@ -1321,6 +1337,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -1391,6 +1408,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -1474,6 +1492,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -1555,6 +1574,7 @@ describe('TaskHasher', () => {
},
{} as any,
null,
{}
);
@@ -1675,6 +1695,7 @@ describe('TaskHasher', () => {
},
},
} as any,
null,
{}
);
@@ -1814,6 +1835,7 @@ describe('TaskHasher', () => {
},
},
} as any,
null,
{}
);


@@ -1,39 +1,19 @@
import { exec } from 'child_process';
import * as minimatch from 'minimatch';
import {
FileData,
ProjectFileMap,
ProjectGraph,
ProjectGraphDependency,
ProjectGraphProjectNode,
} from '../config/project-graph';
import { NxJsonConfiguration } from '../config/nx-json';
import { Task, TaskGraph } from '../config/task-graph';
import { InputDefinition } from '../config/workspace-json-project-json';
import { hashTsConfig } from '../plugins/js/hasher/hasher';
import { DaemonClient } from '../daemon/client/client';
import { createProjectRootMappings } from '../project-graph/utils/find-project-for-path';
import { findMatchingProjects } from '../utils/find-matching-projects';
import { hashArray, hashObject } from './file-hasher';
import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils';
import { hashArray } from './file-hasher';
import { NodeTaskHasherImpl } from './node-task-hasher-impl';
import { InputDefinition } from '../config/workspace-json-project-json';
import * as minimatch from 'minimatch';
import { NativeTaskHasherImpl } from './native-task-hasher-impl';
import { workspaceRoot } from '../utils/workspace-root';
import { join, relative } from 'path';
import { normalizePath } from '../utils/path';
import { findAllProjectNodeDependencies } from '../utils/project-graph-utils';
import { hashFile } from '../native';
type ExpandedSelfInput =
| { fileset: string }
| { runtime: string }
| { env: string }
| { externalDependencies: string[] };
type ExpandedDepsOutput = {
dependentTasksOutputFiles: string;
transitive?: boolean;
};
type ExpandedInput = ExpandedSelfInput | ExpandedDepsOutput;
import { NxWorkspaceFilesExternals } from '../native';
/**
* A data structure returned by the default hasher.
@@ -94,6 +74,21 @@ export interface TaskHasher {
): Promise<Hash[]>;
}
export interface TaskHasherImpl {
hashTasks(
tasks: Task[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv
): Promise<PartialHash[]>;
hashTask(
task: Task,
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited?: string[]
): Promise<PartialHash>;
}
export type Hasher = TaskHasher;
export class DaemonBasedTaskHasher implements TaskHasher {
@@ -135,11 +130,14 @@ export class InProcessTaskHasher implements TaskHasher {
static version = '3.0';
private taskHasher: TaskHasherImpl;
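// The native (Rust) hasher is the default; setting NX_NATIVE_TASK_HASHER=false
// opts back into the Node implementation.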
private useNativeTaskHasher = process.env.NX_NATIVE_TASK_HASHER !== 'false';
constructor(
private readonly projectFileMap: ProjectFileMap,
private readonly allWorkspaceFiles: FileData[],
private readonly projectGraph: ProjectGraph,
private readonly nxJson: NxJsonConfiguration,
private readonly externalRustReferences: NxWorkspaceFilesExternals | null,
private readonly options: any
) {
const legacyRuntimeInputs = (
@@ -160,15 +158,29 @@ export class InProcessTaskHasher implements TaskHasher {
'.nxignore',
].map((d) => ({ fileset: `{workspaceRoot}/${d}` }));
this.taskHasher = new TaskHasherImpl(
nxJson,
legacyRuntimeInputs,
legacyFilesetInputs,
this.projectFileMap,
this.allWorkspaceFiles,
this.projectGraph,
{ selectivelyHashTsConfig: this.options.selectivelyHashTsConfig ?? false }
);
this.taskHasher = !this.useNativeTaskHasher
? new NodeTaskHasherImpl(
nxJson,
legacyRuntimeInputs,
legacyFilesetInputs,
this.projectFileMap,
this.allWorkspaceFiles,
this.projectGraph,
{
selectivelyHashTsConfig:
this.options.selectivelyHashTsConfig ?? false,
}
)
: new NativeTaskHasherImpl(
workspaceRoot,
nxJson,
this.projectGraph,
this.externalRustReferences,
{
selectivelyHashTsConfig:
this.options.selectivelyHashTsConfig ?? false,
}
);
}
async hashTasks(
@@ -176,9 +188,20 @@ export class InProcessTaskHasher implements TaskHasher {
taskGraph?: TaskGraph,
env?: NodeJS.ProcessEnv
): Promise<Hash[]> {
return await Promise.all(
tasks.map((t) => this.hashTask(t, taskGraph, env))
);
if (this.useNativeTaskHasher) {
const hashes = await this.taskHasher.hashTasks(
tasks,
taskGraph,
env ?? process.env
);
return tasks.map((task, index) =>
this.createHashDetails(task, hashes[index])
);
} else {
return await Promise.all(
tasks.map((t) => this.hashTask(t, taskGraph, env))
);
}
}
async hashTask(
@@ -189,9 +212,12 @@ export class InProcessTaskHasher implements TaskHasher {
const res = await this.taskHasher.hashTask(
task,
taskGraph,
env ?? process.env,
[task.target.project]
env ?? process.env
);
return this.createHashDetails(task, res);
}
private createHashDetails(task: Task, res: PartialHash) {
const command = this.hashCommand(task);
return {
value: hashArray([res.value, command]),
@@ -221,6 +247,16 @@ export class InProcessTaskHasher implements TaskHasher {
}
}
export type ExpandedSelfInput =
| { fileset: string }
| { runtime: string }
| { env: string }
| { externalDependencies: string[] };
export type ExpandedDepsOutput = {
dependentTasksOutputFiles: string;
transitive?: boolean;
};
export type ExpandedInput = ExpandedSelfInput | ExpandedDepsOutput;
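Illustrative values for each exported input shape (not from the diff):

const filesetInput: ExpandedSelfInput = { fileset: '{projectRoot}/**/*' };
const runtimeInput: ExpandedSelfInput = { runtime: 'node --version' };
const envInput: ExpandedSelfInput = { env: 'CI' };
const depsOutput: ExpandedDepsOutput = { dependentTasksOutputFiles: '**/*.d.ts' };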
const DEFAULT_INPUTS: ReadonlyArray<InputDefinition> = [
{
fileset: '{projectRoot}/**/*',
@@ -231,643 +267,6 @@ const DEFAULT_INPUTS: ReadonlyArray<InputDefinition> = [
},
];
class TaskHasherImpl {
private filesetHashes: {
[taskId: string]: Promise<PartialHash>;
} = {};
private runtimeHashes: {
[runtime: string]: Promise<PartialHash>;
} = {};
private externalDependencyHashes: Map<string, PartialHash[]> = new Map<
string,
PartialHash[]
>();
private allExternalDependenciesHash: PartialHash;
private projectRootMappings = createProjectRootMappings(
this.projectGraph.nodes
);
constructor(
private readonly nxJson: NxJsonConfiguration,
private readonly legacyRuntimeInputs: { runtime: string }[],
private readonly legacyFilesetInputs: { fileset: string }[],
private readonly projectFileMap: ProjectFileMap,
private readonly allWorkspaceFiles: FileData[],
private readonly projectGraph: ProjectGraph,
private readonly options: { selectivelyHashTsConfig: boolean }
) {
// External Dependencies are all calculated up front in a deterministic order
this.calculateExternalDependencyHashes();
}
async hashTask(
task: Task,
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
): Promise<PartialHash> {
return Promise.resolve().then(async () => {
const { selfInputs, depsInputs, depsOutputs, projectInputs } = getInputs(
task,
this.projectGraph,
this.nxJson
);
const selfAndInputs = await this.hashSelfAndDepsInputs(
task.target.project,
task,
selfInputs,
depsInputs,
depsOutputs,
projectInputs,
taskGraph,
env,
visited
);
const target = this.hashTarget(
task.target.project,
task.target.target,
selfInputs
);
if (target) {
return this.combinePartialHashes([selfAndInputs, target]);
}
return selfAndInputs;
});
}
private async hashNamedInputForDependencies(
projectName: string,
task: Task,
namedInput: string,
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
): Promise<PartialHash> {
const projectNode = this.projectGraph.nodes[projectName];
const namedInputs = {
default: [{ fileset: '{projectRoot}/**/*' }],
...this.nxJson.namedInputs,
...projectNode.data.namedInputs,
};
const expandedInputs = expandNamedInput(namedInput, namedInputs);
const selfInputs = expandedInputs.filter(isSelfInput);
const depsOutputs = expandedInputs.filter(isDepsOutput);
const depsInputs = [{ input: namedInput, dependencies: true as true }]; // true is boolean by default
return this.hashSelfAndDepsInputs(
projectName,
task,
selfInputs,
depsInputs,
depsOutputs,
[],
taskGraph,
env,
visited
);
}
private async hashSelfAndDepsInputs(
projectName: string,
task: Task,
selfInputs: ExpandedSelfInput[],
depsInputs: { input: string; dependencies: true }[],
depsOutputs: ExpandedDepsOutput[],
projectInputs: { input: string; projects: string[] }[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
) {
const projectGraphDeps = this.projectGraph.dependencies[projectName] ?? [];
// we don't want random order of dependencies to change the hash
projectGraphDeps.sort((a, b) => a.target.localeCompare(b.target));
const self = await this.hashSingleProjectInputs(
projectName,
selfInputs,
env
);
const deps = await this.hashDepsInputs(
task,
depsInputs,
projectGraphDeps,
taskGraph,
env,
visited
);
const depsOut = await this.hashDepsOutputs(task, depsOutputs, taskGraph);
const projects = await this.hashProjectInputs(projectInputs, env);
return this.combinePartialHashes([
...self,
...deps,
...projects,
...depsOut,
]);
}
private combinePartialHashes(partialHashes: PartialHash[]): PartialHash {
if (partialHashes.length === 1) {
return partialHashes[0];
}
const details = {};
const hashValues: string[] = [];
for (const partial of partialHashes) {
hashValues.push(partial.value);
Object.assign(details, partial.details);
}
const value = hashArray(hashValues);
return { value, details };
}
private async hashDepsInputs(
task: Task,
inputs: { input: string }[],
projectGraphDeps: ProjectGraphDependency[],
taskGraph: TaskGraph,
env: NodeJS.ProcessEnv,
visited: string[]
): Promise<PartialHash[]> {
return (
await Promise.all(
inputs.map(async (input) => {
return await Promise.all(
projectGraphDeps.map(async (d) => {
if (visited.indexOf(d.target) > -1) {
return null;
} else {
visited.push(d.target);
if (this.projectGraph.nodes[d.target]) {
return await this.hashNamedInputForDependencies(
d.target,
task,
input.input || 'default',
taskGraph,
env,
visited
);
} else {
return this.getExternalDependencyHash(d.target);
}
}
})
);
})
)
)
.flat()
.filter((r) => !!r);
}
private async hashDepsOutputs(
task: Task,
depsOutputs: ExpandedDepsOutput[],
taskGraph: TaskGraph
): Promise<PartialHash[]> {
if (depsOutputs.length === 0) {
return [];
}
const result: PartialHash[] = [];
for (const { dependentTasksOutputFiles, transitive } of depsOutputs) {
result.push(
...(await this.hashDepOutputs(
task,
dependentTasksOutputFiles,
taskGraph,
transitive
))
);
}
return result;
}
private async hashDepOutputs(
task: Task,
dependentTasksOutputFiles: string,
taskGraph: TaskGraph,
transitive?: boolean
): Promise<PartialHash[]> {
// task has no dependencies
if (!taskGraph.dependencies[task.id]) {
return [];
}
const partialHashes: PartialHash[] = [];
for (const d of taskGraph.dependencies[task.id]) {
const childTask = taskGraph.tasks[d];
const outputs = getOutputsForTargetAndConfiguration(
childTask.target,
childTask.overrides,
this.projectGraph.nodes[childTask.target.project]
);
const { getFilesForOutputs } =
require('../native') as typeof import('../native');
const outputFiles = getFilesForOutputs(workspaceRoot, outputs);
const filteredFiles = outputFiles.filter(
(p) =>
p === dependentTasksOutputFiles ||
minimatch(p, dependentTasksOutputFiles, { dot: true })
);
const hashes: string[] = [];
for (const hash of this.hashFiles(
filteredFiles.map((p) => join(workspaceRoot, p))
).values()) {
hashes.push(hash);
}
const hash = hashArray(hashes);
partialHashes.push({
value: hash,
details: {
[`${dependentTasksOutputFiles}:${outputs.join(',')}`]: hash,
},
});
if (transitive) {
partialHashes.push(
...(await this.hashDepOutputs(
childTask,
dependentTasksOutputFiles,
taskGraph,
transitive
))
);
}
}
return partialHashes;
}
private hashFiles(files: string[]): Map<string, string> {
const r = new Map<string, string>();
for (let f of files) {
r.set(f, hashFile(f));
}
return r;
}
private getExternalDependencyHash(externalNodeName: string) {
const combinedHash = this.combinePartialHashes(
this.externalDependencyHashes.get(externalNodeName)
);
// Store the combined hash back so it is not recalculated next time
this.externalDependencyHashes.set(externalNodeName, [combinedHash]);
return combinedHash;
}
private hashSingleExternalDependency(externalNodeName: string): PartialHash {
const node = this.projectGraph.externalNodes[externalNodeName];
if (node.data.hash) {
// we already know the hash of this dependency
return {
value: node.data.hash,
details: {
[externalNodeName]: node.data.hash,
},
};
} else {
// fall back to the version as the hash
return {
value: node.data.version,
details: {
[externalNodeName]: node.data.version,
},
};
}
}
private hashExternalDependency(externalNodeName: string) {
const partialHashes: Set<PartialHash> = new Set<PartialHash>();
partialHashes.add(this.hashSingleExternalDependency(externalNodeName));
const deps = findAllProjectNodeDependencies(
externalNodeName,
this.projectGraph,
true
);
for (const dep of deps) {
partialHashes.add(this.hashSingleExternalDependency(dep));
}
return Array.from(partialHashes);
}
private hashTarget(
projectName: string,
targetName: string,
selfInputs: ExpandedSelfInput[]
): PartialHash {
const projectNode = this.projectGraph.nodes[projectName];
const target = projectNode.data.targets[targetName];
if (!target) {
return;
}
let hash: string;
// we can only vouch for @nx packages' executor dependencies
// if it's "run commands" or a third-party executor, we skip traversing since we have no info on what the command depends on
if (
target.executor.startsWith(`@nrwl/`) ||
target.executor.startsWith(`@nx/`)
) {
const executorPackage = target.executor.split(':')[0];
const executorNodeName =
this.findExternalDependencyNodeName(executorPackage);
// This is either a local plugin or a non-existent executor
if (!executorNodeName) {
// TODO: This should not return null if it is a local plugin's executor
return null;
}
return this.getExternalDependencyHash(executorNodeName);
} else {
// use command external dependencies if available to construct the hash
const partialHashes: PartialHash[] = [];
let hasCommandExternalDependencies = false;
for (const input of selfInputs) {
if (input['externalDependencies']) {
// externalDependencies, even as an empty array, still overrides the default hash
hasCommandExternalDependencies = true;
const externalDependencies = input['externalDependencies'];
for (const packageName of externalDependencies) {
const dep = this.findExternalDependencyNodeName(packageName);
if (!dep) {
throw new Error(
`The externalDependency "${packageName}" for "${projectName}:${targetName}" could not be found`
);
}
partialHashes.push(this.getExternalDependencyHash(dep));
}
}
}
if (hasCommandExternalDependencies) {
return this.combinePartialHashes(partialHashes);
} else {
// cache the hash of the entire external dependencies tree
if (this.allExternalDependenciesHash) {
return this.allExternalDependenciesHash;
} else {
hash = hashObject(this.projectGraph.externalNodes);
this.allExternalDependenciesHash = {
value: hash,
details: {
AllExternalDependencies: hash,
},
};
return this.allExternalDependenciesHash;
}
}
}
}
private findExternalDependencyNodeName(packageName: string): string | null {
if (this.projectGraph.externalNodes[packageName]) {
return packageName;
}
if (this.projectGraph.externalNodes[`npm:${packageName}`]) {
return `npm:${packageName}`;
}
for (const node of Object.values(this.projectGraph.externalNodes)) {
if (node.data.packageName === packageName) {
return node.name;
}
}
// not found
return null;
}
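Resolution order at a glance (illustrative values, not from the diff):
// given externalNodes = { 'npm:typescript': { packageName: 'typescript', ... } }:
// findExternalDependencyNodeName('npm:typescript') -> 'npm:typescript' (exact node name)
// findExternalDependencyNodeName('typescript')     -> 'npm:typescript' ('npm:' prefix, then packageName scan)
// findExternalDependencyNodeName('left-pad')       -> null (not found)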
private async hashSingleProjectInputs(
projectName: string,
inputs: ExpandedInput[],
env: NodeJS.ProcessEnv
): Promise<PartialHash[]> {
const filesets = extractPatternsFromFileSets(inputs);
const projectFilesets = [];
const workspaceFilesets = [];
let invalidFilesetNoPrefix = null;
let invalidFilesetWorkspaceRootNegative = null;
for (let f of filesets) {
if (f.startsWith('{projectRoot}/') || f.startsWith('!{projectRoot}/')) {
projectFilesets.push(f);
} else if (
f.startsWith('{workspaceRoot}/') ||
f.startsWith('!{workspaceRoot}/')
) {
workspaceFilesets.push(f);
} else {
invalidFilesetNoPrefix = f;
}
}
if (invalidFilesetNoPrefix) {
throw new Error(
[
`"${invalidFilesetNoPrefix}" is an invalid fileset.`,
'All filesets have to start with either {workspaceRoot} or {projectRoot}.',
'For instance: "!{projectRoot}/**/*.spec.ts" or "{workspaceRoot}/package.json".',
`If "${invalidFilesetNoPrefix}" is a named input, make sure it is defined in, for instance, nx.json.`,
].join('\n')
);
}
if (invalidFilesetWorkspaceRootNegative) {
throw new Error(
[
`"${invalidFilesetWorkspaceRootNegative}" is an invalid fileset.`,
'It is not possible to negate filesets starting with {workspaceRoot}.',
].join('\n')
);
}
const notFilesets = inputs.filter((r) => !r['fileset']);
return Promise.all([
this.hashProjectFileset(projectName, projectFilesets),
this.hashProjectConfig(projectName),
this.hashTsConfig(projectName),
...[
...workspaceFilesets,
...this.legacyFilesetInputs.map((r) => r.fileset),
].map((fileset) => this.hashRootFileset(fileset)),
...[...notFilesets, ...this.legacyRuntimeInputs].map((r) =>
r['runtime']
? this.hashRuntime(env, r['runtime'])
: this.hashEnv(env, r['env'])
),
]);
}
private async hashProjectInputs(
projectInputs: { input: string; projects: string[] }[],
env: NodeJS.ProcessEnv
): Promise<PartialHash[]> {
const partialHashes: Promise<PartialHash[]>[] = [];
for (const input of projectInputs) {
const projects = findMatchingProjects(
input.projects,
this.projectGraph.nodes
);
for (const project of projects) {
const namedInputs = getNamedInputs(
this.nxJson,
this.projectGraph.nodes[project]
);
const expandedInput = expandSingleProjectInputs(
[{ input: input.input }],
namedInputs
);
partialHashes.push(
this.hashSingleProjectInputs(project, expandedInput, env)
);
}
}
return Promise.all(partialHashes).then((hashes) => hashes.flat());
}
private async hashRootFileset(fileset: string): Promise<PartialHash> {
const mapKey = fileset;
const withoutWorkspaceRoot = fileset.substring(16); // strip the leading '{workspaceRoot}/' (16 chars)
if (!this.filesetHashes[mapKey]) {
this.filesetHashes[mapKey] = new Promise(async (res) => {
const parts = [];
const matchingFile = this.allWorkspaceFiles.find(
(t) => t.file === withoutWorkspaceRoot
);
if (matchingFile) {
parts.push(matchingFile.hash);
} else {
this.allWorkspaceFiles
.filter((f) => minimatch(f.file, withoutWorkspaceRoot))
.forEach((f) => {
parts.push(f.hash);
});
}
const value = hashArray(parts);
res({
value,
details: { [mapKey]: value },
});
});
}
return this.filesetHashes[mapKey];
}
private hashProjectConfig(projectName: string): PartialHash {
const p = this.projectGraph.nodes[projectName];
const projectConfig = hashArray([
JSON.stringify({ ...p.data, files: undefined }),
]);
return {
value: projectConfig,
details: {
[`${projectName}:ProjectConfiguration`]: projectConfig,
},
};
}
private hashTsConfig(projectName: string): PartialHash {
const p = this.projectGraph.nodes[projectName];
const tsConfig = hashArray([
hashTsConfig(p, this.projectRootMappings, this.options),
]);
return {
value: tsConfig,
details: {
[`${projectName}:TsConfig`]: tsConfig,
},
};
}
private async hashProjectFileset(
projectName: string,
filesetPatterns: string[]
): Promise<PartialHash> {
const mapKey = `${projectName}:${filesetPatterns.join(',')}`;
if (!this.filesetHashes[mapKey]) {
this.filesetHashes[mapKey] = new Promise(async (res) => {
const p = this.projectGraph.nodes[projectName];
const filteredFiles = filterUsingGlobPatterns(
p.data.root,
this.projectFileMap[projectName] || [],
filesetPatterns
);
const files: string[] = [];
for (const { file, hash } of filteredFiles) {
files.push(file, hash);
}
const value = hashArray(files);
res({
value,
details: { [mapKey]: value },
});
});
}
return this.filesetHashes[mapKey];
}
private async hashRuntime(
env: NodeJS.ProcessEnv,
runtime: string
): Promise<PartialHash> {
const env_key = JSON.stringify(env);
const mapKey = `runtime:${runtime}-${env_key}`;
if (!this.runtimeHashes[mapKey]) {
this.runtimeHashes[mapKey] = new Promise((res, rej) => {
exec(
runtime,
{
windowsHide: true,
cwd: workspaceRoot,
env,
},
(err, stdout, stderr) => {
if (err) {
rej(
new Error(
`Nx failed to execute {runtime: '${runtime}'}. ${err}.`
)
);
} else {
const value = hashArray([`${stdout}${stderr}`.trim()]);
res({
details: { [`runtime:${runtime}`]: value },
value,
});
}
}
);
});
}
return this.runtimeHashes[mapKey];
}
private async hashEnv(
env: NodeJS.ProcessEnv,
envVarName: string
): Promise<PartialHash> {
const value = hashArray([env[envVarName] ?? '']);
return {
details: { [`env:${envVarName}`]: value },
value,
};
}
private calculateExternalDependencyHashes() {
const keys = Object.keys(this.projectGraph.externalNodes);
for (const externalNodeName of keys) {
this.externalDependencyHashes.set(
externalNodeName,
this.hashExternalDependency(externalNodeName)
);
}
}
}
export function getNamedInputs(
nxJson: NxJsonConfiguration,
project: ProjectGraphProjectNode
@ -983,15 +382,17 @@ function splitInputsIntoSelfAndDependencies(
};
}
function isSelfInput(input: ExpandedInput): input is ExpandedSelfInput {
export function isSelfInput(input: ExpandedInput): input is ExpandedSelfInput {
return !('dependentTasksOutputFiles' in input);
}
function isDepsOutput(input: ExpandedInput): input is ExpandedDepsOutput {
export function isDepsOutput(
input: ExpandedInput
): input is ExpandedDepsOutput {
return 'dependentTasksOutputFiles' in input;
}
function expandSingleProjectInputs(
export function expandSingleProjectInputs(
inputs: ReadonlyArray<InputDefinition | string>,
namedInputs: { [inputName: string]: ReadonlyArray<InputDefinition | string> }
): ExpandedInput[] {

View File

@ -59,7 +59,7 @@ export class TempFs {
}
async readFile(filePath: string): Promise<string> {
return await readFile(filePath, 'utf-8');
return await readFile(joinPathFragments(this.tempDir, filePath), 'utf-8');
}
removeFileSync(filePath: string): void {

View File

@ -1,7 +1,7 @@
use std::path::PathBuf;
use crate::native::glob::build_glob_set;
use crate::native::utils::path::Normalize;
use crate::native::utils::Normalize;
use crate::native::walker::nx_walker_sync;
#[napi]
@ -58,6 +58,7 @@ pub fn get_files_for_outputs(
}
if !globs.is_empty() {
// todo(jcammisuli): optimize this as nx_walker_sync is very slow on the root directory. We need to change this to only search smaller directories
let glob_set = build_glob_set(&globs)?;
let found_paths = nx_walker_sync(&directory).filter_map(|path| {
if glob_set.is_match(&path) {
@ -74,17 +75,15 @@ pub fn get_files_for_outputs(
for dir in directories {
let dir = PathBuf::from(dir);
let dir_path = directory.join(&dir);
let files_in_dir: Vec<String> = nx_walker_sync(&dir_path)
.filter_map(|e| {
let path = dir_path.join(&e);
let files_in_dir = nx_walker_sync(&dir_path).filter_map(|e| {
let path = dir_path.join(&e);
if path.is_file() {
Some(dir.join(e).to_normalized_string())
} else {
None
}
})
.collect();
if path.is_file() {
Some(dir.join(e).to_normalized_string())
} else {
None
}
});
files.extend(files_in_dir);
}
}

View File

@ -65,7 +65,14 @@ pub struct NxGlobSet {
}
impl NxGlobSet {
pub fn is_match<P: AsRef<Path>>(&self, path: P) -> bool {
self.included_globs.is_match(path.as_ref()) && !self.excluded_globs.is_match(path.as_ref())
if self.included_globs.is_empty() {
!self.excluded_globs.is_match(path.as_ref())
} else if self.excluded_globs.is_empty() {
self.included_globs.is_match(path.as_ref())
} else {
self.included_globs.is_match(path.as_ref())
&& !self.excluded_globs.is_match(path.as_ref())
}
}
}
@ -106,7 +113,15 @@ mod test {
#[test]
fn should_work_with_simple_globs() {
let glob_set = build_glob_set(&["**/*"]).unwrap();
assert!(glob_set.is_match("packages/nx/package.json"))
assert!(glob_set.is_match("packages/nx/package.json"));
let glob_set = build_glob_set(&["!test/*.spec.ts"]).unwrap();
assert!(!glob_set.is_match("test/file.spec.ts"));
assert!(glob_set.is_match("test/file.ts"));
let glob_set = build_glob_set(&["test/*.spec.ts"]).unwrap();
assert!(glob_set.is_match("test/file.spec.ts"));
assert!(!glob_set.is_match("test/file.ts"));
}
#[test]

View File

@ -1,4 +1,4 @@
use crate::native::utils::path::Normalize;
use crate::native::utils::Normalize;
use crate::native::walker::nx_walker;
use std::collections::HashMap;
use xxhash_rust::xxh3;

View File

@ -3,6 +3,12 @@
/* auto-generated by NAPI-RS */
export class ExternalObject<T> {
readonly '': {
readonly '': unique symbol
[K: symbol]: T
}
}
/**
* Expands the given entries into a list of existing directories and files.
* This is used for copying outputs to and from the cache
@ -19,6 +25,11 @@ export function hashArray(input: Array<string>): string
export function hashFile(file: string): string | null
export function hashFiles(workspaceRoot: string): Record<string, string>
export function findImports(projectFileMap: Record<string, Array<string>>): Array<ImportResult>
/**
* Transfer the project graph from the JS world to the Rust world, so that we can pass the project graph via memory quicker
* This won't be needed once the project graph is created in Rust
*/
export function transferProjectGraph(projectGraph: ProjectGraph): ExternalObject<ProjectGraph>
export interface ExternalNodeData {
version: string
hash?: string
@ -31,6 +42,8 @@ export interface Target {
executor?: string
inputs?: Array<JsInputs>
outputs?: Array<string>
options?: string
configurations?: string
}
export interface Project {
root: string
@ -43,6 +56,13 @@ export interface ProjectGraph {
dependencies: Record<string, Array<string>>
externalNodes: Record<string, ExternalNode>
}
export interface HashDetails {
value: string
details: Record<string, string>
}
export interface HasherOptions {
selectivelyHashTsConfig: boolean
}
export interface Task {
id: string
target: TaskTarget
@ -87,7 +107,6 @@ export interface DepsOutputsInput {
/** Stripped version of the NxJson interface for use in rust */
export interface NxJson {
namedInputs?: Record<string, Array<JsInputs>>
targetDefaults?: Record<string, Target>
}
export const enum EventType {
delete = 'delete',
@ -104,8 +123,22 @@ export const enum WorkspaceErrors {
Generic = 'Generic'
}
export interface NxWorkspaceFiles {
projectFileMap: Record<string, Array<FileData>>
projectFileMap: ProjectFiles
globalFiles: Array<FileData>
externalReferences?: NxWorkspaceFilesExternals
}
export interface NxWorkspaceFilesExternals {
projectFiles: ExternalObject<ProjectFiles>
globalFiles: ExternalObject<Array<FileData>>
allWorkspaceFiles: ExternalObject<Array<FileData>>
}
export interface UpdatedWorkspaceFiles {
fileMap: FileMap
externalReferences: NxWorkspaceFilesExternals
}
export interface FileMap {
projectFileMap: ProjectFiles
nonProjectFiles: Array<FileData>
}
export class ImportResult {
file: string
@ -114,10 +147,14 @@ export class ImportResult {
staticImportExpressions: Array<string>
}
export class HashPlanner {
constructor(workspaceRoot: string, nxJson: NxJson, projectGraph: ProjectGraph)
constructor(nxJson: NxJson, projectGraph: ExternalObject<ProjectGraph>)
getPlans(taskIds: Array<string>, taskGraph: TaskGraph): Record<string, string[]>
getPlansReference(taskIds: Array<string>, taskGraph: TaskGraph): JsExternal
}
export class TaskHasher {
constructor(workspaceRoot: string, projectGraph: ExternalObject<ProjectGraph>, projectFileMap: ExternalObject<ProjectFiles>, allWorkspaceFiles: ExternalObject<Array<FileData>>, tsConfig: Buffer, tsConfigPaths: Record<string, Array<string>>, options?: HasherOptions | undefined | null)
hashPlans(hashPlans: ExternalObject<Record<string, Array<HashInstruction>>>, jsEnv: Record<string, string>): NapiDashMap
}
export class Watcher {
origin: string
/**
@ -139,5 +176,6 @@ export class WorkspaceContext {
hashFilesMatchingGlob(globs: Array<string>, exclude?: Array<string> | undefined | null): string
getProjectConfigurations(globs: Array<string>, parseConfigurations: (arg0: Array<string>) => Promise<Record<string, string>>): Promise<Record<string, string>>
incrementalUpdate(updatedFiles: Array<string>, deletedFiles: Array<string>): Record<string, string>
updateProjectFiles(projectRootMappings: ProjectRootMappings, projectFiles: ExternalObject<ProjectFiles>, globalFiles: ExternalObject<Array<FileData>>, updatedFiles: Record<string, string>, deletedFiles: Array<string>): UpdatedWorkspaceFiles
allFileData(): Array<FileData>
}
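Taken together, the new surface is meant to be wired up roughly like this (an illustrative sketch, not part of the diff; rustGraph, nxJson, taskIds, taskGraph, externals, tsConfigBuffer, and tsConfigPaths are placeholders, and types are shown loosely):
const graphRef = transferProjectGraph(rustGraph); // crosses the JS -> Rust boundary once
const planner = new HashPlanner(nxJson, graphRef);
const plansRef = planner.getPlansReference(taskIds, taskGraph); // plans stay in Rust memory
const hasher = new TaskHasher(
  workspaceRoot,
  graphRef,
  externals.projectFiles, // NxWorkspaceFilesExternals references
  externals.allWorkspaceFiles,
  tsConfigBuffer,
  tsConfigPaths,
  { selectivelyHashTsConfig: false }
);
const hashes = hasher.hashPlans(plansRef as any, process.env as Record<string, string>);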

View File

@ -246,7 +246,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}
const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, HashPlanner, EventType, Watcher, WorkspaceContext, WorkspaceErrors } = nativeBinding
const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, transferProjectGraph, HashPlanner, TaskHasher, EventType, Watcher, WorkspaceContext, WorkspaceErrors } = nativeBinding
module.exports.expandOutputs = expandOutputs
module.exports.getFilesForOutputs = getFilesForOutputs
@ -257,7 +257,9 @@ module.exports.hashFile = hashFile
module.exports.hashFiles = hashFiles
module.exports.ImportResult = ImportResult
module.exports.findImports = findImports
module.exports.transferProjectGraph = transferProjectGraph
module.exports.HashPlanner = HashPlanner
module.exports.TaskHasher = TaskHasher
module.exports.EventType = EventType
module.exports.Watcher = Watcher
module.exports.WorkspaceContext = WorkspaceContext

View File

@ -1,4 +1,5 @@
use colored::Colorize;
use std::io::IsTerminal;
use tracing::{Event, Level, Subscriber};
use tracing_subscriber::fmt::{format, FmtContext, FormatEvent, FormatFields, FormattedFields};
use tracing_subscriber::registry::LookupSpan;
@ -21,7 +22,7 @@ where
let level = *metadata.level();
match level {
Level::TRACE | Level::DEBUG => {
Level::TRACE => {
write!(
&mut writer,
"{} {}: ",
@ -29,6 +30,15 @@ where
metadata.target()
)?;
}
Level::DEBUG => {
write!(
&mut writer,
"{} {}: ",
format!("{}", metadata.level()).bold().bright_blue(),
metadata.target()
)?;
}
Level::WARN => {
write!(&mut writer, "\n{} {} ", ">".yellow(), "NX".bold().yellow())?;
}
@ -76,6 +86,7 @@ pub(crate) fn enable_logger() {
EnvFilter::try_from_env("NX_NATIVE_LOGGING").unwrap_or_else(|_| EnvFilter::new("ERROR"));
_ = tracing_subscriber::fmt()
.with_env_filter(env_filter)
.with_ansi(std::io::stdout().is_terminal())
.event_format(NxLogFormatter)
.try_init()
.ok();

View File

@ -666,7 +666,7 @@ fn find_imports(
mod find_imports {
use super::*;
use crate::native::glob::build_glob_set;
use crate::native::utils::path::Normalize;
use crate::native::utils::Normalize;
use crate::native::walker::nx_walker;
use assert_fs::prelude::*;
use assert_fs::TempDir;

View File

@ -1 +1,3 @@
pub mod transfer_project_graph;
pub mod types;
pub mod utils;

View File

@ -0,0 +1,9 @@
use crate::native::project_graph::types::ProjectGraph;
use napi::bindgen_prelude::External;
#[napi]
/// Transfer the project graph from the JS world to the Rust world, so that we can pass the project graph via memory quicker
/// This won't be needed once the project graph is created in Rust
pub fn transfer_project_graph(project_graph: ProjectGraph) -> External<ProjectGraph> {
External::new(project_graph)
}
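A sketch of how the returned handle can be consumed on the Rust side (illustrative; project_count is a hypothetical endpoint, not part of this commit):
#[napi]
pub fn project_count(project_graph: External<ProjectGraph>) -> u32 {
    // External<T> dereferences to &T, so the graph is read in place,
    // with no serialization across the N-API boundary.
    project_graph.nodes.len() as u32
}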

View File

@ -19,9 +19,12 @@ pub struct Target {
pub executor: Option<String>,
pub inputs: Option<Vec<JsInputs>>,
pub outputs: Option<Vec<String>>,
pub options: Option<String>,
pub configurations: Option<String>,
}
#[napi(object)]
#[derive(Default)]
pub struct Project {
pub root: String,
pub named_inputs: Option<HashMap<String, Vec<JsInputs>>>,

View File

@ -0,0 +1,32 @@
use crate::native::project_graph::types::Project;
use std::collections::HashMap;
mod find_project_for_path;
pub use find_project_for_path::*;
pub type ProjectRootMappings = HashMap<String, String>;
pub fn create_project_root_mappings(nodes: &HashMap<String, Project>) -> ProjectRootMappings {
let mut project_root_mappings = HashMap::new();
for (project_name, node) in nodes {
project_root_mappings.insert(
normalize_project_root(node.root.clone()),
project_name.clone(),
);
}
project_root_mappings
}
pub fn normalize_project_root(root: String) -> String {
let root = if root.is_empty() {
".".to_string()
} else {
root
};
if root.ends_with('/') {
root.strip_suffix('/')
.expect("'/' already checked to exist")
.to_string()
} else {
root
}
}
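The normalization rules in one place (an illustrative test, not part of the commit):
#[test]
fn normalize_project_root_examples() {
    // the workspace-root project has an empty root, which maps to "."
    assert_eq!(normalize_project_root("".to_string()), ".");
    // trailing slashes are stripped so path lookups stay consistent
    assert_eq!(normalize_project_root("libs/ui/".to_string()), "libs/ui");
    assert_eq!(normalize_project_root("libs/ui".to_string()), "libs/ui");
}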

View File

@ -0,0 +1,85 @@
use crate::native::project_graph::utils::ProjectRootMappings;
use std::path::Path;
pub fn find_project_for_path<P: AsRef<Path>>(
file_path: P,
project_root_map: &ProjectRootMappings,
) -> Option<&str> {
let mut current_path = file_path.as_ref().to_path_buf();
while let Some(parent) = current_path.parent() {
if current_path == parent {
break;
}
if let Some(current_path_str) = current_path.to_str() {
if let Some(p) = project_root_map.get(current_path_str) {
return Some(p);
}
}
current_path.pop();
}
if let Some(current_path_str) = current_path.to_str() {
match project_root_map.get(current_path_str) {
Some(s) => Some(s),
None => None,
}
} else {
// current_path contained non-Unicode characters
None
}
}
#[cfg(test)]
mod test {
use crate::native::project_graph::types::Project;
use crate::native::project_graph::utils::{
create_project_root_mappings, find_project_for_path,
};
use std::collections::HashMap;
#[test]
fn should_find_the_project_given_a_file_within_its_src_root() {
let project_root_mapping = create_project_root_mappings(&HashMap::from([
(
"demo-app".into(),
Project {
tags: None,
targets: Default::default(),
root: "apps/demo-app".into(),
named_inputs: None,
},
),
(
"ui".into(),
Project {
tags: None,
targets: Default::default(),
root: "libs/ui".into(),
named_inputs: None,
},
),
(
"core".into(),
Project {
tags: None,
targets: Default::default(),
root: "libs/core".into(),
named_inputs: None,
},
),
]));
assert_eq!(
find_project_for_path("apps/demo-app", &project_root_mapping),
Some("demo-app")
);
assert_eq!(
find_project_for_path("apps/demo-app/src", &project_root_mapping),
Some("demo-app")
);
assert_eq!(
find_project_for_path("apps/demo-app/src/subdir/blah", &project_root_mapping),
Some("demo-app")
);
}
}

View File

@ -1,14 +1,9 @@
use crate::native::tasks::types::HashInstruction;
use crate::native::{
project_graph::types::ProjectGraph,
tasks::types::{Task, TaskGraph},
};
use crate::native::tasks::types::{Task, TaskGraph};
pub(super) fn get_dep_output(
workspace_root: &str,
task: &Task,
task_graph: &TaskGraph,
project_graph: &ProjectGraph,
dependent_tasks_output_files: &str,
transitive: bool,
) -> anyhow::Result<Vec<HashInstruction>> {
@ -29,10 +24,8 @@ pub(super) fn get_dep_output(
if transitive {
inputs.extend(get_dep_output(
workspace_root,
child_task,
task_graph,
project_graph,
dependent_tasks_output_files,
transitive,
)?);

View File

@ -7,6 +7,7 @@ use crate::native::{
project_graph::types::ProjectGraph,
tasks::{inputs::SplitInputs, types::Task},
};
use napi::bindgen_prelude::External;
use napi::{Env, JsExternal};
use rayon::prelude::*;
use std::collections::HashMap;
@ -20,18 +21,16 @@ use crate::native::utils::find_matching_projects;
#[napi]
pub struct HashPlanner {
nx_json: NxJson,
project_graph: ProjectGraph,
workspace_root: String,
project_graph: External<ProjectGraph>,
}
#[napi]
impl HashPlanner {
#[napi(constructor)]
pub fn new(workspace_root: String, nx_json: NxJson, project_graph: ProjectGraph) -> Self {
pub fn new(nx_json: NxJson, project_graph: External<ProjectGraph>) -> Self {
Self {
nx_json,
project_graph,
workspace_root,
}
}
@ -74,7 +73,7 @@ impl HashPlanner {
HashInstruction::WorkspaceFileSet("{workspaceRoot}/.gitignore".to_string()),
HashInstruction::WorkspaceFileSet("{workspaceRoot}/.nxignore".to_string()),
])
.chain(self_inputs.into_iter())
.chain(self_inputs)
.collect();
inputs.par_sort();
@ -115,7 +114,7 @@ impl HashPlanner {
) -> anyhow::Result<Option<Vec<HashInstruction>>> {
let project = &self.project_graph.nodes[project_name];
let Some(target) = project.targets.get(target_name) else {
return Ok(None)
return Ok(None);
};
let external_nodes_keys: Vec<&str> = self
@ -141,7 +140,7 @@ impl HashPlanner {
.expect("Executors should always have a ':'");
let existing_package =
find_external_dependency_node_name(executor_package, &external_nodes_keys)
.unwrap_or_else(|| executor_package);
.unwrap_or(executor_package);
Ok(Some(vec![HashInstruction::External(
existing_package.to_string(),
)]))
@ -206,9 +205,9 @@ impl HashPlanner {
Ok(self_inputs
.into_iter()
.chain(deps_inputs.into_iter())
.chain(deps_outputs.into_iter())
.chain(projects.into_iter())
.chain(deps_inputs)
.chain(deps_outputs)
.chain(projects)
.collect())
}
@ -252,12 +251,13 @@ impl HashPlanner {
if self.project_graph.nodes.contains_key(*dep) {
let Some(dep_inputs) = get_inputs_for_dependency(
&self.project_graph.nodes[*dep],
&self.nx_json,
input,
)? else {
continue;
};
&self.project_graph.nodes[*dep],
&self.nx_json,
input,
)?
else {
continue;
};
deps_inputs.extend(self.self_and_deps_inputs(
dep,
task,
@ -326,14 +326,16 @@ impl HashPlanner {
let mut result: Vec<HashInstruction> = vec![];
for dep in deps_outputs {
let Input::DepsOutputs { dependent_tasks_output_files, transitive } = dep else {
let Input::DepsOutputs {
dependent_tasks_output_files,
transitive,
} = dep
else {
continue;
};
result.extend(get_dep_output(
&self.workspace_root,
task,
task_graph,
&self.project_graph,
dependent_tasks_output_files,
*transitive,
)?);
@ -348,7 +350,7 @@ impl HashPlanner {
) -> anyhow::Result<Vec<HashInstruction>> {
let mut result: Vec<HashInstruction> = vec![];
for project in project_inputs {
let Input::Projects {input, projects} = project else {
let Input::Projects { input, projects } = project else {
continue;
};
let projects = find_matching_projects(projects, &self.project_graph)?;

View File

@ -0,0 +1,17 @@
mod hash_env;
mod hash_external;
mod hash_project_config;
mod hash_project_files;
mod hash_runtime;
mod hash_task_output;
mod hash_workspace_files;
mod hash_tsconfig;
pub use hash_env::*;
pub use hash_external::*;
pub use hash_project_config::*;
pub use hash_project_files::*;
pub use hash_runtime::*;
pub use hash_task_output::*;
pub use hash_workspace_files::*;
pub use hash_tsconfig::*;

View File

@ -0,0 +1,30 @@
use crate::native::hasher::hash;
use std::collections::HashMap;
pub fn hash_env(env_name: &str, env: &HashMap<String, String>) -> anyhow::Result<String> {
let env_value = env.get(env_name).map(|s| s.as_str()).unwrap_or("");
Ok(hash(env_value.as_bytes()))
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn should_hash_env_given_they_exist() {
let mut env = HashMap::new();
env.insert("foo".to_string(), "bar".to_string());
env.insert("baz".to_string(), "qux".to_string());
let hash = hash_env("foo", &env).unwrap();
assert_eq!(hash, "15304296276065178466");
}
#[test]
fn should_provide_a_default_hash_if_one_does_not_exist() {
let env = HashMap::new();
let hash = hash_env("foo", &env).unwrap();
assert_eq!(hash, "3244421341483603138");
}
}

View File

@ -0,0 +1,94 @@
use crate::native::hasher::{hash, hash_array};
use crate::native::project_graph::types::ExternalNode;
use std::collections::HashMap;
use std::sync::Arc;
use anyhow::*;
use dashmap::DashMap;
pub fn hash_external(
external_name: &str,
externals: &HashMap<String, ExternalNode>,
cache: Arc<DashMap<String, String>>,
) -> Result<String> {
let external = externals
.get(external_name)
.ok_or_else(|| anyhow!("Could not find external {}", external_name))?;
if let Some(cached_hash) = cache.get(external_name) {
return Ok(cached_hash.clone());
}
let hash = if let Some(external_hash) = &external.hash {
hash(external_hash.as_bytes())
} else {
hash(external.version.as_bytes())
};
cache.insert(external_name.to_string(), hash.clone());
Ok(hash)
}
pub fn hash_all_externals<S: AsRef<str>>(
sorted_externals: &[S],
externals: &HashMap<String, ExternalNode>,
cache: Arc<DashMap<String, String>>,
) -> Result<String> {
let hashes = sorted_externals
.iter()
.map(|name| hash_external(name.as_ref(), externals, Arc::clone(&cache)))
.collect::<Result<Vec<_>>>()?;
Ok(hash_array(hashes))
}
#[cfg(test)]
mod test {
use super::*;
use crate::native::project_graph::types::ExternalNode;
use dashmap::DashMap;
use std::sync::Arc;
fn get_external_nodes_map() -> HashMap<String, ExternalNode> {
HashMap::from([
(
"my_external".to_string(),
ExternalNode {
version: "0.0.1".into(),
hash: None,
},
),
(
"my_external_with_hash".to_string(),
ExternalNode {
version: "0.0.1".into(),
hash: Some("hashvalue".into()),
},
),
])
}
#[test]
fn test_hash_external() {
let external_nodes = get_external_nodes_map();
let cache: Arc<DashMap<String, String>> = Arc::new(DashMap::new());
let no_external_node_hash =
hash_external("my_external", &external_nodes, Arc::clone(&cache));
assert_eq!(no_external_node_hash.unwrap(), "3342527690135000204");
let external_node_hash =
hash_external("my_external_with_hash", &external_nodes, Arc::clone(&cache));
assert_eq!(external_node_hash.unwrap(), "4204073044699973956");
}
#[test]
fn test_hash_all_externals() {
let external_nodes = get_external_nodes_map();
let cache: Arc<DashMap<String, String>> = Arc::new(DashMap::new());
let all_externals = hash_all_externals(
&["my_external", "my_external_with_hash"],
&external_nodes,
Arc::clone(&cache),
);
assert_eq!(all_externals.unwrap(), "9354284926255893100");
}
}

View File

@ -0,0 +1,165 @@
use std::collections::HashMap;
use anyhow::*;
use itertools::Itertools;
use crate::native::hasher::hash;
use crate::native::project_graph::types::Project;
use crate::native::types::Input;
pub fn hash_project_config(
project_name: &str,
projects: &HashMap<String, Project>,
) -> Result<String> {
let project = projects
.get(project_name)
.ok_or_else(|| anyhow!("Could not find project '{}'", project_name))?;
let targets = project
.targets
.iter()
.map(|(k, v)| (k, v))
.sorted_by(|a, b| a.0.cmp(b.0))
.map(|(k, v)| {
format!(
"{}{}{}{}{}",
k,
v.executor.as_deref().unwrap_or_default(),
v.outputs.as_deref().unwrap_or_default().concat(),
v.options.as_deref().unwrap_or_default(),
v.configurations.as_deref().unwrap_or_default(),
)
})
.collect::<Vec<_>>()
.concat();
let tags = project.tags.as_deref().unwrap_or_default().concat();
let inputs = project
.named_inputs
.as_ref()
.map(|inputs| {
inputs
.iter()
.map(|(k, v)| (k, v))
.sorted_by(|a, b| a.0.cmp(b.0))
.map(|(_, v)| {
v.iter()
.map(Input::from)
.map(|i| format!("{:?}", i))
.collect::<Vec<_>>()
.concat()
})
.collect::<Vec<_>>()
.concat()
})
.unwrap_or_default();
Ok(hash(
&[
project.root.as_bytes(),
tags.as_bytes(),
targets.as_bytes(),
inputs.as_bytes(),
]
.concat(),
))
}
#[cfg(test)]
mod tests {
use super::*;
use crate::native::project_graph::types::Target;
use std::collections::HashMap;
#[test]
fn test_hash_project_config_with_data() {
let projects = HashMap::from([
(
"nx".into(),
Project {
root: "".into(),
named_inputs: None,
tags: None,
targets: Default::default(),
},
),
(
"js".into(),
Project {
root: "libs/js".into(),
named_inputs: None,
tags: Some(vec!["type:lib".into(), "scope:js".into()]),
targets: HashMap::from([
(
"build".into(),
Target {
executor: Some("@nx/node:build".into()),
options: Some("{}".into()),
configurations: Some("{\"production\":{}}".into()),
..Default::default()
},
),
(
"test".into(),
Target {
executor: Some("@nx/node:test".into()),
options: Some("{}".into()),
configurations: Some("{\"production\":{}}".into()),
..Default::default()
},
),
]),
},
),
(
"js-unsorted".into(),
Project {
root: "libs/js".into(),
named_inputs: None,
tags: Some(vec!["type:lib".into(), "scope:js".into()]),
targets: HashMap::from([
(
"test".into(),
Target {
executor: Some("@nx/node:test".into()),
options: Some("{}".into()),
configurations: Some("{\"production\":{}}".into()),
..Default::default()
},
),
(
"build".into(),
Target {
executor: Some("@nx/node:build".into()),
options: Some("{}".into()),
configurations: Some("{\"production\":{}}".into()),
..Default::default()
},
),
]),
},
),
]);
let nx_project_hash = hash_project_config("nx", &projects);
assert_eq!(nx_project_hash.unwrap(), "3244421341483603138");
let js_project_hash = hash_project_config("js", &projects).unwrap();
assert_eq!(js_project_hash, "18342193044952101577");
let js_unsorted = hash_project_config("js-unsorted", &projects);
assert_eq!(js_unsorted.unwrap(), js_project_hash);
}
#[test]
fn test_hash_project_config_with_no_project() {
let projects = HashMap::<String, Project>::new();
let result = hash_project_config("nx", &projects);
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
"Could not find project 'nx'"
);
}
}

View File

@ -0,0 +1,148 @@
use std::collections::HashMap;
use anyhow::*;
use tracing::trace;
use crate::native::glob::build_glob_set;
use crate::native::types::FileData;
pub fn hash_project_files(
project_name: &str,
project_root: &str,
file_sets: &str,
project_file_map: &HashMap<String, Vec<FileData>>,
) -> Result<String> {
let collected_files = collect_files(project_name, project_root, file_sets, project_file_map)?;
let mut hasher = xxhash_rust::xxh3::Xxh3::new();
for file in collected_files {
hasher.update(file.hash.as_bytes());
}
Ok(hasher.digest().to_string())
}
/// Base function kept separate so it is testable (to make sure we're getting the proper files back)
fn collect_files<'a>(
project_name: &str,
project_root: &str,
file_sets: &str,
project_file_map: &'a HashMap<String, Vec<FileData>>,
) -> Result<Vec<&'a FileData>> {
let globs = file_sets
.split(',')
.map(|f| f.replace("{projectRoot}", project_root))
.collect::<Vec<_>>();
let now = std::time::Instant::now();
let glob_set = build_glob_set(&globs)?;
trace!("build_glob_set for {}: {:?}", project_name, now.elapsed());
project_file_map.get(project_name).map_or_else(
|| Err(anyhow!("project {} not found", project_name)),
|files| {
let now = std::time::Instant::now();
let hashes = files
.iter()
.filter(|file| glob_set.is_match(&file.file))
.collect::<Vec<_>>();
trace!("hash_files for {}: {:?}", project_name, now.elapsed());
Ok(hashes)
},
)
}
#[cfg(test)]
mod tests {
use crate::native::hasher::hash;
use super::*;
use std::collections::HashMap;
#[test]
fn test_collect_files() {
let proj_name = "test_project";
let proj_root = "test/root";
let file_sets = "!{projectRoot}/**/?(*.)+(spec|test).[jt]s?(x)?(.snap),{projectRoot}/**/*";
let mut file_map = HashMap::new();
let tsfile_1 = FileData {
file: "test/root/test1.ts".into(),
hash: Default::default(),
};
let testfile_1 = FileData {
file: "test/root/test.spec.ts".into(),
hash: Default::default(),
};
let tsfile_2 = FileData {
file: "test/root/src/module/test3.ts".into(),
hash: Default::default(),
};
let testfile_2 = FileData {
file: "test/root/test.spec.tsx.snap".into(),
hash: Default::default(),
};
file_map.insert(
String::from(proj_name),
vec![
tsfile_1.clone(),
testfile_1.clone(),
tsfile_2.clone(),
testfile_2.clone(),
],
);
let result = collect_files(proj_name, proj_root, file_sets, &file_map).unwrap();
assert_eq!(result, vec![&tsfile_1, &tsfile_2]);
let result = collect_files(
proj_name,
proj_root,
"!{projectRoot}/**/*.spec.ts",
&file_map,
)
.unwrap();
assert_eq!(
result,
vec![
&tsfile_1,
&tsfile_2,
/* testfile_2 is included because it ends with .spec.tsx.snap, which the !*.spec.ts glob does not match */ &testfile_2
]
);
}
#[test]
fn should_hash_deterministically() {
let proj_name = "test_project";
let proj_root = "test/root";
let file_sets = "!{projectRoot}/**/?(*.)+(spec|test).[jt]s?(x)?(.snap),{projectRoot}/**/*";
let mut file_map = HashMap::new();
let file_data1 = FileData {
file: "test/root/test1.ts".into(),
hash: "file_data1".into(),
};
let file_data2 = FileData {
file: "test/root/test.spec.ts".into(),
hash: "file_data2".into(),
};
let file_data3 = FileData {
file: "test/root/test3.ts".into(),
hash: "file_data3".into(),
};
let file_data4 = FileData {
file: "test/root/test.spec.tsx.snap".into(),
hash: "file_data4".into(),
};
file_map.insert(
String::from(proj_name),
vec![
file_data1.clone(),
file_data2.clone(),
file_data3.clone(),
file_data4.clone(),
],
);
let hash_result = hash_project_files(proj_name, proj_root, file_sets, &file_map).unwrap();
assert_eq!(
hash_result,
hash(&[file_data1.hash.as_bytes(), file_data3.hash.as_bytes()].concat())
);
}
}

View File

@ -0,0 +1,73 @@
use crate::native::hasher::hash;
use dashmap::DashMap;
use std::collections::HashMap;
use std::process::Command;
use std::sync::Arc;
use tracing::trace;
pub fn hash_runtime(
workspace_root: &str,
command: &str,
env: &HashMap<String, String>,
cache: Arc<DashMap<String, String>>,
) -> anyhow::Result<String> {
let cache_key = format!("{}-{:?}", command, env);
if let Some(cache_results) = cache.get(&cache_key) {
return Ok(cache_results.clone());
}
let mut command_builder = if cfg!(target_os = "windows") {
let comspec = std::env::var("COMSPEC");
let shell = comspec
.as_ref()
.map(|v| v.as_str())
.unwrap_or_else(|_| "cmd.exe");
let mut command = Command::new(shell);
command.arg("/C");
command
} else {
let mut command = Command::new("sh");
command.arg("-c");
command
};
command_builder.arg(command);
command_builder.current_dir(workspace_root);
env.iter().for_each(|(key, value)| {
command_builder.env(key, value);
});
trace!("executing: {:?}", command_builder);
let output = command_builder
.output()
.map_err(|e| anyhow::anyhow!("Failed to execute: '{}'\n{}", command, e))?;
trace!("{} output: {:?}", command, output);
let std_out = std::str::from_utf8(&output.stdout)?.trim();
let std_err = std::str::from_utf8(&output.stderr)?.trim();
let hash_result = hash(&[std_out.as_bytes(), std_err.as_bytes()].concat());
cache.insert(cache_key, hash_result.clone());
Ok(hash_result)
}
#[cfg(test)]
mod tests {
use super::*;
use dashmap::DashMap;
use std::collections::HashMap;
use std::sync::Arc;
#[test]
fn test_hash_runtime() {
let workspace_root = "/tmp";
let command = "echo 'runtime'";
let env: HashMap<String, String> = HashMap::new();
let cache = Arc::new(DashMap::new());
let result = hash_runtime(workspace_root, command, &env, Arc::clone(&cache)).unwrap();
assert_eq!(result, "10571312846059850300");
}
}

View File

@ -0,0 +1,19 @@
use crate::native::cache::expand_outputs::get_files_for_outputs;
use crate::native::glob::build_glob_set;
use crate::native::hasher::{hash_array, hash_file};
use anyhow::*;
use rayon::prelude::*;
use tracing::trace;
pub fn hash_task_output(workspace_root: &str, glob: &str, outputs: &[String]) -> Result<String> {
let now = std::time::Instant::now();
let output_files = get_files_for_outputs(workspace_root.to_string(), outputs.to_vec())?;
trace!("get_files_for_outputs: {:?}", now.elapsed());
let glob = build_glob_set(&[glob])?;
let hashes = output_files
.into_par_iter()
.filter(|file| glob.is_match(file))
.filter_map(hash_file)
.collect::<Vec<_>>();
Ok(hash_array(hashes))
}
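An illustrative call (the workspace path, glob, and output directory are placeholders, not from the diff):
fn example() -> anyhow::Result<()> {
    // hash the .js files a dependent build wrote into dist/libs/ui,
    // i.e. the dependentTasksOutputFiles case of a task's inputs
    let hash = hash_task_output(
        "/workspace",
        "dist/**/*.js",
        &["dist/libs/ui".to_string()],
    )?;
    println!("{hash}");
    Ok(())
}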

View File

@ -0,0 +1,142 @@
use std::collections::HashMap;
use anyhow::*;
use crate::native::hasher::hash;
use crate::native::project_graph::utils::find_project_for_path;
pub fn hash_tsconfig_selectively(
project_name: &str,
ts_config: &[u8],
ts_config_paths: &HashMap<String, Vec<String>>,
project_root_mappings: &HashMap<String, String>,
) -> Result<String> {
let project_path =
remove_other_project_paths(project_name, project_root_mappings, ts_config_paths);
Ok(hash(&[project_path.as_bytes(), ts_config].concat()))
}
fn remove_other_project_paths(
project_name: &str,
project_root_mappings: &HashMap<String, String>,
paths: &HashMap<String, Vec<String>>,
) -> String {
let mut filtered_paths = paths
.iter()
.filter_map(|(key, files)| {
let project_files = files
.iter()
.filter(|&file| {
find_project_for_path(file, project_root_mappings)
.map_or_else(|| false, |p| project_name == p)
})
.map(|file| file.as_str())
.collect::<Vec<_>>();
(!project_files.is_empty()).then(|| format!("{}:{}", key, project_files.join(";")))
})
.collect::<Vec<_>>();
filtered_paths.sort();
filtered_paths.join(";")
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use crate::native::project_graph::types::Project;
use crate::native::project_graph::utils::create_project_root_mappings;
use super::*;
#[test]
fn test_remove_other_project_paths() {
let project_name = "project1";
let project_root_mappings = create_test_project_root_mappings();
let paths = &HashMap::from([
(
"@test/project1".into(),
vec!["path1/index.ts".into(), "path1/index2.ts".into()],
),
(
"@test/project2".into(),
vec!["packages/path2/index.ts".into()],
),
]);
let result = remove_other_project_paths(project_name, &project_root_mappings, paths);
assert_eq!(result, "@test/project1:path1/index.ts;path1/index2.ts");
}
#[test]
fn test_hash_tsconfig() {
let project_root_mappings = create_test_project_root_mappings();
let tsconfig = r#"
{
"compilerOptions": {
"target": "ES2021",
"importHelpers": true,
"module": "commonjs",
"moduleResolution": "node",
"outDir": "build",
"experimentalDecorators": true,
"emitDecoratorMetadata": true,
"skipLibCheck": true,
"types": ["node", "jest"],
"lib": ["ES2021"],
"declaration": true,
"resolveJsonModule": true,
"baseUrl": ".",
"rootDir": ".",
"allowJs": true
}
}
"#;
let paths: HashMap<String, Vec<String>> = HashMap::from([
(
"@test/project1".into(),
vec!["path1/index.ts".into(), "path1/index2.ts".into()],
),
(
"@test/project2".into(),
vec!["packages/path2/index.ts".into()],
),
]);
let result = hash_tsconfig_selectively(
"project1",
tsconfig.as_bytes(),
&paths,
&project_root_mappings,
)
.unwrap();
assert_eq!(result, "6431119472521503644");
let result = hash_tsconfig_selectively(
"project2",
tsconfig.as_bytes(),
&paths,
&project_root_mappings,
)
.unwrap();
assert_eq!(result, "13103308914505796317");
}
fn create_test_project_root_mappings() -> HashMap<String, String> {
create_project_root_mappings(&HashMap::from([
(
"project1".into(),
Project {
root: "path1".into(),
..Default::default()
},
),
(
"project2".into(),
Project {
root: "packages/path2".into(),
..Default::default()
},
),
]))
}
}

View File

@ -0,0 +1,89 @@
use std::sync::Arc;
use anyhow::*;
use dashmap::DashMap;
use tracing::trace;
use crate::native::glob::build_glob_set;
use crate::native::types::FileData;
pub fn hash_workspace_files(
workspace_file_set: &str,
all_workspace_files: &[FileData],
cache: Arc<DashMap<String, String>>,
) -> Result<String> {
let file_set = workspace_file_set
.strip_prefix("{workspaceRoot}/")
.ok_or_else(|| {
anyhow!(
"{workspace_file_set} does not start with {}",
"{workspaceRoot}/"
)
})?;
if let Some(cache_results) = cache.get(file_set) {
return Ok(cache_results.clone());
}
let glob = build_glob_set(&[file_set])?;
let mut hasher = xxhash_rust::xxh3::Xxh3::new();
for file in all_workspace_files
.iter()
.filter(|file| glob.is_match(&file.file))
{
trace!("{:?} was found with glob {:?}", file.file, file_set);
hasher.update(file.hash.as_bytes());
}
let hashed_value = hasher.digest().to_string();
cache.insert(file_set.to_string(), hashed_value.clone());
Ok(hashed_value)
}
#[cfg(test)]
mod test {
use crate::native::hasher::hash;
use super::*;
use dashmap::DashMap;
use std::sync::Arc;
#[test]
fn test_hash_workspace_files_error() {
let result = hash_workspace_files("packages/{package}", &[], Arc::new(DashMap::new()));
assert!(result.is_err());
}
#[test]
fn test_hash_workspace_files() {
let gitignore_file = FileData {
file: ".gitignore".into(),
hash: "123".into(),
};
let nxignore_file = FileData {
file: ".nxignore".into(),
hash: "456".into(),
};
let package_json_file = FileData {
file: "package.json".into(),
hash: "789".into(),
};
let project_file = FileData {
file: "packages/project/project.json".into(),
hash: "abc".into(),
};
let result = hash_workspace_files(
"{workspaceRoot}/.gitignore",
&[
gitignore_file.clone(),
nxignore_file.clone(),
package_json_file.clone(),
project_file.clone(),
],
Arc::new(DashMap::new()),
)
.unwrap();
assert_eq!(result, hash(gitignore_file.hash.as_bytes()));
}
}

View File

@ -25,15 +25,10 @@ pub(super) fn get_inputs<'a>(
.targets
.get(&task.target.target)
.expect("Task target should always have a target");
let target_defaults = nx_json
.target_defaults
.as_ref()
.and_then(|td| td.get(&task.target.target));
let inputs: Option<Vec<Input>> = target_data
.inputs
.as_ref()
.or_else(|| target_defaults.and_then(|td| td.inputs.as_ref()))
.map(|i| i.iter().map(|v| v.into()).collect());
split_inputs_into_self_and_deps(inputs, named_inputs)
@ -44,8 +39,8 @@ pub(super) fn get_inputs_for_dependency<'a>(
nx_json: &'a NxJson,
named_input: &'a Input,
) -> anyhow::Result<Option<SplitInputs<'a>>> {
let Input::Inputs { input, ..} = named_input else {
return Ok(None);
let Input::Inputs { input, .. } = named_input else {
return Ok(None);
};
let inputs = get_named_inputs(nx_json, project);
@ -151,7 +146,7 @@ pub(super) fn expand_single_project_inputs<'a>(
Input::Inputs {
input,
dependencies: false,
} => expanded.extend(expand_named_input(&input, named_inputs)?),
} => expanded.extend(expand_named_input(input, named_inputs)?),
Input::FileSet(fileset) => {
validate_file_set(fileset)?;
expanded.push(Input::FileSet(fileset));

View File

@ -1,5 +1,7 @@
mod dep_outputs;
mod hash_planner;
mod hashers;
mod inputs;
pub mod task_hasher;
mod types;
mod utils;

View File

@ -0,0 +1,262 @@
use std::collections::HashMap;
use std::sync::Arc;
use crate::native::{
hasher::hash,
project_graph::{types::ProjectGraph, utils::create_project_root_mappings},
tasks::types::HashInstruction,
types::NapiDashMap,
};
use crate::native::{
project_graph::utils::ProjectRootMappings,
tasks::hashers::{hash_env, hash_runtime, hash_workspace_files},
};
use crate::native::{
tasks::hashers::{
hash_all_externals, hash_external, hash_project_config, hash_project_files,
hash_task_output, hash_tsconfig_selectively,
},
types::FileData,
workspace::types::ProjectFiles,
};
use anyhow::anyhow;
use dashmap::DashMap;
use napi::bindgen_prelude::{Buffer, External};
use rayon::prelude::*;
use tracing::{debug, trace, trace_span};
#[napi(object)]
#[derive(Debug)]
pub struct HashDetails {
pub value: String,
pub details: HashMap<String, String>,
}
#[napi(object)]
pub struct HasherOptions {
pub selectively_hash_ts_config: bool,
}
#[napi]
pub struct TaskHasher {
workspace_root: String,
project_graph: External<ProjectGraph>,
project_file_map: External<HashMap<String, Vec<FileData>>>,
all_workspace_files: External<Vec<FileData>>,
ts_config: Vec<u8>,
ts_config_paths: HashMap<String, Vec<String>>,
options: Option<HasherOptions>,
workspace_files_cache: Arc<DashMap<String, String>>,
external_cache: Arc<DashMap<String, String>>,
runtime_cache: Arc<DashMap<String, String>>,
}
#[napi]
impl TaskHasher {
#[napi(constructor)]
pub fn new(
workspace_root: String,
project_graph: External<ProjectGraph>,
project_file_map: External<ProjectFiles>,
all_workspace_files: External<Vec<FileData>>,
ts_config: Buffer,
ts_config_paths: HashMap<String, Vec<String>>,
options: Option<HasherOptions>,
) -> Self {
Self {
workspace_root,
project_graph,
project_file_map,
all_workspace_files,
ts_config: ts_config.to_vec(),
ts_config_paths,
options,
workspace_files_cache: Arc::new(DashMap::new()),
external_cache: Arc::new(DashMap::new()),
runtime_cache: Arc::new(DashMap::new()),
}
}
#[napi]
pub fn hash_plans(
&self,
hash_plans: External<HashMap<String, Vec<HashInstruction>>>,
js_env: HashMap<String, String>,
) -> anyhow::Result<NapiDashMap<String, HashDetails>> {
debug!("{:?}", hash_plans.as_ref());
trace!("hash_plans: {}", hash_plans.len());
trace!("all workspace files: {}", self.all_workspace_files.len());
trace!("project_file_map: {}", self.project_file_map.len());
let ts_config_hash = hash(&self.ts_config);
let project_root_mappings = create_project_root_mappings(&self.project_graph.nodes);
let mut sorted_externals = self.project_graph.external_nodes.keys().collect::<Vec<_>>();
sorted_externals.par_sort();
let selectively_hash_tsconfig = self
.options
.as_ref()
.map(|o| o.selectively_hash_ts_config)
.unwrap_or(false);
let hash_time = std::time::Instant::now();
let hashes: NapiDashMap<String, HashDetails> = NapiDashMap::new();
hash_plans
.iter()
.flat_map(|(task_id, instructions)| {
instructions
.iter()
.map(move |instruction| (task_id, instruction))
})
.par_bridge()
.try_for_each(|(task_id, instruction)| {
let hash_detail = self.hash_instruction(
task_id,
instruction,
HashInstructionArgs {
js_env: &js_env,
ts_config_hash: &ts_config_hash,
project_root_mappings: &project_root_mappings,
sorted_externals: &sorted_externals,
selectively_hash_tsconfig,
},
)?;
let mut entry = hashes
.entry(task_id.to_string())
.or_insert_with(|| HashDetails {
value: String::new(),
details: HashMap::new(),
});
entry.details.insert(hash_detail.0, hash_detail.1);
Ok::<(), anyhow::Error>(())
})?;
hashes.iter_mut().for_each(|mut h| {
let hash_details = h.value_mut();
let mut keys = hash_details.details.keys().collect::<Vec<_>>();
keys.par_sort();
let mut hasher = xxhash_rust::xxh3::Xxh3::new();
for key in keys {
hasher.update(hash_details.details[key].as_bytes());
}
hash_details.value = hasher.digest().to_string();
});
trace!("hashing took {:?}", hash_time.elapsed());
debug!(?hashes);
Ok(hashes)
}
fn hash_instruction(
&self,
task_id: &str,
instruction: &HashInstruction,
HashInstructionArgs {
js_env,
ts_config_hash,
project_root_mappings,
sorted_externals,
selectively_hash_tsconfig,
}: HashInstructionArgs,
) -> anyhow::Result<(String, String)> {
let now = std::time::Instant::now();
let span = trace_span!("hashing", task_id).entered();
let hash = match instruction {
HashInstruction::WorkspaceFileSet(workspace_file_set) => {
let hashed_workspace_files = hash_workspace_files(
workspace_file_set,
&self.all_workspace_files,
Arc::clone(&self.workspace_files_cache),
);
trace!(parent: &span, "hash_workspace_files: {:?}", now.elapsed());
hashed_workspace_files?
}
HashInstruction::Runtime(runtime) => {
let hashed_runtime = hash_runtime(
&self.workspace_root,
runtime,
js_env,
Arc::clone(&self.runtime_cache),
)?;
trace!(parent: &span, "hash_runtime: {:?}", now.elapsed());
hashed_runtime
}
HashInstruction::Environment(env) => {
let hashed_env = hash_env(env, js_env)?;
trace!(parent: &span, "hash_env: {:?}", now.elapsed());
hashed_env
}
HashInstruction::ProjectFileSet(project_name, file_set) => {
let project = self
.project_graph
.nodes
.get(project_name)
.ok_or_else(|| anyhow!("project {} not found", project_name))?;
let hashed_project_files = hash_project_files(
project_name,
&project.root,
file_set,
&self.project_file_map,
)?;
trace!(parent: &span, "hash_project_files: {:?}", now.elapsed());
hashed_project_files
}
HashInstruction::ProjectConfiguration(project_name) => {
let hashed_project_config =
hash_project_config(project_name, &self.project_graph.nodes)?;
trace!(parent: &span, "hash_project_config: {:?}", now.elapsed());
hashed_project_config
}
HashInstruction::TsConfiguration(project_name) => {
let ts_config_hash = if !selectively_hash_tsconfig {
ts_config_hash.to_string()
} else {
hash_tsconfig_selectively(
project_name,
&self.ts_config,
&self.ts_config_paths,
project_root_mappings,
)?
};
trace!(parent: &span, "hash_tsconfig: {:?}", now.elapsed());
ts_config_hash
}
HashInstruction::TaskOutput(glob, outputs) => {
let hashed_task_output = hash_task_output(&self.workspace_root, glob, outputs)?;
trace!(parent: &span, "hash_task_output: {:?}", now.elapsed());
hashed_task_output
}
HashInstruction::External(external) => {
let hashed_external = hash_external(
external,
&self.project_graph.external_nodes,
Arc::clone(&self.external_cache),
)?;
trace!(parent: &span, "hash_external: {:?}", now.elapsed());
hashed_external
}
HashInstruction::AllExternalDependencies => {
let hashed_all_externals = hash_all_externals(
sorted_externals,
&self.project_graph.external_nodes,
Arc::clone(&self.external_cache),
)?;
trace!(parent: &span, "hash_all_externals: {:?}", now.elapsed());
hashed_all_externals
}
};
Ok((instruction.to_string(), hash))
}
}
struct HashInstructionArgs<'a> {
js_env: &'a HashMap<String, String>,
ts_config_hash: &'a str,
project_root_mappings: &'a ProjectRootMappings,
sorted_externals: &'a [&'a String],
selectively_hash_tsconfig: bool,
}
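One subtlety worth restating in isolation (a sketch mirroring the sorting pass in hash_plans above, not additional commit code): instruction hashes land in the map in whatever order the worker threads finish, so each task's final value is folded from its detail hashes in sorted key order to stay deterministic:
fn combine_details(details: &std::collections::HashMap<String, String>) -> String {
    let mut keys: Vec<_> = details.keys().collect();
    keys.sort(); // fixed order => stable hash regardless of thread timing
    let mut hasher = xxhash_rust::xxh3::Xxh3::new();
    for key in keys {
        hasher.update(details[key].as_bytes());
    }
    hasher.digest().to_string()
}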

View File

@ -1,4 +1,5 @@
use std::{collections::HashMap, ptr};
use std::fmt::Formatter;
use std::{collections::HashMap, fmt, ptr};
use napi::{
bindgen_prelude::{check_status, ToNapiValue},
@ -29,7 +30,7 @@ pub struct TaskGraph {
pub dependencies: HashMap<String, Vec<String>>,
}
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum HashInstruction {
WorkspaceFileSet(String),
Runtime(String),
@ -49,7 +50,7 @@ impl ToNapiValue for HashInstruction {
) -> napi::Result<napi::sys::napi_value> {
let mut ptr = ptr::null_mut();
let val: String = val.into();
let val = val.to_string();
check_status!(
unsafe {
@ -62,27 +63,31 @@ impl ToNapiValue for HashInstruction {
}
}
impl From<HashInstruction> for String {
fn from(instruction: HashInstruction) -> Self {
match instruction {
HashInstruction::AllExternalDependencies => "AllExternalDependencies".to_string(),
HashInstruction::ProjectFileSet(project_name, file_set) => {
format!("{project_name}:{file_set}")
impl fmt::Display for HashInstruction {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
match self {
HashInstruction::AllExternalDependencies => "AllExternalDependencies".to_string(),
HashInstruction::ProjectFileSet(project_name, file_set) => {
format!("{project_name}:{file_set}")
}
HashInstruction::WorkspaceFileSet(file_set) => file_set.to_string(),
HashInstruction::Runtime(runtime) => format!("runtime:{}", runtime),
HashInstruction::Environment(env) => format!("env:{}", env),
HashInstruction::TaskOutput(task_output, dep_outputs) => {
let dep_outputs = dep_outputs.join(",");
format!("{task_output}:{dep_outputs}")
}
HashInstruction::External(external) => external.to_string(),
HashInstruction::ProjectConfiguration(project_name) => {
format!("{project_name}:ProjectConfiguration")
}
HashInstruction::TsConfiguration(project_name) => {
format!("{project_name}:TsConfig")
}
}
HashInstruction::WorkspaceFileSet(file_set) => file_set,
HashInstruction::Runtime(runtime) => format!("runtime:{}", runtime),
HashInstruction::Environment(env) => format!("env:{}", env),
HashInstruction::TaskOutput(task_output, dep_outputs) => {
let dep_outputs = dep_outputs.join(",");
format!("{task_output}:{dep_outputs}")
},
HashInstruction::External(external) => external,
HashInstruction::ProjectConfiguration(project_name) => {
format!("{project_name}:ProjectConfiguration")
}
HashInstruction::TsConfiguration(project_name) => {
format!("{project_name}:TsConfig")
}
}
)
}
}
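
The refactor above replaces From<HashInstruction> for String with a fmt::Display impl. A hypothetical sketch of the same pattern: Display provides to_string() through the blanket ToString impl and works on borrowed values, so building an instruction's key no longer consumes it:

use std::fmt;

enum Key {
    Env(String),
    ProjectConfiguration(String),
}

impl fmt::Display for Key {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Key::Env(name) => write!(f, "env:{name}"),
            Key::ProjectConfiguration(name) => write!(f, "{name}:ProjectConfiguration"),
        }
    }
}

fn main() {
    let key = Key::Env("NODE_ENV".into());
    // to_string comes for free and borrows the value instead of consuming it
    assert_eq!(key.to_string(), "env:NODE_ENV");
    println!("{key}");
}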

View File

@ -136,20 +136,3 @@ exports[`task planner should plan the task where the project has dependencies 1`
],
}
`;
exports[`task planner should use targetDefaults from nx.json 1`] = `
{
"parent:build": [
"AllExternalDependencies",
"child:!{projectRoot}/**/*.spec.ts",
"child:ProjectConfiguration",
"child:TsConfig",
"parent:!{projectRoot}/**/*.spec.ts",
"parent:ProjectConfiguration",
"parent:TsConfig",
"{workspaceRoot}/.gitignore",
"{workspaceRoot}/.nxignore",
"{workspaceRoot}/nx.json",
],
}
`;

View File

@ -1,7 +1,7 @@
import { TempFs } from '../../internal-testing-utils/temp-fs';
let tempFs = new TempFs('task-planner');
import { HashPlanner } from '../index';
import { HashPlanner, transferProjectGraph } from '../index';
import { Task, TaskGraph } from '../../config/task-graph';
import { InProcessTaskHasher } from '../../hasher/task-hasher';
import { withEnvironmentVariables } from '../../internal-testing-utils/with-environment';
@ -9,7 +9,11 @@ import { ProjectGraphBuilder } from '../../project-graph/project-graph-builder';
import { createTaskGraph } from '../../tasks-runner/create-task-graph';
import { transformProjectGraphForRust } from '../transform-objects';
describe('task planner', () => {
// disable NX_NATIVE_TASK_HASHER for this test, since the new planner's results are compared against the old task hasher

process.env.NX_NATIVE_TASK_HASHER = 'false';
const packageJson = {
name: 'nrwl',
};
@ -156,13 +160,13 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
transferProjectGraph(transformProjectGraphForRust(projectGraph))
);
await assertHashPlan(
@ -223,12 +227,12 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
transferProjectGraph(transformProjectGraphForRust(projectGraph))
);
const hashPlan = await assertHashPlan(
taskGraph.tasks['parent:build'],
@ -300,12 +304,12 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
transferProjectGraph(transformProjectGraphForRust(projectGraph))
);
let hashPlans = await assertHashPlan(
taskGraph.tasks['parent:build'],
@ -363,12 +367,12 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
transferProjectGraph(transformProjectGraphForRust(projectGraph))
);
const tasks = Object.values(taskGraph.tasks);
@ -445,13 +449,13 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson as any,
null,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
transferProjectGraph(transformProjectGraphForRust(projectGraph))
);
const tasks = Object.values(taskGraph.tasks);
let plans = await assertHashPlan(tasks, taskGraph, hasher, planner);
@ -460,78 +464,6 @@ describe('task planner', () => {
);
});
it('should use targetDefaults from nx.json', async () => {
let projectFileMap = {
parent: [
{ file: 'libs/parent/filea.ts', hash: 'a.hash' },
{ file: 'libs/parent/filea.spec.ts', hash: 'a.spec.hash' },
],
child: [
{ file: 'libs/child/fileb.ts', hash: 'b.hash' },
{ file: 'libs/child/fileb.spec.ts', hash: 'b.spec.hash' },
],
};
const builder = new ProjectGraphBuilder(undefined, projectFileMap);
builder.addNode({
name: 'parent',
type: 'lib',
data: {
root: 'libs/parent',
targets: {
build: { executor: 'nx:run-commands' },
},
},
});
builder.addNode({
name: 'child',
type: 'lib',
data: {
root: 'libs/child',
targets: { build: { executor: 'nx:run-commands' } },
},
});
builder.addStaticDependency('parent', 'child', 'libs/parent/filea.ts');
let projectGraph = builder.getUpdatedProjectGraph();
let taskGraph = createTaskGraph(
projectGraph,
{ build: ['^build'] },
['parent'],
['build'],
undefined,
{}
);
let nxJson = {
namedInputs: {
prod: ['!{projectRoot}/**/*.spec.ts'],
},
targetDefaults: {
build: {
inputs: ['prod', '^prod'],
},
},
} as any;
const hasher = new InProcessTaskHasher(
projectFileMap,
allWorkspaceFiles,
projectGraph,
nxJson,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
);
let plans = await assertHashPlan(
taskGraph.tasks['parent:build'],
taskGraph,
hasher,
planner
);
expect(plans).toMatchSnapshot();
});
it('should build plans where the project graph has circular dependencies', async () => {
let projectFileMap = {
parent: [{ file: '/filea.ts', hash: 'a.hash' }],
@ -571,12 +503,12 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformProjectGraphForRust(projectGraph)
transferProjectGraph(transformProjectGraphForRust(projectGraph))
);
let tasks = Object.values(taskGraph.tasks);
let plans = await assertHashPlan(tasks, taskGraph, hasher, planner);
@ -620,10 +552,13 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const transformed = transformProjectGraphForRust(projectGraph);
const planner = new HashPlanner(tempFs.tempDir, nxJson as any, transformed);
const transformed = transferProjectGraph(
transformProjectGraphForRust(projectGraph)
);
const planner = new HashPlanner(nxJson as any, transformed);
let plans = await assertHashPlan(
taskGraph.tasks['app:build'],
taskGraph,
@ -739,15 +674,14 @@ describe('task planner', () => {
allWorkspaceFiles,
projectGraph,
nxJson,
null,
{}
);
const transformed = transformProjectGraphForRust(projectGraph);
const planner = new HashPlanner(
tempFs.tempDir,
nxJson as any,
transformed
const transformed = transferProjectGraph(
transformProjectGraphForRust(projectGraph)
);
const planner = new HashPlanner(nxJson, transformed);
let plans = await assertHashPlan(
taskGraph.tasks['parent:build'],
taskGraph,

View File

@ -21,6 +21,8 @@ export function transformProjectGraphForRust(
executor: targetConfig.executor,
inputs: targetConfig.inputs,
outputs: targetConfig.outputs,
options: JSON.stringify(targetConfig.options),
configurations: JSON.stringify(targetConfig.configurations),
};
}
nodes[projectName] = {
@ -57,6 +59,3 @@ export function transformProjectGraphForRust(
dependencies,
};
}
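
Serializing options and configurations with JSON.stringify keeps the N-API boundary simple: the Rust side receives opaque strings it can hash directly, and only parses them when a field is actually needed. A hedged sketch of that consuming side with serde_json (the field name is illustrative):

use serde_json::Value;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// change detection only needs the serialized form
fn options_hash(options_json: Option<&str>) -> String {
    let mut hasher = DefaultHasher::new(); // stand-in for the real hash fn
    options_json.unwrap_or("").hash(&mut hasher);
    hasher.finish().to_string()
}

// parse lazily when a specific field must be inspected
fn output_path(options_json: &str) -> Option<String> {
    let options: Value = serde_json::from_str(options_json).ok()?;
    options.get("outputPath")?.as_str().map(str::to_owned)
}

fn main() {
    let json = r#"{"outputPath":"dist/libs/parent"}"#;
    println!("{} {:?}", options_hash(Some(json)), output_path(json));
}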

View File

@ -1,7 +1,9 @@
mod file_data;
mod inputs;
mod napi_dashmap;
mod nx_json;
pub use file_data::FileData;
pub use inputs::*;
pub use napi_dashmap::NapiDashMap;
pub use nx_json::*;

View File

@ -0,0 +1,58 @@
use dashmap::DashMap;
use napi::bindgen_prelude::{Object, ToNapiValue};
use napi::{sys, Env};
use std::collections::hash_map::RandomState;
use std::ops::{Deref, DerefMut};
#[derive(Debug)]
pub struct NapiDashMap<K, V, S = RandomState>(DashMap<K, V, S>)
where
K: Eq + PartialEq + std::hash::Hash,
S: std::hash::BuildHasher + std::clone::Clone;
impl<K, V> NapiDashMap<K, V, RandomState>
where
K: Eq + PartialEq + std::hash::Hash,
{
pub fn new() -> Self {
Self(DashMap::<K, V>::with_hasher(RandomState::default()))
}
}
impl<K, V, S> Deref for NapiDashMap<K, V, S>
where
K: Eq + PartialEq + std::hash::Hash,
S: std::hash::BuildHasher + std::clone::Clone,
{
type Target = DashMap<K, V, S>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K, V, S> DerefMut for NapiDashMap<K, V, S>
where
K: Eq + PartialEq + std::hash::Hash,
S: std::hash::BuildHasher + std::clone::Clone,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K, V, S> ToNapiValue for NapiDashMap<K, V, S>
where
K: AsRef<str> + std::cmp::Eq + std::hash::Hash,
S: std::hash::BuildHasher + std::clone::Clone,
V: ToNapiValue,
{
unsafe fn to_napi_value(raw_env: sys::napi_env, val: Self) -> napi::Result<sys::napi_value> {
let env = Env::from(raw_env);
let mut obj = env.create_object()?;
for (k, v) in val.0.into_iter() {
obj.set(k.as_ref(), v)?;
}
unsafe { Object::to_napi_value(raw_env, obj) }
}
}
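
The newtype above exists only to add a ToNapiValue conversion; Deref and DerefMut forward the entire DashMap API. A small sketch of the underlying DashMap pattern that the runtime and external caches rely on, assuming only the dashmap crate:

use dashmap::DashMap;
use std::sync::Arc;
use std::thread;

fn main() {
    let cache: Arc<DashMap<String, String>> = Arc::new(DashMap::new());
    let handles: Vec<_> = (0..4)
        .map(|i| {
            let cache = Arc::clone(&cache);
            // entries can be inserted from many threads without an explicit Mutex
            thread::spawn(move || {
                cache.insert(format!("external:{i}"), format!("hash-{i}"));
            })
        })
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(cache.len(), 4);
}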

View File

@ -1,4 +1,3 @@
use crate::native::project_graph::types::Target;
use crate::native::types::JsInputs;
use std::collections::HashMap;
@ -6,5 +5,4 @@ use std::collections::HashMap;
/// Stripped version of the NxJson interface for use in rust
pub struct NxJson {
pub named_inputs: Option<HashMap<String, Vec<JsInputs>>>,
pub target_defaults: Option<HashMap<String, Target>>,
}

View File

@ -1,4 +1,6 @@
mod find_matching_projects;
mod normalize_trait;
pub mod path;
pub use find_matching_projects::*;
pub use normalize_trait::Normalize;

View File

@ -0,0 +1,3 @@
pub trait Normalize {
fn to_normalized_string(&self) -> String;
}

View File

@ -1,16 +1,13 @@
use std::path::Path;
use crate::native::utils::normalize_trait::Normalize;
use std::path::{Path, PathBuf};
pub trait Normalize {
fn to_normalized_string(&self) -> String;
}
impl Normalize for std::path::Path {
impl Normalize for Path {
fn to_normalized_string(&self) -> String {
normalize_path(self)
}
}
impl Normalize for std::path::PathBuf {
impl Normalize for PathBuf {
fn to_normalized_string(&self) -> String {
normalize_path(self)
}
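
The normalize_path helper is not shown in this hunk; a sketch under the assumption that it produces forward-slash strings so file keys hash and compare the same across platforms:

use std::path::{Path, PathBuf};

trait Normalize {
    fn to_normalized_string(&self) -> String;
}

// assumption: backslashes become forward slashes for stable keys
fn normalize_path(path: &Path) -> String {
    path.to_string_lossy().replace('\\', "/")
}

impl Normalize for Path {
    fn to_normalized_string(&self) -> String {
        normalize_path(self)
    }
}

impl Normalize for PathBuf {
    fn to_normalized_string(&self) -> String {
        normalize_path(self)
    }
}

fn main() {
    assert_eq!(
        Path::new("libs/parent/filea.ts").to_normalized_string(),
        "libs/parent/filea.ts"
    );
}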

View File

@ -98,7 +98,7 @@ mod test {
use assert_fs::prelude::*;
use assert_fs::TempDir;
use crate::native::utils::path::Normalize;
use crate::native::utils::Normalize;
use super::*;

View File

@ -67,7 +67,7 @@ impl From<&Event> for WatchEventInternal {
.expect("there should always be a file event kind");
let path_ref = path.0;
let event_type = if matches!(path.1, None) && !path_ref.exists() {
let event_type = if path.1.is_none() && !path_ref.exists() {
EventType::delete
} else {
#[cfg(target_os = "macos")]

View File

@ -1,26 +1,26 @@
use crate::native::glob::build_glob_set;
use crate::native::utils::path::Normalize;
use napi::bindgen_prelude::Promise;
use std::collections::HashMap;
use napi::bindgen_prelude::Promise;
use rayon::prelude::*;
use std::path::PathBuf;
use crate::native::glob::build_glob_set;
use crate::native::types::FileData;
/// Get workspace config files based on provided globs
pub(super) fn glob_files(
files: &[(PathBuf, String)],
files: &[FileData],
globs: Vec<String>,
exclude: Option<Vec<String>>,
) -> napi::Result<impl ParallelIterator<Item = &(PathBuf, String)>> {
) -> napi::Result<impl ParallelIterator<Item = &FileData>> {
let globs = build_glob_set(&globs)?;
let exclude_glob_set = exclude
.map(|exclude| build_glob_set(&exclude))
.transpose()?;
Ok(files.par_iter().filter(move |file| {
let path = file.0.to_normalized_string();
let is_match = globs.is_match(&path);
Ok(files.par_iter().filter(move |file_data| {
let path = &file_data.file;
let is_match = globs.is_match(path);
if !is_match {
return is_match;
@ -28,7 +28,7 @@ pub(super) fn glob_files(
exclude_glob_set
.as_ref()
.map(|exclude_glob_set| exclude_glob_set.is_match(&path))
.map(|exclude_glob_set| exclude_glob_set.is_match(path))
.unwrap_or(is_match)
}))
}
@ -36,7 +36,7 @@ pub(super) fn glob_files(
/// Get workspace config files based on provided globs
pub(super) fn get_project_configurations<ConfigurationParser>(
globs: Vec<String>,
files: &[(PathBuf, String)],
files: &[FileData],
parse_configurations: ConfigurationParser,
) -> napi::Result<Promise<HashMap<String, String>>>
where
@ -44,5 +44,5 @@ where
{
let files = glob_files(files, globs, None).map_err(anyhow::Error::from)?;
parse_configurations(files.map(|file| file.0.to_normalized_string()).collect())
parse_configurations(files.map(|file| file.file.to_owned()).collect())
}
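
A hedged sketch of glob-based filtering with globset used directly (build_glob_set above presumably wraps something similar); this version uses the conventional drop-on-exclude semantics:

use globset::{Glob, GlobSet, GlobSetBuilder};

struct FileData {
    file: String,
    hash: String,
}

fn build_globs(patterns: &[&str]) -> Result<GlobSet, globset::Error> {
    let mut builder = GlobSetBuilder::new();
    for pattern in patterns {
        builder.add(Glob::new(pattern)?);
    }
    builder.build()
}

fn glob_files<'a>(
    files: &'a [FileData],
    include: &'a GlobSet,
    exclude: Option<&'a GlobSet>,
) -> impl Iterator<Item = &'a FileData> {
    files.iter().filter(move |f| {
        include.is_match(&f.file) && exclude.map_or(true, |e| !e.is_match(&f.file))
    })
}

fn main() -> Result<(), globset::Error> {
    let include = build_globs(&["**/*.ts"])?;
    let exclude = build_globs(&["**/*.spec.ts"])?;
    let files = vec![
        FileData { file: "libs/parent/filea.ts".into(), hash: "a.hash".into() },
        FileData { file: "libs/parent/filea.spec.ts".into(), hash: "a.spec.hash".into() },
    ];
    for f in glob_files(&files, &include, Some(&exclude)) {
        println!("{} {}", f.file, f.hash);
    }
    Ok(())
}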

View File

@ -1,19 +1,25 @@
use crate::native::logger::enable_logger;
use napi::bindgen_prelude::External;
use std::collections::HashMap;
use crate::native::hasher::hash;
use crate::native::types::FileData;
use crate::native::utils::path::Normalize;
use crate::native::utils::Normalize;
use napi::bindgen_prelude::*;
use parking_lot::{Condvar, Mutex};
use rayon::prelude::*;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::thread;
use crate::native::logger::enable_logger;
use crate::native::project_graph::utils::{find_project_for_path, ProjectRootMappings};
use crate::native::types::FileData;
use parking_lot::{Condvar, Mutex};
use tracing::{trace, warn};
use crate::native::walker::nx_walker;
use crate::native::workspace::types::{
FileMap, NxWorkspaceFilesExternals, ProjectFiles, UpdatedWorkspaceFiles,
};
use crate::native::workspace::{config_files, workspace_files};
#[napi]
@ -62,7 +68,7 @@ impl FilesWorker {
FilesWorker(Some(files_lock))
}
pub fn get_files(&self) -> Vec<(PathBuf, String)> {
pub fn get_files(&self) -> Vec<FileData> {
if let Some(files_sync) = &self.0 {
let (files_lock, cvar) = files_sync.deref();
trace!("locking files");
@ -73,11 +79,18 @@ impl FilesWorker {
cvar.wait(&mut files);
}
let cloned_files = files.clone();
let file_data = files
.iter()
.map(|(path, hash)| FileData {
file: path.to_normalized_string(),
hash: hash.clone(),
})
.collect();
drop(files);
trace!("files are available");
cloned_files
file_data
} else {
vec![]
}
@ -106,8 +119,8 @@ impl FilesWorker {
.par_iter()
.filter_map(|path| {
let full_path = workspace_root_path.join(path);
let Ok(content) = std::fs::read(full_path) else {
trace!("could not read file: ?full_path");
let Ok(content) = std::fs::read(&full_path) else {
trace!("could not read file: {full_path:?}");
return None;
};
Some((path.to_string(), hash(&content)))
@ -154,8 +167,7 @@ impl WorkspaceContext {
where
ConfigurationParser: Fn(Vec<String>) -> napi::Result<Promise<HashMap<String, String>>>,
{
let files = self.files_worker.get_files();
workspace_files::get_files(env, globs, parse_configurations, &files)
workspace_files::get_files(env, globs, parse_configurations, self.all_file_data())
.map_err(anyhow::Error::from)
}
@ -165,12 +177,9 @@ impl WorkspaceContext {
globs: Vec<String>,
exclude: Option<Vec<String>>,
) -> napi::Result<Vec<String>> {
let files = self.files_worker.get_files();
let globbed_files = config_files::glob_files(&files, globs, exclude)?;
Ok(globbed_files
.map(|file| file.0.to_normalized_string())
.collect())
let file_data = self.all_file_data();
let globbed_files = config_files::glob_files(&file_data, globs, exclude)?;
Ok(globbed_files.map(|file| file.file.to_owned()).collect())
}
#[napi]
@ -179,11 +188,11 @@ impl WorkspaceContext {
globs: Vec<String>,
exclude: Option<Vec<String>>,
) -> napi::Result<String> {
let files = self.files_worker.get_files();
let files = &self.all_file_data();
let globbed_files = config_files::glob_files(&files, globs, exclude)?;
Ok(hash(
&globbed_files
.map(|file| file.1.as_bytes())
.map(|file| file.hash.as_bytes())
.collect::<Vec<_>>()
.concat(),
))
@ -199,11 +208,11 @@ impl WorkspaceContext {
where
ConfigurationParser: Fn(Vec<String>) -> napi::Result<Promise<HashMap<String, String>>>,
{
let files = self.files_worker.get_files();
let promise =
config_files::get_project_configurations(globs, &files, parse_configurations)?;
let promise = config_files::get_project_configurations(
globs,
&self.all_file_data(),
parse_configurations,
)?;
env.spawn_future(async move {
let result = promise.await?;
Ok(result)
@ -221,15 +230,89 @@ impl WorkspaceContext {
}
#[napi]
pub fn all_file_data(&self) -> Vec<FileData> {
let files = self.files_worker.get_files();
files
pub fn update_project_files(
&self,
project_root_mappings: ProjectRootMappings,
project_files: External<ProjectFiles>,
global_files: External<Vec<FileData>>,
updated_files: HashMap<String, String>,
deleted_files: Vec<&str>,
) -> UpdatedWorkspaceFiles {
trace!("updating project files");
trace!("{project_root_mappings:?}");
let mut project_files_map = project_files.clone();
let mut global_files = global_files
.iter()
.map(|(path, content)| FileData {
file: path.to_normalized_string(),
hash: content.clone(),
})
.collect()
.map(|f| (f.file.clone(), f.hash.clone()))
.collect::<HashMap<_, _>>();
trace!(
"adding {} updated files to project files",
updated_files.len()
);
for updated_file in updated_files.into_iter() {
let file = updated_file.0;
let hash = updated_file.1;
if let Some(project_files) = find_project_for_path(&file, &project_root_mappings)
.and_then(|project| project_files_map.get_mut(project))
{
trace!("{file:?} was found in a project");
if let Some(file) = project_files.iter_mut().find(|f| f.file == file) {
trace!("updating hash for file");
file.hash = hash;
} else {
trace!("{file:?} was not part of a project, adding to project files");
project_files.push(FileData { file, hash });
}
} else {
trace!("{file:?} was not found in any project, updating global files");
global_files
.entry(file)
.and_modify(|e| *e = hash.clone())
.or_insert(hash);
}
}
trace!(
"removing {} deleted files from project files",
deleted_files.len()
);
for deleted_file in deleted_files.into_iter() {
if let Some(project_files) = find_project_for_path(deleted_file, &project_root_mappings)
.and_then(|project| project_files_map.get_mut(project))
{
if let Some(pos) = project_files.iter().position(|f| f.file == deleted_file) {
trace!("removing file: {deleted_file:?} from project");
project_files.remove(pos);
}
}
if global_files.contains_key(deleted_file) {
trace!("removing {deleted_file:?} from global files");
global_files.remove(deleted_file);
}
}
let non_project_files = global_files
.into_iter()
.map(|(file, hash)| FileData { file, hash })
.collect::<Vec<_>>();
UpdatedWorkspaceFiles {
file_map: FileMap {
project_file_map: project_files_map.clone(),
non_project_files: non_project_files.clone(),
},
external_references: NxWorkspaceFilesExternals {
project_files: External::new(project_files_map),
global_files: External::new(non_project_files),
all_workspace_files: External::new(self.all_file_data()),
},
}
}
#[napi]
pub fn all_file_data(&self) -> Vec<FileData> {
self.files_worker.get_files()
}
}
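
The incremental update above routes each changed path to its owning project by walking parent directories against the project-root mappings, falling back to a global bucket. A std-only sketch of that routing, where find_project_for_path is a simplified stand-in for the real helper:

use std::collections::HashMap;
use std::path::Path;

struct FileData {
    file: String,
    hash: String,
}

fn find_project_for_path<'a>(file: &str, roots: &'a HashMap<String, String>) -> Option<&'a str> {
    // walk up from the file until a directory matches a project root
    let mut dir = Path::new(file).parent();
    while let Some(d) = dir {
        if let Some(project) = roots.get(&d.to_string_lossy().replace('\\', "/")) {
            return Some(project.as_str());
        }
        dir = d.parent();
    }
    None
}

fn apply_updates(
    project_files: &mut HashMap<String, Vec<FileData>>,
    global_files: &mut HashMap<String, String>,
    roots: &HashMap<String, String>,
    updated: HashMap<String, String>,
) {
    for (file, hash) in updated {
        match find_project_for_path(&file, roots).map(str::to_owned) {
            Some(project) => {
                let files = project_files.entry(project).or_default();
                match files.iter_mut().find(|f| f.file == file) {
                    Some(existing) => existing.hash = hash, // update in place
                    None => files.push(FileData { file, hash }),
                }
            }
            // files outside every project root land in the global bucket
            None => {
                global_files.insert(file, hash);
            }
        }
    }
}

fn main() {
    let roots = HashMap::from([("libs/parent".to_string(), "parent".to_string())]);
    let mut project_files: HashMap<String, Vec<FileData>> = HashMap::new();
    let mut global_files = HashMap::new();
    let updated = HashMap::from([
        ("libs/parent/filea.ts".to_string(), "new-hash".to_string()),
        ("tools/script.js".to_string(), "tool-hash".to_string()),
    ]);
    apply_updates(&mut project_files, &mut global_files, &roots, updated);
    assert_eq!(project_files["parent"].len(), 1);
    assert_eq!(global_files.len(), 1);
}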

View File

@ -1,5 +1,5 @@
pub mod config_files;
pub mod context;
mod errors;
mod types;
pub mod types;
pub mod workspace_files;

View File

@ -1,5 +1,38 @@
use crate::native::types::FileData;
use napi::bindgen_prelude::External;
use std::collections::HashMap;
#[derive(Debug, Eq, PartialEq)]
pub enum FileLocation {
Global,
Project(String),
}
pub type ProjectFiles = HashMap<String, Vec<FileData>>;
#[napi(object)]
#[derive(Default)]
pub struct NxWorkspaceFiles {
pub project_file_map: ProjectFiles,
pub global_files: Vec<FileData>,
pub external_references: Option<NxWorkspaceFilesExternals>,
}
#[napi(object)]
pub struct NxWorkspaceFilesExternals {
pub project_files: External<ProjectFiles>,
pub global_files: External<Vec<FileData>>,
pub all_workspace_files: External<Vec<FileData>>,
}
#[napi(object)]
pub struct UpdatedWorkspaceFiles {
pub file_map: FileMap,
pub external_references: NxWorkspaceFilesExternals,
}
#[napi(object)]
pub struct FileMap {
pub project_file_map: ProjectFiles,
pub non_project_files: Vec<FileData>,
}
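
NxWorkspaceFilesExternals hands the JS side External handles: opaque references to Rust-owned data that later native calls (such as update_project_files above) can reuse without re-serializing the file map across the boundary. A hypothetical napi-rs sketch of that round trip:

use napi::bindgen_prelude::External;
use napi_derive::napi;

// hypothetical export: ownership stays in Rust, JS receives an opaque handle
#[napi]
pub fn create_file_list(files: Vec<String>) -> External<Vec<String>> {
    External::new(files)
}

// hypothetical export: the handle derefs back to the wrapped value
#[napi]
pub fn file_count(files: External<Vec<String>>) -> u32 {
    files.len() as u32
}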

View File

@ -2,35 +2,30 @@ use napi::bindgen_prelude::{Object, Promise};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use napi::bindgen_prelude::External;
use napi::Env;
use rayon::prelude::*;
use tracing::trace;
use crate::native::types::FileData;
use crate::native::utils::path::Normalize;
use crate::native::workspace::config_files;
use crate::native::workspace::types::FileLocation;
#[napi(object)]
#[derive(Default)]
pub struct NxWorkspaceFiles {
pub project_file_map: HashMap<String, Vec<FileData>>,
pub global_files: Vec<FileData>,
}
use crate::native::workspace::types::{FileLocation, NxWorkspaceFiles, NxWorkspaceFilesExternals};
pub(super) fn get_files<ConfigurationParser>(
env: Env,
globs: Vec<String>,
parse_configurations: ConfigurationParser,
file_data: &[(PathBuf, String)],
files: Vec<FileData>,
) -> napi::Result<Option<Object>>
where
ConfigurationParser: Fn(Vec<String>) -> napi::Result<Promise<HashMap<String, String>>>,
{
if files.is_empty() {
return Ok(Default::default());
};
trace!("{globs:?}");
let file_data = file_data.to_vec();
let promise =
config_files::get_project_configurations(globs, &file_data, parse_configurations)?;
let promise = config_files::get_project_configurations(globs, &files, parse_configurations)?;
let result = env.spawn_future(async move {
let parsed_graph_nodes = promise.await?;
@ -39,20 +34,17 @@ where
trace!(?root_map);
let file_locations = file_data
.into_par_iter()
.map(|(file_path, hash)| {
let file_locations = files
.par_iter()
.cloned()
.map(|file_data| {
let file_path = PathBuf::from(&file_data.file);
let mut parent = file_path.parent().unwrap_or_else(|| Path::new("."));
while root_map.get(parent).is_none() && parent != Path::new(".") {
parent = parent.parent().unwrap_or_else(|| Path::new("."));
}
let file_data = FileData {
file: file_path.to_normalized_string(),
hash: hash.clone(),
};
match root_map.get(parent) {
Some(project_name) => (FileLocation::Project(project_name.into()), file_data),
None => (FileLocation::Global, file_data),
@ -86,9 +78,17 @@ where
}
}
let project_files_external = External::new(project_file_map.clone());
let global_files_external = External::new(global_files.clone());
let all_workspace_files = External::new(files);
Ok(NxWorkspaceFiles {
project_file_map,
global_files,
external_references: Some(NxWorkspaceFilesExternals {
project_files: project_files_external,
global_files: global_files_external,
all_workspace_files,
}),
})
})?;
Ok(Some(result))

View File

@ -9,13 +9,13 @@ import {
import { PackageJson } from '../../../utils/package-json';
import { existsSync } from 'fs';
import { workspaceRoot } from '../../../utils/workspace-root';
import { readNxJson } from '../../../config/configuration';
import { readFileMapCache } from '../../../project-graph/nx-deps-cache';
import { join } from 'path';
import {
filterUsingGlobPatterns,
getTargetInputs,
} from '../../../hasher/task-hasher';
import { readNxJson } from '../../../config/configuration';
import { readFileMapCache } from '../../../project-graph/nx-deps-cache';
import { join } from 'path';
interface NpmDeps {
readonly dependencies: Record<string, string>;

View File

@ -34,18 +34,22 @@ import { existsSync } from 'fs';
import { PackageJson } from '../utils/package-json';
import { getNxRequirePaths } from '../utils/installation-directory';
import { output } from '../utils/output';
import { ExternalObject, NxWorkspaceFilesExternals } from '../native';
let storedFileMap: FileMap | null = null;
let storedAllWorkspaceFiles: FileData[] | null = null;
let storedRustReferences: NxWorkspaceFilesExternals | null = null;
export function getFileMap(): {
fileMap: FileMap;
allWorkspaceFiles: FileData[];
rustReferences: NxWorkspaceFilesExternals | null;
} {
if (!!storedFileMap) {
return {
fileMap: storedFileMap,
allWorkspaceFiles: storedAllWorkspaceFiles,
rustReferences: storedRustReferences,
};
} else {
return {
@ -54,6 +58,7 @@ export function getFileMap(): {
projectFileMap: {},
},
allWorkspaceFiles: [],
rustReferences: null,
};
}
}
@ -63,6 +68,7 @@ export async function buildProjectGraphUsingProjectFileMap(
externalNodes: Record<string, ProjectGraphExternalNode>,
fileMap: FileMap,
allWorkspaceFiles: FileData[],
rustReferences: NxWorkspaceFilesExternals,
fileMapCache: FileMapCache | null,
shouldWriteCache: boolean
): Promise<{
@ -71,6 +77,7 @@ export async function buildProjectGraphUsingProjectFileMap(
}> {
storedFileMap = fileMap;
storedAllWorkspaceFiles = allWorkspaceFiles;
storedRustReferences = rustReferences;
const nxJson = readNxJson();
const projectGraphVersion = '6.0';

View File

@ -53,93 +53,4 @@ describe('fileMapUtils', () => {
});
});
});
describe('updateFileMap', () => {
it('should map files to projects', () => {
const projectsConfigurations = {
demo: {
root: 'apps/demo',
sourceRoot: 'apps/demo/src',
projectType: 'application' as ProjectType,
},
'demo-e2e': {
root: 'apps/demo-e2e',
sourceRoot: 'apps/demo-e2e/src',
projectType: 'application' as ProjectType,
},
ui: {
root: 'libs/ui',
sourceRoot: 'libs/ui/src',
projectType: 'library' as ProjectType,
},
};
const files = [
{ file: 'apps/demo/src/main.ts', hash: 'some-hash' },
{ file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash' },
{ file: 'libs/ui/src/index.ts', hash: 'some-hash' },
{ file: 'libs/ui/src/second.ts', hash: 'some-hash' },
{ file: 'tools/myfile.txt', hash: 'some-hash' },
{ file: 'tools/secondfile.txt', hash: 'some-hash' },
];
const projectFileMap = {
demo: [{ file: 'apps/demo/src/main.ts', hash: 'some-hash' }],
'demo-e2e': [{ file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash' }],
ui: [
{ file: 'libs/ui/src/index.ts', hash: 'some-hash' },
{ file: 'libs/ui/src/second.ts', hash: 'some-hash' },
],
};
const fileMap = {
projectFileMap,
allWorkspaceFiles: files,
nonProjectFiles: files.filter(
(f) =>
!Object.values(projectFileMap).some((arr) =>
arr.some((projectFile) => projectFile.file === f.file)
)
),
};
const result = updateFileMap(
projectsConfigurations,
fileMap,
files,
new Map([
['apps/demo/src/main.ts', 'demo-main-update'],
['apps/demo/src/new-main.ts', 'new-main-hash'],
]),
['libs/ui/src/second.ts', 'tools/secondfile.txt']
);
expect(result).toEqual({
fileMap: {
projectFileMap: {
demo: [
{
file: 'apps/demo/src/main.ts',
hash: 'demo-main-update',
},
{
file: 'apps/demo/src/new-main.ts',
hash: 'new-main-hash',
},
],
'demo-e2e': [
{ file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash' },
],
ui: [{ file: 'libs/ui/src/index.ts', hash: 'some-hash' }],
},
nonProjectFiles: [{ file: 'tools/myfile.txt', hash: 'some-hash' }],
},
allWorkspaceFiles: [
{ file: 'apps/demo/src/main.ts', hash: 'demo-main-update' },
{ file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash' },
{ file: 'libs/ui/src/index.ts', hash: 'some-hash' },
{ file: 'tools/myfile.txt', hash: 'some-hash' },
{ file: 'apps/demo/src/new-main.ts', hash: 'new-main-hash' },
],
});
});
});
});

View File

@ -14,8 +14,13 @@ import {
} from '../config/workspace-json-project-json';
import { daemonClient } from '../daemon/client/client';
import { readProjectsConfigurationFromProjectGraph } from './project-graph';
import { getAllFileDataInContext } from '../utils/workspace-context';
import {
getAllFileDataInContext,
updateProjectFiles,
} from '../utils/workspace-context';
import { workspaceRoot } from '../utils/workspace-root';
import { ExternalObject, NxWorkspaceFilesExternals } from '../native';
import { buildAllWorkspaceFiles } from './utils/build-all-workspace-files';
export async function createProjectFileMapUsingProjectGraph(
graph: ProjectGraph
@ -71,72 +76,24 @@ export function createFileMap(
export function updateFileMap(
projectsConfigurations: Record<string, ProjectConfiguration>,
{ projectFileMap, nonProjectFiles }: FileMap,
allWorkspaceFiles: FileData[],
updatedFiles: Map<string, string>,
rustReferences: NxWorkspaceFilesExternals,
updatedFiles: Record<string, string>,
deletedFiles: string[]
): { fileMap: FileMap; allWorkspaceFiles: FileData[] } {
const projectRootMappings =
createProjectRootMappingsFromProjectConfigurations(projectsConfigurations);
let nonProjectFilesMap = new Map(nonProjectFiles.map((f) => [f.file, f]));
for (const f of updatedFiles.keys()) {
const project = findProjectForPath(f, projectRootMappings);
if (project) {
const matchingProjectFiles = projectFileMap[project] ?? [];
if (matchingProjectFiles) {
const fileData: FileData = matchingProjectFiles.find(
(t) => t.file === f
);
if (fileData) {
fileData.hash = updatedFiles.get(f);
} else {
matchingProjectFiles.push({
file: f,
hash: updatedFiles.get(f),
});
}
}
} else {
const hash = updatedFiles.get(f);
const entry = nonProjectFilesMap.get(f) ?? { file: f, hash };
entry.hash = hash;
nonProjectFilesMap.set(f, entry);
}
const fileData: FileData = allWorkspaceFiles.find((t) => t.file === f);
if (fileData) {
fileData.hash = updatedFiles.get(f);
} else {
allWorkspaceFiles.push({
file: f,
hash: updatedFiles.get(f),
});
}
}
for (const f of deletedFiles) {
const matchingProjectFiles =
projectFileMap[findProjectForPath(f, projectRootMappings)] ?? [];
if (matchingProjectFiles) {
const index = matchingProjectFiles.findIndex((t) => t.file === f);
if (index > -1) {
matchingProjectFiles.splice(index, 1);
}
}
if (nonProjectFilesMap.has(f)) {
nonProjectFilesMap.delete(f);
}
const index = allWorkspaceFiles.findIndex((t) => t.file === f);
if (index > -1) {
allWorkspaceFiles.splice(index, 1);
}
}
) {
const updates = updateProjectFiles(
Object.fromEntries(
createProjectRootMappingsFromProjectConfigurations(projectsConfigurations)
),
rustReferences,
updatedFiles,
deletedFiles
);
return {
fileMap: {
projectFileMap,
nonProjectFiles: Array.from(nonProjectFilesMap.values()),
},
allWorkspaceFiles,
fileMap: updates.fileMap,
allWorkspaceFiles: buildAllWorkspaceFiles(
updates.fileMap.projectFileMap,
updates.fileMap.nonProjectFiles
),
rustReferences: updates.externalReferences,
};
}

View File

@ -84,6 +84,7 @@ export async function buildProjectGraphWithoutDaemon() {
projectConfigurations,
externalNodes,
sourceMaps,
rustReferences,
} = await retrieveWorkspaceFiles(workspaceRoot, nxJson);
const cacheEnabled = process.env.NX_CACHE_PROJECT_GRAPH !== 'false';
@ -93,6 +94,7 @@ export async function buildProjectGraphWithoutDaemon() {
externalNodes,
fileMap,
allWorkspaceFiles,
rustReferences,
cacheEnabled ? readFileMapCache() : null,
cacheEnabled
)

View File

@ -0,0 +1,21 @@
import { FileData, ProjectFileMap } from '../../config/project-graph';
import { performance } from 'perf_hooks';
export function buildAllWorkspaceFiles(
projectFileMap: ProjectFileMap,
globalFiles: FileData[]
): FileData[] {
performance.mark('get-all-workspace-files:start');
let fileData: FileData[] = Object.values(projectFileMap).flat();
fileData = fileData
.concat(globalFiles)
.sort((a, b) => a.file.localeCompare(b.file));
performance.mark('get-all-workspace-files:end');
performance.measure(
'get-all-workspace-files',
'get-all-workspace-files:start',
'get-all-workspace-files:end'
);
return fileData;
}

View File

@ -5,16 +5,12 @@ import {
ProjectsConfigurations,
} from '../../config/workspace-json-project-json';
import {
NxAngularJsonPlugin,
NX_ANGULAR_JSON_PLUGIN_NAME,
NxAngularJsonPlugin,
shouldMergeAngularProjects,
} from '../../adapter/angular-json';
import { NxJsonConfiguration, readNxJson } from '../../config/nx-json';
import {
FileData,
ProjectFileMap,
ProjectGraphExternalNode,
} from '../../config/project-graph';
import { ProjectGraphExternalNode } from '../../config/project-graph';
import type { NxWorkspaceFiles } from '../../native';
import { getNxPackageJsonWorkspacesPlugin } from '../../../plugins/package-json-workspaces';
import {
@ -28,10 +24,11 @@ import {
} from '../../utils/nx-plugin';
import { CreateProjectJsonProjectsPlugin } from '../../plugins/project-json/build-nodes/project-json';
import {
globWithWorkspaceContext,
getProjectConfigurationsFromContext,
getNxWorkspaceFilesFromContext,
getProjectConfigurationsFromContext,
globWithWorkspaceContext,
} from '../../utils/workspace-context';
import { buildAllWorkspaceFiles } from './build-all-workspace-files';
/**
* Walks the workspace directory to create the `projectFileMap`, `ProjectConfigurations` and `allWorkspaceFiles`
@ -62,24 +59,25 @@ export async function retrieveWorkspaceFiles(
let externalNodes: Record<string, ProjectGraphExternalNode>;
let sourceMaps: ConfigurationSourceMaps;
const { projectFileMap, globalFiles } = (await getNxWorkspaceFilesFromContext(
workspaceRoot,
globs,
async (configs: string[]) => {
const projectConfigurations = await createProjectConfigurations(
workspaceRoot,
nxJson,
configs,
plugins
);
const { projectFileMap, globalFiles, externalReferences } =
(await getNxWorkspaceFilesFromContext(
workspaceRoot,
globs,
async (configs: string[]) => {
const projectConfigurations = await createProjectConfigurations(
workspaceRoot,
nxJson,
configs,
plugins
);
projects = projectConfigurations.projects;
sourceMaps = projectConfigurations.sourceMaps;
projects = projectConfigurations.projects;
sourceMaps = projectConfigurations.sourceMaps;
externalNodes = projectConfigurations.externalNodes;
return projectConfigurations.rootMap;
}
)) as NxWorkspaceFiles;
externalNodes = projectConfigurations.externalNodes;
return projectConfigurations.rootMap;
}
)) as NxWorkspaceFiles;
performance.mark('get-workspace-files:end');
performance.measure(
'get-workspace-files',
@ -99,6 +97,7 @@ export async function retrieveWorkspaceFiles(
} as ProjectsConfigurations,
externalNodes,
sourceMaps,
rustReferences: externalReferences,
};
}
@ -239,25 +238,6 @@ export async function retrieveProjectConfigurationsWithoutPluginInference(
return projects;
}
function buildAllWorkspaceFiles(
projectFileMap: ProjectFileMap,
globalFiles: FileData[]
): FileData[] {
performance.mark('get-all-workspace-files:start');
let fileData: FileData[] = Object.values(projectFileMap).flat();
fileData = fileData
.concat(globalFiles)
.sort((a, b) => a.file.localeCompare(b.file));
performance.mark('get-all-workspace-files:end');
performance.measure(
'get-all-workspace-files',
'get-all-workspace-files:start',
'get-all-workspace-files:end'
);
return fileData;
}
export async function createProjectConfigurations(
workspaceRoot: string,
nxJson: NxJsonConfiguration,

View File

@ -237,12 +237,13 @@ export async function invokeTasksRunner({
if (daemonClient.enabled()) {
hasher = new DaemonBasedTaskHasher(daemonClient, runnerOptions);
} else {
const { fileMap, allWorkspaceFiles } = getFileMap();
const { fileMap, allWorkspaceFiles, rustReferences } = getFileMap();
hasher = new InProcessTaskHasher(
fileMap?.projectFileMap,
allWorkspaceFiles,
projectGraph,
nxJson,
rustReferences,
runnerOptions
);
}
@ -250,15 +251,13 @@ export async function invokeTasksRunner({
// this is used for two reasons: to fetch all remote cache hits AND
// to submit everything that is known in advance to Nx Cloud to run in
// a distributed fashion
performance.mark('hashing:start');
await hashTasksThatDoNotDependOnOutputsOfOtherTasks(
hasher,
projectGraph,
taskGraph,
nxJson
);
performance.mark('hashing:end');
performance.measure('hashing', 'hashing:start', 'hashing:end');
const promiseOrObservable = tasksRunner(
tasks,

View File

@ -1,5 +1,6 @@
import type { WorkspaceContext } from '../native';
import type { NxWorkspaceFilesExternals, WorkspaceContext } from '../native';
import { performance } from 'perf_hooks';
import { ProjectRootMappings } from '../project-graph/utils/find-project-for-path';
let workspaceContext: WorkspaceContext | undefined;
@ -64,6 +65,21 @@ export function getAllFileDataInContext(workspaceRoot: string) {
return workspaceContext.allFileData();
}
export function updateProjectFiles(
projectRootMappings: Record<string, string>,
rustReferences: NxWorkspaceFilesExternals,
updatedFiles: Record<string, string>,
deletedFiles: string[]
) {
return workspaceContext?.updateProjectFiles(
projectRootMappings,
rustReferences.projectFiles,
rustReferences.globalFiles,
updatedFiles,
deletedFiles
);
}
function ensureContextAvailable(workspaceRoot: string) {
if (!workspaceContext || workspaceContext?.workspaceRoot !== workspaceRoot) {
setupWorkspaceContext(workspaceRoot);