fix(core): fix dep outputs hashing (#18331)

This commit is contained in:
parent 36dae70fb0
commit e4d23383f5
@@ -1,11 +1,3 @@
-export function withEnvironmentVariables(
-  env: Record<string, string>,
-  callback: () => void
-): void;
-export function withEnvironmentVariables(
-  env: Record<string, string>,
-  callback: () => Promise<void>
-): Promise<void>;
 export function withEnvironmentVariables(
   env: Record<string, string>,
   callback: () => void | Promise<void>
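The two sync/async overloads above are collapsed into a single overload whose callback may return void or a promise. For orientation, here is a minimal sketch of how a helper with this shape could be implemented; the body below is an assumption for illustration, since only the signatures appear in this diff:

import { } from 'node:process'; // no extra imports needed; process is global in Node

export async function withEnvironmentVariables(
  env: Record<string, string>,
  callback: () => void | Promise<void>
): Promise<void> {
  // Snapshot the current values so they can be restored afterwards.
  const previous: Record<string, string | undefined> = {};
  for (const [key, value] of Object.entries(env)) {
    previous[key] = process.env[key];
    process.env[key] = value;
  }
  try {
    // Awaiting a void value is a no-op, so synchronous callbacks work too.
    await callback();
  } finally {
    for (const [key, value] of Object.entries(previous)) {
      if (value === undefined) {
        delete process.env[key];
      } else {
        process.env[key] = value;
      }
    }
  }
}

Because the helper is now always awaitable, call sites in the spec below change from withEnvironmentVariables(...) to await withEnvironmentVariables(...).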
packages/nx/src/hasher/__snapshots__/task-hasher.spec.ts.snap (new file, 386 lines)
@@ -0,0 +1,386 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`TaskHasher dependentTasksOutputFiles should depend on dependent tasks output files 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "12802727827024321009",
      "TsConfig": "8767608672024750088",
      "dist/libs/child/index.d.ts": "3244421341483603138",
      "dist/libs/grandchild/index.d.ts": "3244421341483603138",
      "parent:!{projectRoot}/**/*.spec.ts": "17962802443644575456",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "17939200785615949974",
}
`;

exports[`TaskHasher dependentTasksOutputFiles should work with dependent tasks with globs as outputs 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "12802727827024321009",
      "TsConfig": "8767608672024750088",
      "dist/libs/child/index.d.ts": "3244421341483603138",
      "dist/libs/grandchild/index.d.ts": "3244421341483603138",
      "parent:!{projectRoot}/**/*.spec.ts": "17962802443644575456",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "17939200785615949974",
}
`;

exports[`TaskHasher hashTarget should hash entire subtree of dependencies 1`] = `
{
  "details": {
    "command": "14389236043839781668",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "12026883044296863450",
      "TsConfig": "8767608672024750088",
      "app:{projectRoot}/**/*": "3244421341483603138",
      "target": "3789300870433976270",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "11829832011053499600",
}
`;

exports[`TaskHasher hashTarget should hash executor dependencies of @nx packages 1`] = `
{
  "details": {
    "command": "14389236043839781668",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "12026883044296863450",
      "TsConfig": "8767608672024750088",
      "app:{projectRoot}/**/*": "3244421341483603138",
      "target": "1274004356858584726",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "15096054768893599383",
}
`;

exports[`TaskHasher hashTarget should use externalDependencies to override nx:run-commands 1`] = `
{
  "details": {
    "command": "14389236043839781668",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "17956886683554891195",
      "TsConfig": "8767608672024750088",
      "app:{projectRoot}/**/*": "3244421341483603138",
      "env:undefined": "3244421341483603138",
      "target": "7688798210438770308",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "18142315317355318287",
}
`;

exports[`TaskHasher hashTarget should use externalDependencies with empty array to ignore all deps 1`] = `
{
  "details": {
    "command": "14389236043839781668",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "9179552940021403596",
      "TsConfig": "8767608672024750088",
      "app:{projectRoot}/**/*": "3244421341483603138",
      "env:undefined": "3244421341483603138",
      "target": "3244421341483603138",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "13520777692097937224",
}
`;

exports[`TaskHasher should be able to handle multiple filesets per project 1`] = `
{
  "details": {
    "command": "13785966310271077209",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "17211930887387929067",
      "TsConfig": "8767608672024750088",
      "child:!{projectRoot}/**/*.spec.ts": "17508782620731849000",
      "env:MY_TEST_HASH_ENV": "17357374746554314488",
      "parent:{projectRoot}/**/*": "7263479247245830838",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/global1": "3052102066027208710",
      "{workspaceRoot}/global2": "8197394511443659629",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "2881231822394274502",
}
`;

exports[`TaskHasher should be able to handle multiple filesets per project 2`] = `
{
  "details": {
    "command": "6958627266354933907",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "17211930887387929067",
      "TsConfig": "8767608672024750088",
      "child:{projectRoot}/**/*": "2300207741412661544",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/global1": "3052102066027208710",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "3952879073081711415",
}
`;

exports[`TaskHasher should be able to include only a part of the base tsconfig 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "18166168584521190546",
      "TsConfig": "4035819825874039301",
      "parent:{projectRoot}/**/*": "8263681721738113012",
      "runtime:echo runtime123": "runtime123",
      "runtime:echo runtime456": "runtime456",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "9968611270204908917",
}
`;

exports[`TaskHasher should create task hash 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "4875698716044094030",
      "TsConfig": "8767608672024750088",
      "env:NONEXISTENTENV": "3244421341483603138",
      "env:TESTENV": "6544740722075256274",
      "parent:{projectRoot}/**/*": "8263681721738113012",
      "runtime:echo runtime123": "runtime123",
      "runtime:echo runtime456": "runtime456",
      "tagged:{projectRoot}/**/*": "3244421341483603138",
      "target": "1389868326933519382",
      "unrelated:{projectRoot}/**/*": "10091615118977982257",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "11412486534571442418",
}
`;

exports[`TaskHasher should hash multiple filesets of a project 1`] = `
{
  "details": {
    "command": "13785966310271077209",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "10499856664466672714",
      "TsConfig": "8767608672024750088",
      "parent:{projectRoot}/**/*": "7263479247245830838",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "14016847448680534278",
}
`;

exports[`TaskHasher should hash multiple filesets of a project 2`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "10499856664466672714",
      "TsConfig": "8767608672024750088",
      "parent:!{projectRoot}/**/*.spec.ts": "17962802443644575456",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "10694393160053318712",
}
`;

exports[`TaskHasher should hash non-default filesets 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "8196293273405506196",
      "TsConfig": "8767608672024750088",
      "child:{projectRoot}/**/*": "2300207741412661544",
      "parent:!{projectRoot}/**/*.spec.ts": "17962802443644575456",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "15803865862737990375",
}
`;

exports[`TaskHasher should hash npm project versions 1`] = `
{
  "details": {
    "command": "14389236043839781668",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "8128657069648957137",
      "TsConfig": "8767608672024750088",
      "app:{projectRoot}/**/*": "9104199730100321982",
      "npm:react": "4468841026152585217",
      "target": "14358315432887000841",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "3668827038634092448",
}
`;

exports[`TaskHasher should hash task where the project has dependencies 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "8876282510060012181",
      "TsConfig": "8767608672024750088",
      "child:{projectRoot}/**/*": "5484012818475684626",
      "parent:{projectRoot}/**/*": "14822394489351823627",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "16888277333405079717",
}
`;

exports[`TaskHasher should hash tasks where the project graph has circular dependencies 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "9892649345820140726",
      "TsConfig": "8767608672024750088",
      "child:{projectRoot}/**/*": "8973015561538144423",
      "parent:{projectRoot}/**/*": "9104199730100321982",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "16567132351487630958",
}
`;

exports[`TaskHasher should hash tasks where the project graph has circular dependencies 2`] = `
{
  "details": {
    "command": "7833005669885463868",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "18166168584521190546",
      "TsConfig": "8767608672024750088",
      "child:{projectRoot}/**/*": "8973015561538144423",
      "parent:{projectRoot}/**/*": "9104199730100321982",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "13014058586283814178",
}
`;

exports[`TaskHasher should use targetDefaults from nx.json 1`] = `
{
  "details": {
    "command": "4062279404379299270",
    "implicitDeps": {},
    "nodes": {
      "ProjectConfiguration": "9892649345820140726",
      "TsConfig": "8767608672024750088",
      "child:!{projectRoot}/**/*.spec.ts": "17508782620731849000",
      "parent:!{projectRoot}/**/*.spec.ts": "17962802443644575456",
      "target": "1389868326933519382",
      "{workspaceRoot}/.gitignore": "3244421341483603138",
      "{workspaceRoot}/.nxignore": "3244421341483603138",
      "{workspaceRoot}/nx.json": "8942239360311677987",
    },
    "runtime": {},
  },
  "value": "2743377369761868812",
}
`;
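Each entry above serializes the full result of hashTask: one combined value plus a per-input breakdown under details. As a reading aid, the shape these snapshots capture looks roughly like the following (field names are taken from the snapshots themselves; the interface name is hypothetical):

// Shape serialized by the snapshots above; the interface name is an assumption.
interface TaskHashSnapshot {
  value: string; // combined hash of everything below
  details: {
    command: string; // hash of project, target, and overrides
    // One entry per input: filesets, ProjectConfiguration, TsConfig,
    // env vars, runtime commands, and dependent-task output files.
    nodes: Record<string, string>;
    implicitDeps: Record<string, string>;
    runtime: Record<string, string>;
  };
}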
@@ -28,12 +28,6 @@ export class FileHasher {
     return hashFile(path).hash;
   }
 
-  hashFilesMatchingGlobs(path: string, globs: string[]): string {
-    // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
-    const { hashFilesMatchingGlobs } = require('../native');
-    return hashFilesMatchingGlobs(path, globs);
-  }
-
   clear(): void {
     this.fileHashes = new Map<string, string>();
     this.isInitialized = false;
@@ -1,6 +1,7 @@
+// This must come before the Hasher import
+import { TempFs } from '../utils/testing/temp-fs';
+let tempFs = new TempFs('TaskHasher');
 import { DependencyType } from '../config/project-graph';
-import { vol } from 'memfs';
 import {
   expandNamedInput,
   filterUsingGlobPatterns,
@@ -12,23 +13,10 @@ import { withEnvironmentVariables } from '../../internal-testing-utils/with-envi
 
 jest.mock('../utils/workspace-root', () => {
   return {
-    workspaceRoot: '/root',
+    workspaceRoot: tempFs.tempDir,
   };
 });
 
-jest.mock('./file-hasher', () => {
-  return {
-    hashArray: (values: string[]) => values.join('|'),
-  };
-});
-
-jest.mock('fs', () => require('memfs').fs);
-jest.mock('../plugins/js/utils/typescript', () => ({
-  getRootTsConfigFileName: jest
-    .fn()
-    .mockImplementation(() => '/root/tsconfig.base.json'),
-}));
-
 describe('TaskHasher', () => {
   const packageJson = {
     name: 'nrwl',
@@ -54,26 +42,16 @@ describe('TaskHasher', () => {
     { file: 'global2', hash: 'global2.hash' },
   ];
 
-  function createFileHasher(): any {
-    return {
-      allFileData: () => allWorkspaceFiles,
-    };
-  }
-
-  beforeEach(() => {
-    vol.fromJSON(
-      {
-        'tsconfig.base.json': tsConfigBaseJson,
-        'yarn.lock': 'content',
-        'package.json': JSON.stringify(packageJson),
-      },
-      '/root'
-    );
+  beforeEach(async () => {
+    await tempFs.createFiles({
+      'tsconfig.base.json': tsConfigBaseJson,
+      'yarn.lock': 'content',
+      'package.json': JSON.stringify(packageJson),
+    });
   });
 
   afterEach(() => {
     jest.resetAllMocks();
-    vol.reset();
+    tempFs.reset();
   });
 
   it('should create task hash', () =>
@@ -147,7 +125,7 @@ describe('TaskHasher', () => {
         {
           runtimeCacheInputs: ['echo runtime456'],
         },
-        createFileHasher()
+        fileHasher
       );
 
       const hash = await hasher.hashTask({
@@ -156,34 +134,7 @@ describe('TaskHasher', () => {
        overrides: { prop: 'prop-value' },
      });
 
-      expect(hash.value).toContain('file.hash'); //project files
-      expect(hash.value).toContain('prop-value'); //overrides
-      expect(hash.value).toContain('parent'); //project
-      expect(hash.value).toContain('build'); //target
-      expect(hash.value).toContain('runtime123');
-      expect(hash.value).toContain('runtime456');
-      expect(hash.value).toContain('env123');
-      expect(hash.value).toContain('filec.hash');
-
-      expect(hash.details.command).toEqual(
-        'parent|build||{"prop":"prop-value"}'
-      );
-      expect(hash.details.nodes).toEqual({
-        'parent:{projectRoot}/**/*':
-          '/file|file.hash|{"root":"libs/parent","targets":{"build":{"executor":"nx:run-commands","inputs":["default","^default",{"runtime":"echo runtime123"},{"env":"TESTENV"},{"env":"NONEXISTENTENV"},{"input":"default","projects":["unrelated","tag:some-tag"]}]}}}|{"compilerOptions":{"paths":{"@nx/parent":["libs/parent/src/index.ts"],"@nx/child":["libs/child/src/index.ts"]}}}',
-        target: 'nx:run-commands',
-        'unrelated:{projectRoot}/**/*':
-          'libs/unrelated/filec.ts|filec.hash|{"root":"libs/unrelated","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nx/parent":["libs/parent/src/index.ts"],"@nx/child":["libs/child/src/index.ts"]}}}',
-        'tagged:{projectRoot}/**/*':
-          '{"root":"libs/tagged","targets":{"build":{}},"tags":["some-tag"]}|{"compilerOptions":{"paths":{"@nx/parent":["libs/parent/src/index.ts"],"@nx/child":["libs/child/src/index.ts"]}}}',
-        '{workspaceRoot}/nx.json': 'nx.json.hash',
-        '{workspaceRoot}/.gitignore': '',
-        '{workspaceRoot}/.nxignore': '',
-        'runtime:echo runtime123': 'runtime123',
-        'runtime:echo runtime456': 'runtime456',
-        'env:TESTENV': 'env123',
-        'env:NONEXISTENTENV': '',
-      });
+      expect(hash).toMatchSnapshot();
    }));
 
   it('should hash task where the project has dependencies', async () => {
@@ -218,6 +169,7 @@ describe('TaskHasher', () => {
           },
         },
       },
+      externalNodes: {},
       dependencies: {
         parent: [{ source: 'parent', target: 'child', type: 'static' }],
       },
@@ -242,7 +194,7 @@ describe('TaskHasher', () => {
       },
       {} as any,
       {},
-      createFileHasher()
+      fileHasher
     );
 
     const hash = await hasher.hashTask({
@@ -251,16 +203,7 @@ describe('TaskHasher', () => {
       overrides: { prop: 'prop-value' },
     });
 
-    assertFilesets(hash, {
-      'child:{projectRoot}/**/*': {
-        contains: '/fileb.ts|/fileb.spec.ts',
-        excludes: '/filea.ts',
-      },
-      'parent:{projectRoot}/**/*': {
-        contains: '/filea.ts|/filea.spec.ts',
-        excludes: '/fileb.ts',
-      },
-    });
+    expect(hash).toMatchSnapshot();
   });
 
   it('should hash non-default filesets', async () => {
@@ -303,6 +246,7 @@ describe('TaskHasher', () => {
           },
         },
       },
+      externalNodes: {},
       dependencies: {
         parent: [{ source: 'parent', target: 'child', type: 'static' }],
       },
@@ -331,7 +275,7 @@ describe('TaskHasher', () => {
       },
       } as any,
       {},
-      createFileHasher()
+      fileHasher
     );
 
     const hash = await hasher.hashTask({
@@ -340,16 +284,7 @@ describe('TaskHasher', () => {
       overrides: { prop: 'prop-value' },
     });
 
-    assertFilesets(hash, {
-      'child:{projectRoot}/**/*': {
-        contains: 'libs/child/fileb.ts|libs/child/fileb.spec.ts',
-        excludes: 'filea.ts',
-      },
-      'parent:!{projectRoot}/**/*.spec.ts': {
-        contains: 'filea.ts',
-        excludes: 'filea.spec.ts',
-      },
-    });
+    expect(hash).toMatchSnapshot();
   });
 
   it('should hash multiple filesets of a project', async () => {
@@ -382,6 +317,7 @@ describe('TaskHasher', () => {
           },
         },
       },
+      externalNodes: {},
       dependencies: {
         parent: [],
       },
@@ -403,7 +339,7 @@ describe('TaskHasher', () => {
       },
       } as any,
       {},
-      createFileHasher()
+      fileHasher
     );
 
     const test = await hasher.hashTask({
@@ -412,11 +348,7 @@ describe('TaskHasher', () => {
       overrides: { prop: 'prop-value' },
     });
 
-    assertFilesets(test, {
-      'parent:{projectRoot}/**/*': {
-        contains: 'libs/parent/filea.ts|libs/parent/filea.spec.ts',
-      },
-    });
+    expect(test).toMatchSnapshot();
 
     const build = await hasher.hashTask({
       target: { project: 'parent', target: 'build' },
@@ -424,16 +356,11 @@ describe('TaskHasher', () => {
       overrides: { prop: 'prop-value' },
     });
 
-    assertFilesets(build, {
-      'parent:!{projectRoot}/**/*.spec.ts': {
-        contains: 'libs/parent/filea.ts',
-        excludes: 'libs/parent/filea.spec.ts',
-      },
-    });
+    expect(build).toMatchSnapshot();
   });
 
   it('should be able to handle multiple filesets per project', async () => {
-    withEnvironmentVariables(
+    await withEnvironmentVariables(
       { MY_TEST_HASH_ENV: 'MY_TEST_HASH_ENV_VALUE' },
       async () => {
         const hasher = new InProcessTaskHasher(
@@ -484,6 +411,7 @@ describe('TaskHasher', () => {
               },
             },
           },
+          externalNodes: {},
           dependencies: {
             parent: [{ source: 'parent', target: 'child', type: 'static' }],
           },
@@ -513,7 +441,7 @@ describe('TaskHasher', () => {
           },
           } as any,
           {},
-          createFileHasher()
+          fileHasher
        );
 
        const parentHash = await hasher.hashTask({
@@ -522,25 +450,7 @@ describe('TaskHasher', () => {
          overrides: { prop: 'prop-value' },
        });
 
-        assertFilesets(parentHash, {
-          'child:!{projectRoot}/**/*.spec.ts': {
-            contains: 'libs/child/fileb.ts',
-            excludes: 'fileb.spec.ts',
-          },
-          'parent:{projectRoot}/**/*': {
-            contains: 'libs/parent/filea.ts|libs/parent/filea.spec.ts',
-          },
-        });
-
-        expect(parentHash.details.nodes['{workspaceRoot}/global1']).toEqual(
-          'global1.hash'
-        );
-        expect(parentHash.details.nodes['{workspaceRoot}/global2']).toBe(
-          'global2.hash'
-        );
-        expect(parentHash.details.nodes['env:MY_TEST_HASH_ENV']).toEqual(
-          'MY_TEST_HASH_ENV_VALUE'
-        );
+        expect(parentHash).toMatchSnapshot();
 
        const childHash = await hasher.hashTask({
          target: { project: 'child', target: 'test' },
@@ -548,18 +458,7 @@ describe('TaskHasher', () => {
          overrides: { prop: 'prop-value' },
        });
 
-        assertFilesets(childHash, {
-          'child:{projectRoot}/**/*': {
-            contains: 'libs/child/fileb.ts|libs/child/fileb.spec.ts',
-          },
-        });
-        expect(childHash.details.nodes['{workspaceRoot}/global1']).toEqual(
-          'global1.hash'
-        );
-        expect(childHash.details.nodes['{workspaceRoot}/global2']).toBe(
-          undefined
-        );
-        expect(childHash.details.nodes['env:MY_TEST_HASH_ENV']).toBeUndefined();
+        expect(childHash).toMatchSnapshot();
      }
    );
  });
@@ -632,7 +531,7 @@ describe('TaskHasher', () => {
      },
      } as any,
      {},
-      createFileHasher()
+      fileHasher
    );
 
    const hash = await hasher.hashTask({
@@ -640,17 +539,7 @@ describe('TaskHasher', () => {
      id: 'parent-build',
      overrides: { prop: 'prop-value' },
    });
 
-    assertFilesets(hash, {
-      'child:!{projectRoot}/**/*.spec.ts': {
-        contains: 'libs/child/fileb.ts',
-        excludes: 'libs/child/fileb.spec.ts',
-      },
-      'parent:!{projectRoot}/**/*.spec.ts': {
-        contains: 'libs/parent/filea.ts',
-        excludes: 'libs/parent/filea.spec.ts',
-      },
-    });
+    expect(hash).toMatchSnapshot();
  });
 
  it('should be able to include only a part of the base tsconfig', async () => {
@@ -691,7 +580,7 @@ describe('TaskHasher', () => {
        runtimeCacheInputs: ['echo runtime123', 'echo runtime456'],
        selectivelyHashTsConfig: true,
      },
-      createFileHasher()
+      fileHasher
    );
 
    const hash = await hasher.hashTask({
@@ -700,20 +589,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    expect(hash.value).toContain('file.hash'); //project files
-    expect(hash.value).toContain('prop-value'); //overrides
-    expect(hash.value).toContain('parent'); //project
-    expect(hash.value).toContain('build'); //target
-    expect(hash.value).toContain('runtime123'); //target
-    expect(hash.value).toContain('runtime456'); //target
-
-    expect(hash.details.command).toEqual('parent|build||{"prop":"prop-value"}');
-
-    assertFilesets(hash, {
-      'parent:{projectRoot}/**/*': {
-        contains: '/file',
-      },
-    });
+    expect(hash).toMatchSnapshot();
  });
 
  it('should hash tasks where the project graph has circular dependencies', async () => {
@@ -768,7 +644,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
      {},
-      createFileHasher()
+      fileHasher
    );
 
    const tasksHash = await hasher.hashTask({
@@ -777,22 +653,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    expect(tasksHash.value).toContain('a.hash'); //project files
-    expect(tasksHash.value).toContain('b.hash'); //project files
-    expect(tasksHash.value).toContain('prop-value'); //overrides
-    expect(tasksHash.value).toContain('parent|build'); //project and target
-    expect(tasksHash.value).toContain('build'); //target
-
-    assertFilesets(tasksHash, {
-      'child:{projectRoot}/**/*': {
-        contains: 'fileb.ts',
-        excludes: 'filea.tx',
-      },
-      'parent:{projectRoot}/**/*': {
-        contains: 'filea.ts',
-        excludes: 'fileb.tx',
-      },
-    });
+    expect(tasksHash).toMatchSnapshot();
 
    const hashb = await hasher.hashTask({
      target: { project: 'child', target: 'build' },
@@ -800,22 +661,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    expect(hashb.value).toContain('a.hash'); //project files
-    expect(hashb.value).toContain('b.hash'); //project files
-    expect(hashb.value).toContain('prop-value'); //overrides
-    expect(hashb.value).toContain('child|build'); //project and target
-    expect(hashb.value).toContain('build'); //target
-
-    assertFilesets(hashb, {
-      'child:{projectRoot}/**/*': {
-        contains: 'fileb.ts',
-        excludes: 'filea.tx',
-      },
-      'parent:{projectRoot}/**/*': {
-        contains: 'filea.ts',
-        excludes: 'fileb.tx',
-      },
-    });
+    expect(hashb).toMatchSnapshot();
  });
 
  it('should throw an error when failed to execute runtimeCacheInputs', async () => {
@@ -854,7 +700,7 @@ describe('TaskHasher', () => {
      {
        runtimeCacheInputs: ['boom'],
      },
-      createFileHasher()
+      fileHasher
    );
 
    try {
@@ -920,7 +766,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
      {},
-      createFileHasher()
+      fileHasher
    );
 
    const hash = await hasher.hashTask({
@@ -928,11 +774,7 @@ describe('TaskHasher', () => {
      id: 'app-build',
      overrides: { prop: 'prop-value' },
    });
 
-    // note that the parent hash is based on parent source files only!
-    assertFilesets(hash, {
-      'npm:react': { contains: '17.0.0' },
-    });
+    expect(hash).toMatchSnapshot();
  });
 
  it('should hash missing dependent npm project versions', async () => {
@@ -977,7 +819,7 @@ describe('TaskHasher', () => {
      },
      {} as any,
      {},
-      createFileHasher()
+      fileHasher
    );
 
    const hash = await hasher.hashTask({
@@ -1042,11 +884,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    assertFilesets(hash, {
-      target: { contains: '@nx/webpack:webpack' },
-    });
-
-    expect(hash.value).toContain('|16.0.0|');
+    expect(hash).toMatchSnapshot();
  });
 
  it('should hash entire subtree of dependencies', async () => {
@@ -1150,14 +988,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    assertFilesets(hash, {
-      target: { contains: '@nx/webpack:webpack' },
-    });
-
-    expect(hash.value).toContain('|$nx/webpack16$|');
-    expect(hash.value).toContain('|$nx/devkit16$|');
-    expect(hash.value).toContain('|$nx16$|');
-    expect(hash.value).toContain('|5.0.0|');
+    expect(hash).toMatchSnapshot();
  });
 
  it('should hash entire subtree in a deterministic way', async () => {
@@ -1353,8 +1184,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    expect(hash.value).not.toContain('|16.0.0|');
-    expect(hash.details.nodes['target']).toEqual('nx:run-commands');
+    expect(hash.details.nodes['target']).toEqual('13019111166724682201');
  });
 
  it('should use externalDependencies to override nx:run-commands', async () => {
@@ -1430,10 +1260,7 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
-    expect(hash.value).not.toContain('|16.0.0|');
-    expect(hash.value).toContain('|17.0.0|');
-    expect(hash.value).toContain('|5.0.0|');
-    expect(hash.details.nodes['target']).toEqual('nx:run-commands');
+    expect(hash).toMatchSnapshot();
  });
 
  it('should use externalDependencies with empty array to ignore all deps', async () => {
@@ -1509,21 +1336,12 @@ describe('TaskHasher', () => {
      overrides: { prop: 'prop-value' },
    });
 
    expect(hash.details.nodes['npm:nx']).not.toBeDefined();
    expect(hash.details.nodes['app']).not.toBeDefined();
+    expect(hash).toMatchSnapshot();
  });
  });
 
  describe('dependentTasksOutputFiles', () => {
    it('should depend on dependent tasks output files', async () => {
-      const distFolder = [
-        ['dist/libs/parent/filea.js', 'a.js.hash'],
-        ['dist/libs/parent/filea.d.ts', 'a.d.ts.hash'],
-        ['dist/libs/child/fileb.js', 'b.js.hash'],
-        ['dist/libs/child/fileb.d.ts', 'b.d.ts.hash'],
-        ['dist/libs/grandchild/filec.js', 'c.js.hash'],
-        ['dist/libs/grandchild/filec.d.ts', 'c.d.ts.hash'],
-      ];
      const hasher = new InProcessTaskHasher(
        {
          parent: [
@@ -1567,10 +1385,6 @@ describe('TaskHasher', () => {
                dependsOn: ['^build'],
                inputs: ['prod', 'deps'],
                executor: 'nx:run-commands',
-                // options: {
-                //   outputPath: 'dist/{projectRoot}',
-                // },
-                // outputs: ['{options.outputPath}'],
                outputs: ['dist/{projectRoot}'],
              },
            },
@@ -1592,6 +1406,7 @@ describe('TaskHasher', () => {
            },
          },
        },
+        externalNodes: {},
        dependencies: {
          parent: [{ source: 'parent', target: 'child', type: 'static' }],
          child: [{ source: 'child', target: 'grandchild', type: 'static' }],
@@ -1641,39 +1456,153 @@ describe('TaskHasher', () => {
          },
        } as any,
        {},
-        {
-          hashFilesMatchingGlobs: (path: string, globs: string[]) => {
-            const hashes = [];
-            for (const [file, hash] of distFolder) {
-              if (!file.startsWith(path)) {
-                continue;
-              }
-              for (const glob of globs) {
-                if (file.endsWith(glob.split('**/*')[1])) {
-                  hashes.push(hash);
-                }
-              }
-            }
-            return hashes.join('|');
-          },
-        } as any
+        fileHasher
      );
 
+      await tempFs.createFiles({
+        'dist/libs/child/index.d.ts': '',
+        'dist/libs/grandchild/index.d.ts': '',
+      });
+
      const hash = await hasher.hashTask({
        target: { project: 'parent', target: 'build' },
        id: 'parent-build',
        overrides: { prop: 'prop-value' },
      });
 
-      expect(hash.value).not.toContain('a.d.ts.hash');
-      expect(hash.value).not.toContain('js.hash');
-      expect(hash.value).toContain('b.d.ts.hash');
-      expect(hash.value).toContain('c.d.ts.hash');
-
-      assertFilesets(hash, {
-        'dist/libs/child/**/*.d.ts': { contains: 'b.d.ts.hash' },
-        'dist/libs/grandchild/**/*.d.ts': { contains: 'c.d.ts.hash' },
-      });
+      expect(hash).toMatchSnapshot();
    });
 
+    it('should work with dependent tasks with globs as outputs', async () => {
+      const hasher = new InProcessTaskHasher(
+        {
+          parent: [
+            { file: 'libs/parent/filea.ts', hash: 'a.hash' },
+            { file: 'libs/parent/filea.spec.ts', hash: 'a.spec.hash' },
+          ],
+          child: [
+            { file: 'libs/child/fileb.ts', hash: 'b.hash' },
+            { file: 'libs/child/fileb.spec.ts', hash: 'b.spec.hash' },
+          ],
+          grandchild: [
+            { file: 'libs/grandchild/filec.ts', hash: 'c.hash' },
+            { file: 'libs/grandchild/filec.spec.ts', hash: 'c.spec.hash' },
+          ],
+        },
+        allWorkspaceFiles,
+        {
+          nodes: {
+            parent: {
+              name: 'parent',
+              type: 'lib',
+              data: {
+                root: 'libs/parent',
+                targets: {
+                  build: {
+                    dependsOn: ['^build'],
+                    inputs: ['prod', 'deps'],
+                    executor: 'nx:run-commands',
+                    outputs: ['dist/{projectRoot}'],
+                  },
+                },
+              },
+            },
+            child: {
+              name: 'child',
+              type: 'lib',
+              data: {
+                root: 'libs/child',
+                targets: {
+                  build: {
+                    dependsOn: ['^build'],
+                    inputs: ['prod', 'deps'],
+                    executor: 'nx:run-commands',
+                    outputs: ['dist/{projectRoot}/**/*'],
+                  },
+                },
+              },
+            },
+            grandchild: {
+              name: 'grandchild',
+              type: 'lib',
+              data: {
+                root: 'libs/grandchild',
+                targets: {
+                  build: {
+                    dependsOn: ['^build'],
+                    inputs: ['prod', 'deps'],
+                    executor: 'nx:run-commands',
+                    outputs: ['dist/{projectRoot}'],
+                  },
+                },
+              },
+            },
+          },
+          externalNodes: {},
+          dependencies: {
+            parent: [{ source: 'parent', target: 'child', type: 'static' }],
+            child: [{ source: 'child', target: 'grandchild', type: 'static' }],
+          },
+        },
+        {
+          roots: ['grandchild-build'],
+          tasks: {
+            'parent-build': {
+              id: 'parent-build',
+              target: { project: 'parent', target: 'build' },
+              overrides: {},
+            },
+            'child-build': {
+              id: 'child-build',
+              target: { project: 'child', target: 'build' },
+              overrides: {},
+            },
+            'grandchild-build': {
+              id: 'grandchild-build',
+              target: { project: 'grandchild', target: 'build' },
+              overrides: {},
+            },
+          },
+          dependencies: {
+            'parent-build': ['child-build'],
+            'child-build': ['grandchild-build'],
+          },
+        },
+        {
+          namedInputs: {
+            prod: ['!{projectRoot}/**/*.spec.ts'],
+            deps: [
+              { dependentTasksOutputFiles: '**/*.d.ts', transitive: true },
+            ],
+          },
+          targetDefaults: {
+            build: {
+              dependsOn: ['^build'],
+              inputs: ['prod', 'deps'],
+              executor: 'nx:run-commands',
+              options: {
+                outputPath: 'dist/libs/{projectRoot}',
+              },
+              outputs: ['{options.outputPath}'],
+            },
+          },
+        } as any,
+        {},
+        fileHasher
+      );
 
+      await tempFs.createFiles({
+        'dist/libs/child/index.d.ts': '',
+        'dist/libs/grandchild/index.d.ts': '',
+      });
+
+      const hash = await hasher.hashTask({
+        target: { project: 'parent', target: 'build' },
+        id: 'parent-build',
+        overrides: { prop: 'prop-value' },
+      });
 
+      expect(hash).toMatchSnapshot();
+    });
  });
@@ -16,8 +16,10 @@ import { createProjectRootMappings } from '../project-graph/utils/find-project-f
 import { findMatchingProjects } from '../utils/find-matching-projects';
 import { FileHasher, hashArray } from './file-hasher';
 import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils';
-import { join } from 'path';
 import { getHashEnv } from './set-hash-env';
+import { workspaceRoot } from '../utils/workspace-root';
+import { join, relative } from 'path';
+import { normalizePath } from '../utils/path';
 
 type ExpandedSelfInput =
   | { fileset: string }
@@ -282,8 +284,8 @@ class TaskHasherImpl {
       projectGraphDeps,
       visited
     );
-    const depsOut = this.hashDepsOutputs(task, depsOutputs);
-    const projects = await this.hashProjectInputs(projectInputs, visited);
+    const depsOut = await this.hashDepsOutputs(task, depsOutputs);
+    const projects = await this.hashProjectInputs(projectInputs);
 
     let details = {};
     for (const s of self) {
@@ -350,27 +352,31 @@ class TaskHasherImpl {
       .filter((r) => !!r);
   }
 
-  private hashDepsOutputs(
+  private async hashDepsOutputs(
     task: Task,
     depsOutputs: ExpandedDepsOutput[]
-  ): PartialHash[] {
+  ): Promise<PartialHash[]> {
     if (depsOutputs.length === 0) {
       return [];
     }
     const result: PartialHash[] = [];
     for (const { dependentTasksOutputFiles, transitive } of depsOutputs) {
       result.push(
-        ...this.hashDepOuputs(task, dependentTasksOutputFiles, transitive)
+        ...(await this.hashDepOuputs(
+          task,
+          dependentTasksOutputFiles,
+          transitive
+        ))
       );
     }
     return result;
   }
 
-  private hashDepOuputs(
+  private async hashDepOuputs(
     task: Task,
     dependentTasksOutputFiles: string,
     transitive?: boolean
-  ): PartialHash[] {
+  ): Promise<PartialHash[]> {
     // task has no dependencies
     if (!this.taskGraph.dependencies[task.id]) {
       return [];
@@ -379,29 +385,38 @@ class TaskHasherImpl {
     const partialHashes: PartialHash[] = [];
     for (const d of this.taskGraph.dependencies[task.id]) {
       const childTask = this.taskGraph.tasks[d];
-      const outputDirs = getOutputsForTargetAndConfiguration(
+      const outputs = getOutputsForTargetAndConfiguration(
         childTask,
         this.projectGraph.nodes[childTask.target.project]
       );
-      const hashes = {};
-      for (const outputDir of outputDirs) {
-        hashes[join(outputDir, dependentTasksOutputFiles)] =
-          this.fileHasher.hashFilesMatchingGlobs(outputDir, [
-            dependentTasksOutputFiles,
-          ]);
+      const { getFilesForOutputs } =
+        require('../native') as typeof import('../native');
+      const outputFiles = getFilesForOutputs(workspaceRoot, outputs);
+      const filteredFiles = outputFiles.filter(
+        (p) =>
+          p === dependentTasksOutputFiles ||
+          minimatch(p, dependentTasksOutputFiles)
+      );
+      const hashDetails = {};
+      const hashes: string[] = [];
+      for (const [file, hash] of await this.fileHasher.hashFiles(
+        filteredFiles.map((p) => join(workspaceRoot, p))
+      )) {
+        hashes.push(hash);
+        hashDetails[normalizePath(relative(workspaceRoot, file))] = hash;
      }
 
      partialHashes.push({
-        value: hashArray(Object.values(hashes)),
-        details: hashes,
+        value: hashArray(hashes),
+        details: hashDetails,
      });
      if (transitive) {
        partialHashes.push(
-          ...this.hashDepOuputs(
+          ...(await this.hashDepOuputs(
            childTask,
            dependentTasksOutputFiles,
            transitive
-          )
+          ))
        );
      }
    }
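In prose: rather than handing each output directory to the file hasher to glob and hash in one opaque step, the hasher now expands a dependency's declared outputs into the concrete files that exist on disk (via the native getFilesForOutputs), filters them against the dependentTasksOutputFiles pattern with minimatch (imported elsewhere in this file), hashes each surviving file, and records every hash under a workspace-relative path. A condensed, self-contained sketch of that flow, with the collaborators stubbed as hypothetical declarations:

import { join, relative } from 'path';

// Hypothetical stand-ins for the collaborators used by hashDepOuputs above.
declare function getFilesForOutputs(root: string, outputs: string[]): string[];
declare function minimatch(path: string, pattern: string): boolean;
declare function hashFiles(files: string[]): Promise<Map<string, string>>;
declare function hashArray(hashes: string[]): string;

async function hashDependencyOutputs(
  workspaceRoot: string,
  outputs: string[], // a child task's declared outputs, e.g. ['dist/libs/child']
  dependentTasksOutputFiles: string // e.g. '**/*.d.ts'
): Promise<{ value: string; details: Record<string, string> }> {
  // 1. Expand the declared outputs into the files that actually exist on disk.
  const outputFiles = getFilesForOutputs(workspaceRoot, outputs);
  // 2. Keep only the files matching the dependentTasksOutputFiles pattern.
  const filteredFiles = outputFiles.filter(
    (p) =>
      p === dependentTasksOutputFiles || minimatch(p, dependentTasksOutputFiles)
  );
  // 3. Hash each remaining file and key the details by workspace-relative path.
  const details: Record<string, string> = {};
  const hashes: string[] = [];
  for (const [file, hash] of await hashFiles(
    filteredFiles.map((p) => join(workspaceRoot, p))
  )) {
    hashes.push(hash);
    details[relative(workspaceRoot, file)] = hash;
  }
  return { value: hashArray(hashes), details };
}

Keying the details by individual file path (instead of by one glob per output directory) is what makes the snapshot entries above, such as "dist/libs/child/index.d.ts", possible.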
@@ -521,7 +536,7 @@ class TaskHasherImpl {
     return {
       value: hash,
       details: {
-        target: target.executor,
+        target: hash,
       },
     };
   }
@@ -588,6 +603,8 @@ class TaskHasherImpl {
     const notFilesets = inputs.filter((r) => !r['fileset']);
     return Promise.all([
       this.hashProjectFileset(projectName, projectFilesets),
+      this.hashProjectConfig(projectName),
+      this.hashTsConfig(projectName),
       ...[
         ...workspaceFilesets,
         ...this.legacyFilesetInputs.map((r) => r.fileset),
@@ -599,8 +616,7 @@ class TaskHasherImpl {
   }
 
   private async hashProjectInputs(
-    projectInputs: { input: string; projects: string[] }[],
-    visited: string[]
+    projectInputs: { input: string; projects: string[] }[]
   ): Promise<PartialHash[]> {
     const partialHashes: Promise<PartialHash[]>[] = [];
     for (const input of projectInputs) {
@@ -653,6 +669,33 @@ class TaskHasherImpl {
     return this.filesetHashes[mapKey];
   }
 
+  private hashProjectConfig(projectName: string): PartialHash {
+    const p = this.projectGraph.nodes[projectName];
+    const projectConfig = hashArray([
+      JSON.stringify({ ...p.data, files: undefined }),
+    ]);
+
+    return {
+      value: projectConfig,
+      details: {
+        ProjectConfiguration: projectConfig,
+      },
+    };
+  }
+
+  private hashTsConfig(projectName: string): PartialHash {
+    const p = this.projectGraph.nodes[projectName];
+    const tsConfig = hashArray([
+      hashTsConfig(p, this.projectRootMappings, this.options),
+    ]);
+    return {
+      value: tsConfig,
+      details: {
+        TsConfig: tsConfig,
+      },
+    };
+  }
+
   private async hashProjectFileset(
     projectName: string,
     filesetPatterns: string[]
@@ -666,15 +709,12 @@ class TaskHasherImpl {
       this.projectFileMap[projectName] || [],
       filesetPatterns
     );
-    const fileNames = filteredFiles.map((f) => f.file);
-    const values = filteredFiles.map((f) => f.hash);
+    const files: string[] = [];
+    for (const { file, hash } of filteredFiles) {
+      files.push(file, hash);
+    }
 
-    const value = hashArray([
-      ...fileNames,
-      ...values,
-      JSON.stringify({ ...p.data, files: undefined }),
-      hashTsConfig(p, this.projectRootMappings, this.options),
-    ]);
+    const value = hashArray(files);
     res({
       value,
       details: { [mapKey]: value },
packages/nx/src/native/cache/expand_outputs.rs (vendored, 65 changed lines)
@@ -5,8 +5,8 @@ use crate::native::utils::path::Normalize;
 use crate::native::walker::nx_walker_sync;
 
 #[napi]
-/// Expands the given entries into a list of existing files.
-/// First checks if the entry exists, if not, it will glob the working directory to find the file.
+/// Expands the given entries into a list of existing directories and files.
+/// This is used for copying outputs to and from the cache
 pub fn expand_outputs(directory: String, entries: Vec<String>) -> anyhow::Result<Vec<String>> {
     let directory: PathBuf = directory.into();
 
@@ -33,6 +33,67 @@ pub fn expand_outputs(directory: String, entries: Vec<String>) -> anyhow::Result
     Ok(found_paths.collect())
 }
 
+#[napi]
+/// Expands the given outputs into a list of existing files.
+/// This is used when hashing outputs
+pub fn get_files_for_outputs(
+    directory: String,
+    entries: Vec<String>,
+) -> anyhow::Result<Vec<String>> {
+    let directory: PathBuf = directory.into();
+
+    let mut globs: Vec<String> = vec![];
+    let mut files: Vec<String> = vec![];
+    let mut directories: Vec<String> = vec![];
+    for entry in entries.into_iter() {
+        let path = directory.join(&entry);
+
+        if !path.exists() {
+            globs.push(entry);
+        } else if path.is_dir() {
+            directories.push(entry);
+        } else {
+            files.push(entry);
+        }
+    }
+
+    if !globs.is_empty() {
+        let glob_set = build_glob_set(&globs)?;
+        let found_paths = nx_walker_sync(&directory).filter_map(|path| {
+            if glob_set.is_match(&path) {
+                Some(path.to_normalized_string())
+            } else {
+                None
+            }
+        });
+
+        files.extend(found_paths);
+    }
+
+    if !directories.is_empty() {
+        for dir in directories {
+            let dir = PathBuf::from(dir);
+            let dir_path = directory.join(&dir);
+            let files_in_dir: Vec<String> = nx_walker_sync(&dir_path)
+                .filter_map(|e| {
+                    let path = dir_path.join(&e);
+
+                    if path.is_file() {
+                        Some(dir.join(e).to_normalized_string())
+                    } else {
+                        None
+                    }
+                })
+                .collect();
+            files.extend(files_in_dir);
+        }
+    }
+
+    files.sort();
+
+    Ok(files)
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
@@ -1,8 +1,6 @@
 use crate::native::types::FileData;
-use crate::native::utils::glob::build_glob_set;
-use crate::native::utils::path::Normalize;
 use crate::native::walker::nx_walker;
 use rayon::prelude::*;
 use std::collections::HashMap;
 use xxhash_rust::xxh3;
 
@@ -38,38 +36,6 @@ pub fn hash_files(workspace_root: String) -> HashMap<String, String> {
     })
 }
 
-#[napi]
-fn hash_files_matching_globs(
-    directory: String,
-    glob_patterns: Vec<String>,
-) -> anyhow::Result<Option<String>> {
-    let glob_set = build_glob_set(&glob_patterns)?;
-
-    let mut hashes = nx_walker(directory, move |receiver| {
-        let mut collection: Vec<FileData> = Vec::new();
-        for (path, content) in receiver {
-            if glob_set.is_match(&path) {
-                collection.push(FileData {
-                    file: path.to_normalized_string(),
-                    hash: xxh3::xxh3_64(&content).to_string(),
-                });
-            }
-        }
-        collection
-    });
-
-    if hashes.is_empty() {
-        return Ok(None);
-    }
-
-    // Sort the file data so that its in deterministically ordered by file path
-    hashes.par_sort();
-
-    let sorted_file_hashes: Vec<String> =
-        hashes.into_iter().map(|file_data| file_data.hash).collect();
-    Ok(Some(hash_array(sorted_file_hashes)))
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -108,23 +74,4 @@ mod tests {
 
     assert_eq!(content.unwrap().hash, "6193209363630369380");
 }
-
-#[test]
-fn it_hashes_files_matching_globs() -> anyhow::Result<()> {
-    // handle empty workspaces
-    let content =
-        hash_files_matching_globs("/does/not/exist".into(), Vec::from([String::from("**/*")]))?;
-    assert!(content.is_none());
-
-    let temp_dir = setup_fs();
-
-    let content = hash_files_matching_globs(
-        temp_dir.display().to_string(),
-        Vec::from([String::from("fo*.txt")]),
-    )?;
-    // println!("{:?}", content);
-    assert_eq!(content.unwrap(), String::from("12742692716897613184"),);
-
-    Ok(())
-}
 }
packages/nx/src/native/index.d.ts (vendored, 10 changed lines)
@@ -4,16 +4,20 @@
 /* auto-generated by NAPI-RS */
 
 /**
- * Expands the given entries into a list of existing files.
- * First checks if the entry exists, if not, it will glob the working directory to find the file.
+ * Expands the given entries into a list of existing directories and files.
+ * This is used for copying outputs to and from the cache
 */
 export function expandOutputs(directory: string, entries: Array<string>): Array<string>
+/**
+ * Expands the given outputs into a list of existing files.
+ * This is used when hashing outputs
+ */
+export function getFilesForOutputs(directory: string, entries: Array<string>): Array<string>
 export function remove(src: string): void
 export function copy(src: string, dest: string): void
 export function hashArray(input: Array<string>): string
 export function hashFile(file: string): FileData | null
 export function hashFiles(workspaceRoot: string): Record<string, string>
-export function hashFilesMatchingGlobs(directory: string, globPatterns: Array<string>): string | null
 export function findImports(projectFileMap: Record<string, Array<string>>): Array<ImportResult>
 export interface FileData {
   file: string
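The two native entry points now split responsibilities: expandOutputs may return directories as-is (sufficient for copying outputs to and from the cache), while getFilesForOutputs always expands down to individual files so each one can be hashed. A hypothetical usage sketch; the import path and the example values are assumptions:

// Hypothetical usage of the two declarations above.
import { expandOutputs, getFilesForOutputs } from './native'; // import path assumed

const workspaceRoot = '/workspace';
const outputs = ['dist/libs/child'];

// Cache copy: an existing directory can be returned whole.
const pathsToCopy = expandOutputs(workspaceRoot, outputs);
// e.g. ['dist/libs/child']

// Hashing: directories are walked down to individual files so each
// file's content can be hashed and recorded separately.
const filesToHash = getFilesForOutputs(workspaceRoot, outputs);
// e.g. ['dist/libs/child/index.d.ts', 'dist/libs/child/index.js']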
@@ -246,15 +246,15 @@
   throw new Error(`Failed to load native binding`)
 }
 
-const { expandOutputs, remove, copy, hashArray, hashFile, hashFiles, hashFilesMatchingGlobs, ImportResult, findImports, EventType, Watcher, WorkspaceErrors, getProjectConfigurationFiles, getProjectConfigurations, getWorkspaceFilesNative } = nativeBinding
+const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, EventType, Watcher, WorkspaceErrors, getProjectConfigurationFiles, getProjectConfigurations, getWorkspaceFilesNative } = nativeBinding
 
 module.exports.expandOutputs = expandOutputs
+module.exports.getFilesForOutputs = getFilesForOutputs
 module.exports.remove = remove
 module.exports.copy = copy
 module.exports.hashArray = hashArray
 module.exports.hashFile = hashFile
 module.exports.hashFiles = hashFiles
-module.exports.hashFilesMatchingGlobs = hashFilesMatchingGlobs
 module.exports.ImportResult = ImportResult
 module.exports.findImports = findImports
 module.exports.EventType = EventType
@@ -155,6 +155,12 @@ mod test {
     assert_eq!(no_patterns, ["dist/**/*.js",]);
 }
 
+#[test]
+fn should_work_with_simple_globs() {
+    let glob_set = build_glob_set(&["**/*"]).unwrap();
+    assert!(glob_set.is_match("packages/nx/package.json"))
+}
+
 #[test]
 fn should_detect_package_json() {
     let glob_set = build_glob_set(&["packages/*/package.json"]).unwrap();
@@ -1,6 +1,6 @@
 import { ProjectGraphProjectNode } from '../../../config/project-graph';
 import { readJsonFile } from '../../../utils/fileutils';
-import { getRootTsConfigFileName } from '../utils/typescript';
+import { getRootTsConfigPath } from '../utils/typescript';
 import {
   findProjectForPath,
   ProjectRootMappings,
@@ -16,7 +16,7 @@ interface TsconfigJsonConfiguration {
 
 function readTsConfigJson(): TsconfigJsonConfiguration {
   try {
-    const res = readJsonFile(getRootTsConfigFileName());
+    const res = readJsonFile(getRootTsConfigPath());
     res.compilerOptions.paths ??= {};
     return res;
   } catch {
@@ -6,10 +6,6 @@ import { fileExists } from './fileutils';
  */
 export let workspaceRoot = workspaceRootInner(process.cwd(), process.cwd());
 
-export function setWorkspaceRoot(root: string): void {
-  workspaceRoot = root;
-}
-
 export function workspaceRootInner(
   dir: string,
   candidateRoot: string | null