feat(core): implement basic support for schematics
This commit is contained in:
parent
68a4a7eb8c
commit
428edb0c32
@ -48,6 +48,12 @@ Type: `string`
|
||||
|
||||
Current working directory of the command
|
||||
|
||||
### envFile
|
||||
|
||||
Type: `string`
|
||||
|
||||
Env files to be loaded before executing the commands
|
||||
|
||||
### name
|
||||
|
||||
Type: `string`
|
||||
|
||||
@ -48,6 +48,12 @@ Type: `string`
|
||||
|
||||
Current working directory of the command
|
||||
|
||||
### envFile
|
||||
|
||||
Type: `string`
|
||||
|
||||
Env files to be loaded before executing the commands
|
||||
|
||||
### name
|
||||
|
||||
Type: `string`
|
||||
|
||||
@ -48,6 +48,12 @@ Type: `string`
|
||||
|
||||
Current working directory of the command
|
||||
|
||||
### envFile
|
||||
|
||||
Type: `string`
|
||||
|
||||
Env files to be loaded before executing the commands
|
||||
|
||||
### name
|
||||
|
||||
Type: `string`
|
||||
|
||||
@ -22,19 +22,13 @@ forEachCli(() => {
|
||||
`apps/${nodeapp}/.custom.env`,
|
||||
'SHARED_VAR=shared-nested-value\nNESTED_ONLY=nested-only-value'
|
||||
);
|
||||
const config = readJson(workspaceConfigName());
|
||||
config.projects[nodeapp].architect.echoEnvVariables = {
|
||||
builder: '@nrwl/workspace:run-commands',
|
||||
options: {
|
||||
commands: [
|
||||
{
|
||||
command: `echo "$SHARED_VAR $ROOT_ONLY $NESTED_ONLY"`,
|
||||
},
|
||||
],
|
||||
envFile: `apps/${nodeapp}/.custom.env`,
|
||||
},
|
||||
};
|
||||
updateFile(workspaceConfigName(), JSON.stringify(config));
|
||||
|
||||
const command = `echo "$SHARED_VAR $ROOT_ONLY $NESTED_ONLY"`;
|
||||
const envFile = `apps/${nodeapp}/.custom.env`;
|
||||
runCLI(
|
||||
`generate @nrwl/workspace:run-commands echoEnvVariables --command='${command}' --envFile='${envFile}' --project=${nodeapp}`
|
||||
);
|
||||
|
||||
const result = runCLI('echoEnvVariables');
|
||||
expect(result).toContain('shared-root-value');
|
||||
expect(result).not.toContain('shared-nested-value');
|
||||
|
||||
3
nx.json
3
nx.json
@ -34,6 +34,9 @@
|
||||
"tao": {
|
||||
"tags": []
|
||||
},
|
||||
"devkit": {
|
||||
"tags": []
|
||||
},
|
||||
"workspace": {
|
||||
"tags": [],
|
||||
"implicitDependencies": ["tao", "cli"]
|
||||
|
||||
5
packages/devkit/.eslintrc.json
Normal file
5
packages/devkit/.eslintrc.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"extends": "../../.eslintrc",
|
||||
"rules": {},
|
||||
"ignorePatterns": ["!**/*"]
|
||||
}
|
||||
13
packages/devkit/README.md
Normal file
13
packages/devkit/README.md
Normal file
@ -0,0 +1,13 @@
|
||||
<p align="center"><img src="https://raw.githubusercontent.com/nrwl/nx/master/images/nx.png" width="600"></p>
|
||||
|
||||
{{links}}
|
||||
|
||||
<hr>
|
||||
|
||||
# Nx Devkit
|
||||
|
||||
{{what-is-nx}}
|
||||
|
||||
{{getting-started}}
|
||||
|
||||
{{resources}}
|
||||
1
packages/devkit/index.ts
Normal file
1
packages/devkit/index.ts
Normal file
@ -0,0 +1 @@
|
||||
export { Tree, FileChange } from '@nrwl/tao/src/shared/tree';
|
||||
9
packages/devkit/jest.config.js
Normal file
9
packages/devkit/jest.config.js
Normal file
@ -0,0 +1,9 @@
|
||||
module.exports = {
|
||||
preset: '../../jest.preset.js',
|
||||
transform: {
|
||||
'^.+\\.[tj]sx?$': 'ts-jest',
|
||||
},
|
||||
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'html'],
|
||||
globals: { 'ts-jest': { tsConfig: '<rootDir>/tsconfig.spec.json' } },
|
||||
displayName: 'cli',
|
||||
};
|
||||
31
packages/devkit/package.json
Normal file
31
packages/devkit/package.json
Normal file
@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "@nrwl/devkit",
|
||||
"version": "0.0.1",
|
||||
"description": "Extensible Dev Tools for Monorepos",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/nrwl/nx.git"
|
||||
},
|
||||
"keywords": [
|
||||
"Monorepo",
|
||||
"Angular",
|
||||
"React",
|
||||
"Web",
|
||||
"Node",
|
||||
"Nest",
|
||||
"Jest",
|
||||
"Cypress",
|
||||
"CLI"
|
||||
],
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"author": "Victor Savkin",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/nrwl/nx/issues"
|
||||
},
|
||||
"homepage": "https://nx.dev",
|
||||
"dependencies": {
|
||||
"@nrwl/tao": "*"
|
||||
}
|
||||
}
|
||||
6
packages/devkit/src/create-tree-with-empty-workspace.ts
Normal file
6
packages/devkit/src/create-tree-with-empty-workspace.ts
Normal file
@ -0,0 +1,6 @@
|
||||
import { FsTree } from '@nrwl/tao/src/shared/tree';
|
||||
|
||||
export function createTreeWithEmptyWorkspace() {
|
||||
const tree = new FsTree(null, false, console);
|
||||
return tree;
|
||||
}
|
||||
1
packages/devkit/testing.ts
Normal file
1
packages/devkit/testing.ts
Normal file
@ -0,0 +1 @@
|
||||
export * from './src/create-tree-with-empty-workspace';
|
||||
16
packages/devkit/tsconfig.json
Normal file
16
packages/devkit/tsconfig.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"types": ["node", "jest"]
|
||||
},
|
||||
"include": [],
|
||||
"files": [],
|
||||
"references": [
|
||||
{
|
||||
"path": "./tsconfig.lib.json"
|
||||
},
|
||||
{
|
||||
"path": "./tsconfig.spec.json"
|
||||
}
|
||||
]
|
||||
}
|
||||
11
packages/devkit/tsconfig.lib.json
Normal file
11
packages/devkit/tsconfig.lib.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"outDir": "../../dist/out-tsc",
|
||||
"declaration": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"exclude": ["**/*.spec.ts", "**/*_spec.ts"],
|
||||
"include": ["**/*.ts"]
|
||||
}
|
||||
16
packages/devkit/tsconfig.spec.json
Normal file
16
packages/devkit/tsconfig.spec.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "../../dist/out-tsc",
|
||||
"module": "commonjs",
|
||||
"types": ["jest", "node"]
|
||||
},
|
||||
"include": [
|
||||
"**/*.spec.ts",
|
||||
"**/*_spec.ts",
|
||||
"**/*.spec.tsx",
|
||||
"**/*.spec.js",
|
||||
"**/*.spec.jsx",
|
||||
"**/*.d.ts"
|
||||
]
|
||||
}
|
||||
@ -1,44 +1,22 @@
|
||||
import {
|
||||
experimental,
|
||||
JsonObject,
|
||||
logging,
|
||||
normalize,
|
||||
Path,
|
||||
schema,
|
||||
tags,
|
||||
terminal,
|
||||
virtualFs,
|
||||
} from '@angular-devkit/core';
|
||||
import { NodeJsSyncHost } from '@angular-devkit/core/node';
|
||||
import {
|
||||
DryRunEvent,
|
||||
formats,
|
||||
HostTree,
|
||||
Schematic,
|
||||
} from '@angular-devkit/schematics';
|
||||
import {
|
||||
FileSystemCollectionDescription,
|
||||
FileSystemSchematicDescription,
|
||||
NodeWorkflow,
|
||||
validateOptionsWithSchema,
|
||||
} from '@angular-devkit/schematics/tools';
|
||||
import * as fs from 'fs';
|
||||
import * as inquirer from 'inquirer';
|
||||
import * as minimist from 'minimist';
|
||||
import { detectPackageManager } from '../shared/detect-package-manager';
|
||||
import { getLogger } from '../shared/logger';
|
||||
import {
|
||||
coerceTypes,
|
||||
convertAliases,
|
||||
combineOptionsForSchematic,
|
||||
convertToCamelCase,
|
||||
handleErrors,
|
||||
lookupUnmatched,
|
||||
Options,
|
||||
Schema,
|
||||
} from '../shared/params';
|
||||
import { commandName, printHelp } from '../shared/print-help';
|
||||
import { WorkspaceDefinition, Workspaces } from '../shared/workspace';
|
||||
import { statSync, unlinkSync, writeFileSync } from 'fs';
|
||||
import { mkdirpSync, rmdirSync } from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import { FileChange, FsTree } from '../shared/tree';
|
||||
|
||||
interface GenerateOptions {
|
||||
const chalk = require('chalk');
|
||||
|
||||
export interface GenerateOptions {
|
||||
collectionName: string;
|
||||
schematicName: string;
|
||||
schematicOptions: Options;
|
||||
@ -125,156 +103,10 @@ function parseGenerateOpts(
|
||||
return res;
|
||||
}
|
||||
|
||||
function normalizeOptions(opts: Options, schema: Schema): Options {
|
||||
return lookupUnmatched(
|
||||
convertAliases(coerceTypes(opts, schema), schema, true),
|
||||
schema
|
||||
);
|
||||
}
|
||||
|
||||
function createRecorder(
|
||||
record: {
|
||||
loggingQueue: string[];
|
||||
error: boolean;
|
||||
},
|
||||
logger: logging.Logger
|
||||
) {
|
||||
return (event: DryRunEvent) => {
|
||||
const eventPath = event.path.startsWith('/')
|
||||
? event.path.substr(1)
|
||||
: event.path;
|
||||
if (event.kind === 'error') {
|
||||
record.error = true;
|
||||
logger.warn(
|
||||
`ERROR! ${eventPath} ${
|
||||
event.description == 'alreadyExist'
|
||||
? 'already exists'
|
||||
: 'does not exist.'
|
||||
}.`
|
||||
);
|
||||
} else if (event.kind === 'update') {
|
||||
record.loggingQueue.push(
|
||||
tags.oneLine`${terminal.white('UPDATE')} ${eventPath} (${
|
||||
event.content.length
|
||||
} bytes)`
|
||||
);
|
||||
} else if (event.kind === 'create') {
|
||||
record.loggingQueue.push(
|
||||
tags.oneLine`${terminal.green('CREATE')} ${eventPath} (${
|
||||
event.content.length
|
||||
} bytes)`
|
||||
);
|
||||
} else if (event.kind === 'delete') {
|
||||
record.loggingQueue.push(`${terminal.yellow('DELETE')} ${eventPath}`);
|
||||
} else if (event.kind === 'rename') {
|
||||
record.loggingQueue.push(
|
||||
`${terminal.blue('RENAME')} ${eventPath} => ${event.to}`
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function isTTY(): boolean {
|
||||
return !!process.stdout.isTTY && process.env['CI'] !== 'true';
|
||||
}
|
||||
|
||||
async function createWorkflow(
|
||||
fsHost: virtualFs.Host<fs.Stats>,
|
||||
root: string,
|
||||
opts: GenerateOptions
|
||||
) {
|
||||
const workflow = new NodeWorkflow(fsHost, {
|
||||
force: opts.force,
|
||||
dryRun: opts.dryRun,
|
||||
packageManager: detectPackageManager(),
|
||||
root: normalize(root),
|
||||
registry: new schema.CoreSchemaRegistry(formats.standardFormats),
|
||||
resolvePaths: [process.cwd(), root],
|
||||
});
|
||||
const _params = opts.schematicOptions._;
|
||||
delete opts.schematicOptions._;
|
||||
workflow.registry.addSmartDefaultProvider('argv', (schema: JsonObject) => {
|
||||
if ('index' in schema) {
|
||||
return _params[Number(schema['index'])];
|
||||
} else {
|
||||
return _params;
|
||||
}
|
||||
});
|
||||
|
||||
if (opts.defaults) {
|
||||
workflow.registry.addPreTransform(schema.transforms.addUndefinedDefaults);
|
||||
} else {
|
||||
workflow.registry.addPostTransform(schema.transforms.addUndefinedDefaults);
|
||||
}
|
||||
|
||||
workflow.engineHost.registerOptionsTransform(
|
||||
validateOptionsWithSchema(workflow.registry)
|
||||
);
|
||||
|
||||
if (opts.interactive !== false && isTTY()) {
|
||||
workflow.registry.usePromptProvider(
|
||||
(definitions: schema.PromptDefinition[]) => {
|
||||
const questions: inquirer.QuestionCollection = definitions.map(
|
||||
(definition) => {
|
||||
const question = {
|
||||
name: definition.id,
|
||||
message: definition.message,
|
||||
default: definition.default as
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| string[],
|
||||
} as inquirer.Question;
|
||||
|
||||
const validator = definition.validator;
|
||||
if (validator) {
|
||||
question.validate = (input) => validator(input);
|
||||
}
|
||||
|
||||
switch (definition.type) {
|
||||
case 'confirmation':
|
||||
question.type = 'confirm';
|
||||
break;
|
||||
case 'list':
|
||||
question.type = definition.multiselect ? 'checkbox' : 'list';
|
||||
question.choices =
|
||||
definition.items &&
|
||||
definition.items.map((item) => {
|
||||
if (typeof item == 'string') {
|
||||
return item;
|
||||
} else {
|
||||
return {
|
||||
name: item.label,
|
||||
value: item.value,
|
||||
};
|
||||
}
|
||||
});
|
||||
break;
|
||||
default:
|
||||
question.type = definition.type;
|
||||
break;
|
||||
}
|
||||
return question;
|
||||
}
|
||||
);
|
||||
|
||||
return inquirer.prompt(questions);
|
||||
}
|
||||
);
|
||||
}
|
||||
return workflow;
|
||||
}
|
||||
|
||||
function getCollection(workflow: NodeWorkflow, name: string) {
|
||||
const collection = workflow.engine.createCollection(name);
|
||||
if (!collection) throw new Error(`Cannot find collection '${name}'`);
|
||||
return collection;
|
||||
}
|
||||
|
||||
function printGenHelp(
|
||||
export function printGenHelp(
|
||||
opts: GenerateOptions,
|
||||
schema: Schema,
|
||||
logger: logging.Logger
|
||||
logger: Console
|
||||
) {
|
||||
printHelp(
|
||||
`${commandName} generate ${opts.collectionName}:${opts.schematicName}`,
|
||||
@ -289,141 +121,52 @@ function printGenHelp(
|
||||
},
|
||||
},
|
||||
},
|
||||
logger
|
||||
logger as any
|
||||
);
|
||||
}
|
||||
|
||||
async function getSchematicDefaults(
|
||||
root: string,
|
||||
collection: string,
|
||||
schematic: string
|
||||
) {
|
||||
const workspace = await new experimental.workspace.Workspace(
|
||||
normalize(root) as Path,
|
||||
new NodeJsSyncHost()
|
||||
)
|
||||
.loadWorkspaceFromHost('workspace.json' as Path)
|
||||
.toPromise();
|
||||
function readDefaultCollection(workspace: WorkspaceDefinition) {
|
||||
return workspace.cli ? workspace.cli.defaultCollection : null;
|
||||
}
|
||||
|
||||
let result = {};
|
||||
if (workspace.getSchematics()) {
|
||||
const schematicObject = workspace.getSchematics()[
|
||||
`${collection}:${schematic}`
|
||||
];
|
||||
if (schematicObject) {
|
||||
result = { ...result, ...(schematicObject as {}) };
|
||||
export function flushChanges(root: string, fileChanges: FileChange[]) {
|
||||
fileChanges.forEach((f) => {
|
||||
const fpath = path.join(root, f.path);
|
||||
if (f.type === 'CREATE') {
|
||||
mkdirpSync(path.dirname(fpath));
|
||||
writeFileSync(fpath, f.content);
|
||||
} else if (f.type === 'UPDATE') {
|
||||
writeFileSync(fpath, f.content);
|
||||
} else if (f.type === 'DELETE') {
|
||||
try {
|
||||
const stat = statSync(fpath);
|
||||
if (stat.isDirectory()) {
|
||||
rmdirSync(fpath, { recursive: true });
|
||||
} else {
|
||||
unlinkSync(fpath);
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
const collectionObject = workspace.getSchematics()[collection];
|
||||
if (
|
||||
typeof collectionObject == 'object' &&
|
||||
!Array.isArray(collectionObject)
|
||||
) {
|
||||
result = { ...result, ...(collectionObject[schematic] as {}) };
|
||||
});
|
||||
}
|
||||
|
||||
function printChanges(fileChanges: FileChange[]) {
|
||||
fileChanges.forEach((f) => {
|
||||
if (f.type === 'CREATE') {
|
||||
console.log(`${chalk.green('CREATE')} ${f.path}`);
|
||||
} else if (f.type === 'UPDATE') {
|
||||
console.log(`${chalk.white('UPDATE')} ${f.path}`);
|
||||
} else if (f.type === 'DELETE') {
|
||||
console.log(`${chalk.yellow('DELETE')} ${f.path}`);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function runSchematic(
|
||||
root: string,
|
||||
workflow: NodeWorkflow,
|
||||
logger: logging.Logger,
|
||||
opts: GenerateOptions,
|
||||
schematic: Schematic<
|
||||
FileSystemCollectionDescription,
|
||||
FileSystemSchematicDescription
|
||||
>,
|
||||
allowAdditionalArgs = false
|
||||
): Promise<number> {
|
||||
const flattenedSchema = (await workflow.registry
|
||||
.flatten(schematic.description.schemaJson)
|
||||
.toPromise()) as Schema;
|
||||
|
||||
if (opts.help) {
|
||||
printGenHelp(opts, flattenedSchema as Schema, logger);
|
||||
return 0;
|
||||
}
|
||||
|
||||
const defaults =
|
||||
opts.schematicName === 'tao-new' || opts.schematicName === 'ng-new'
|
||||
? {}
|
||||
: await getSchematicDefaults(
|
||||
root,
|
||||
opts.collectionName,
|
||||
opts.schematicName
|
||||
);
|
||||
const record = { loggingQueue: [] as string[], error: false };
|
||||
workflow.reporter.subscribe(createRecorder(record, logger));
|
||||
|
||||
const schematicOptions = normalizeOptions(
|
||||
opts.schematicOptions,
|
||||
flattenedSchema
|
||||
);
|
||||
|
||||
if (schematicOptions['--'] && !allowAdditionalArgs) {
|
||||
schematicOptions['--'].forEach((unmatched) => {
|
||||
const message =
|
||||
`Could not match option '${unmatched.name}' to the ${opts.collectionName}:${opts.schematicName} schema.` +
|
||||
(unmatched.possible.length > 0
|
||||
? ` Possible matches : ${unmatched.possible.join()}`
|
||||
: '');
|
||||
logger.fatal(message);
|
||||
});
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
await workflow
|
||||
.execute({
|
||||
collection: opts.collectionName,
|
||||
schematic: opts.schematicName,
|
||||
options: { ...defaults, ...schematicOptions },
|
||||
debug: opts.debug,
|
||||
logger,
|
||||
})
|
||||
.toPromise();
|
||||
|
||||
if (!record.error) {
|
||||
record.loggingQueue.forEach((log) => logger.info(log));
|
||||
}
|
||||
|
||||
if (opts.dryRun) {
|
||||
logger.warn(`\nNOTE: The "dryRun" flag means no changes were made.`);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
async function readDefaultCollection(host: virtualFs.Host<fs.Stats>) {
|
||||
const workspaceJson = JSON.parse(
|
||||
new HostTree(host).read('workspace.json').toString()
|
||||
);
|
||||
return workspaceJson.cli ? workspaceJson.cli.defaultCollection : null;
|
||||
});
|
||||
}
|
||||
|
||||
export async function taoNew(root: string, args: string[], isVerbose = false) {
|
||||
const logger = getLogger(isVerbose);
|
||||
|
||||
return handleErrors(logger, isVerbose, async () => {
|
||||
const fsHost = new virtualFs.ScopedHost(
|
||||
new NodeJsSyncHost(),
|
||||
normalize(root)
|
||||
);
|
||||
const opts = parseGenerateOpts(args, 'new', null);
|
||||
const workflow = await createWorkflow(fsHost, root, opts);
|
||||
const collection = getCollection(workflow, opts.collectionName);
|
||||
const schematic = collection.createSchematic(
|
||||
opts.schematicOptions.cli === 'ng' ? 'ng-new' : 'tao-new',
|
||||
true
|
||||
);
|
||||
const allowAdditionalArgs = true; // we can't yet know the schema to validate against
|
||||
return runSchematic(
|
||||
root,
|
||||
workflow,
|
||||
logger,
|
||||
{ ...opts, schematicName: schematic.description.name },
|
||||
schematic,
|
||||
allowAdditionalArgs
|
||||
);
|
||||
return (await import('./ngcli-adapter')).invokeNew(logger, root, opts);
|
||||
});
|
||||
}
|
||||
|
||||
@ -433,27 +176,47 @@ export async function generate(
|
||||
isVerbose = false
|
||||
) {
|
||||
const logger = getLogger(isVerbose);
|
||||
const ws = new Workspaces();
|
||||
|
||||
return handleErrors(logger, isVerbose, async () => {
|
||||
const fsHost = new virtualFs.ScopedHost(
|
||||
new NodeJsSyncHost(),
|
||||
normalize(root)
|
||||
);
|
||||
const workspaceDefinition = await ws.readWorkspaceConfiguration(root);
|
||||
const opts = parseGenerateOpts(
|
||||
args,
|
||||
'generate',
|
||||
await readDefaultCollection(fsHost)
|
||||
readDefaultCollection(workspaceDefinition)
|
||||
);
|
||||
|
||||
const workflow = await createWorkflow(fsHost, root, opts);
|
||||
const collection = getCollection(workflow, opts.collectionName);
|
||||
const schematic = collection.createSchematic(opts.schematicName, true);
|
||||
return runSchematic(
|
||||
root,
|
||||
workflow,
|
||||
logger,
|
||||
{ ...opts, schematicName: schematic.description.name },
|
||||
schematic
|
||||
);
|
||||
if (ws.isNxSchematic(opts.collectionName, opts.schematicName)) {
|
||||
const { schema, implementation } = ws.readSchematic(
|
||||
opts.collectionName,
|
||||
opts.schematicName
|
||||
);
|
||||
|
||||
if (opts.help) {
|
||||
printGenHelp(opts, schema, logger as any);
|
||||
return 0;
|
||||
}
|
||||
|
||||
const combinedOpts = await combineOptionsForSchematic(
|
||||
opts.schematicOptions,
|
||||
opts.collectionName,
|
||||
opts.schematicName,
|
||||
workspaceDefinition,
|
||||
schema,
|
||||
opts.interactive
|
||||
);
|
||||
const host = new FsTree(root, isVerbose, logger);
|
||||
await implementation(combinedOpts)(host);
|
||||
const changes = host.listChanges();
|
||||
|
||||
printChanges(changes);
|
||||
if (!opts.dryRun) {
|
||||
flushChanges(root, changes);
|
||||
} else {
|
||||
logger.warn(`\nNOTE: The "dryRun" flag means no changes were made.`);
|
||||
}
|
||||
} else {
|
||||
return (await import('./ngcli-adapter')).generate(logger, root, opts);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@ -1,12 +1,40 @@
|
||||
import { Architect } from '@angular-devkit/architect';
|
||||
import { WorkspaceNodeModulesArchitectHost } from '@angular-devkit/architect/node';
|
||||
import { json, JsonObject, schema, workspaces } from '@angular-devkit/core';
|
||||
import {
|
||||
experimental,
|
||||
json,
|
||||
JsonObject,
|
||||
logging,
|
||||
normalize,
|
||||
Path,
|
||||
schema,
|
||||
tags,
|
||||
terminal,
|
||||
virtualFs,
|
||||
workspaces,
|
||||
} from '@angular-devkit/core';
|
||||
import { NodeJsSyncHost } from '@angular-devkit/core/node';
|
||||
import { coerceTypes, convertAliases, Options, Schema } from '../shared/params';
|
||||
import { printRunHelp, RunOptions } from '@nrwl/tao/src/commands/run';
|
||||
import {
|
||||
coerceTypesInOptions,
|
||||
convertAliases,
|
||||
Options,
|
||||
Schema,
|
||||
} from '../shared/params';
|
||||
import { printRunHelp, RunOptions } from './run';
|
||||
import {
|
||||
FileSystemCollectionDescription,
|
||||
FileSystemSchematicDescription,
|
||||
NodeWorkflow,
|
||||
validateOptionsWithSchema,
|
||||
} from '@angular-devkit/schematics/tools';
|
||||
import { DryRunEvent, formats, Schematic } from '@angular-devkit/schematics';
|
||||
import * as fs from 'fs';
|
||||
import * as inquirer from 'inquirer';
|
||||
import { detectPackageManager } from '../shared/detect-package-manager';
|
||||
import { GenerateOptions, printGenHelp } from './generate';
|
||||
|
||||
function normalizeOptions(opts: Options, schema: Schema): Options {
|
||||
return convertAliases(coerceTypes(opts, schema), schema, false);
|
||||
return convertAliases(coerceTypesInOptions(opts, schema), schema, false);
|
||||
}
|
||||
|
||||
export async function run(logger: any, root: string, opts: RunOptions) {
|
||||
@ -52,3 +80,289 @@ export async function run(logger: any, root: string, opts: RunOptions) {
|
||||
await run.stop();
|
||||
return result.success ? 0 : 1;
|
||||
}
|
||||
|
||||
async function createWorkflow(
|
||||
fsHost: virtualFs.Host<fs.Stats>,
|
||||
root: string,
|
||||
opts: any
|
||||
) {
|
||||
const workflow = new NodeWorkflow(fsHost, {
|
||||
force: opts.force,
|
||||
dryRun: opts.dryRun,
|
||||
packageManager: detectPackageManager(),
|
||||
root: normalize(root),
|
||||
registry: new schema.CoreSchemaRegistry(formats.standardFormats),
|
||||
resolvePaths: [process.cwd(), root],
|
||||
});
|
||||
const _params = opts.schematicOptions._;
|
||||
delete opts.schematicOptions._;
|
||||
workflow.registry.addSmartDefaultProvider('argv', (schema: JsonObject) => {
|
||||
if ('index' in schema) {
|
||||
return _params[Number(schema['index'])];
|
||||
} else {
|
||||
return _params;
|
||||
}
|
||||
});
|
||||
|
||||
if (opts.defaults) {
|
||||
workflow.registry.addPreTransform(schema.transforms.addUndefinedDefaults);
|
||||
} else {
|
||||
workflow.registry.addPostTransform(schema.transforms.addUndefinedDefaults);
|
||||
}
|
||||
|
||||
workflow.engineHost.registerOptionsTransform(
|
||||
validateOptionsWithSchema(workflow.registry)
|
||||
);
|
||||
|
||||
if (opts.interactive !== false && isTTY()) {
|
||||
workflow.registry.usePromptProvider(
|
||||
(definitions: schema.PromptDefinition[]) => {
|
||||
const questions: inquirer.QuestionCollection = definitions.map(
|
||||
(definition) => {
|
||||
const question = {
|
||||
name: definition.id,
|
||||
message: definition.message,
|
||||
default: definition.default as
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| string[],
|
||||
} as inquirer.Question;
|
||||
|
||||
const validator = definition.validator;
|
||||
if (validator) {
|
||||
question.validate = (input) => validator(input);
|
||||
}
|
||||
|
||||
switch (definition.type) {
|
||||
case 'confirmation':
|
||||
question.type = 'confirm';
|
||||
break;
|
||||
case 'list':
|
||||
question.type = definition.multiselect ? 'checkbox' : 'list';
|
||||
question.choices =
|
||||
definition.items &&
|
||||
definition.items.map((item) => {
|
||||
if (typeof item == 'string') {
|
||||
return item;
|
||||
} else {
|
||||
return {
|
||||
name: item.label,
|
||||
value: item.value,
|
||||
};
|
||||
}
|
||||
});
|
||||
break;
|
||||
default:
|
||||
question.type = definition.type;
|
||||
break;
|
||||
}
|
||||
return question;
|
||||
}
|
||||
);
|
||||
|
||||
return inquirer.prompt(questions);
|
||||
}
|
||||
);
|
||||
}
|
||||
return workflow;
|
||||
}
|
||||
|
||||
function getCollection(workflow: any, name: string) {
|
||||
const collection = workflow.engine.createCollection(name);
|
||||
if (!collection) throw new Error(`Cannot find collection '${name}'`);
|
||||
return collection;
|
||||
}
|
||||
|
||||
function createRecorder(
|
||||
record: {
|
||||
loggingQueue: string[];
|
||||
error: boolean;
|
||||
},
|
||||
logger: logging.Logger
|
||||
) {
|
||||
return (event: DryRunEvent) => {
|
||||
const eventPath = event.path.startsWith('/')
|
||||
? event.path.substr(1)
|
||||
: event.path;
|
||||
if (event.kind === 'error') {
|
||||
record.error = true;
|
||||
logger.warn(
|
||||
`ERROR! ${eventPath} ${
|
||||
event.description == 'alreadyExist'
|
||||
? 'already exists'
|
||||
: 'does not exist.'
|
||||
}.`
|
||||
);
|
||||
} else if (event.kind === 'update') {
|
||||
record.loggingQueue.push(
|
||||
tags.oneLine`${terminal.white('UPDATE')} ${eventPath} (${
|
||||
event.content.length
|
||||
} bytes)`
|
||||
);
|
||||
} else if (event.kind === 'create') {
|
||||
record.loggingQueue.push(
|
||||
tags.oneLine`${terminal.green('CREATE')} ${eventPath} (${
|
||||
event.content.length
|
||||
} bytes)`
|
||||
);
|
||||
} else if (event.kind === 'delete') {
|
||||
record.loggingQueue.push(`${terminal.yellow('DELETE')} ${eventPath}`);
|
||||
} else if (event.kind === 'rename') {
|
||||
record.loggingQueue.push(
|
||||
`${terminal.blue('RENAME')} ${eventPath} => ${event.to}`
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function getSchematicDefaults(
|
||||
root: string,
|
||||
collection: string,
|
||||
schematic: string
|
||||
) {
|
||||
const workspace = await new experimental.workspace.Workspace(
|
||||
normalize(root) as Path,
|
||||
new NodeJsSyncHost()
|
||||
)
|
||||
.loadWorkspaceFromHost('workspace.json' as Path)
|
||||
.toPromise();
|
||||
|
||||
let result = {};
|
||||
if (workspace.getSchematics()) {
|
||||
const schematicObject = workspace.getSchematics()[
|
||||
`${collection}:${schematic}`
|
||||
];
|
||||
if (schematicObject) {
|
||||
result = { ...result, ...(schematicObject as {}) };
|
||||
}
|
||||
const collectionObject = workspace.getSchematics()[collection];
|
||||
if (
|
||||
typeof collectionObject == 'object' &&
|
||||
!Array.isArray(collectionObject)
|
||||
) {
|
||||
result = { ...result, ...(collectionObject[schematic] as {}) };
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function runSchematic(
|
||||
root: string,
|
||||
workflow: NodeWorkflow,
|
||||
logger: logging.Logger,
|
||||
opts: GenerateOptions,
|
||||
schematic: Schematic<
|
||||
FileSystemCollectionDescription,
|
||||
FileSystemSchematicDescription
|
||||
>,
|
||||
allowAdditionalArgs = false
|
||||
): Promise<number> {
|
||||
const flattenedSchema = (await workflow.registry
|
||||
.flatten(schematic.description.schemaJson)
|
||||
.toPromise()) as Schema;
|
||||
|
||||
if (opts.help) {
|
||||
printGenHelp(opts, flattenedSchema as Schema, logger as any);
|
||||
return 0;
|
||||
}
|
||||
|
||||
const defaults =
|
||||
opts.schematicName === 'tao-new' || opts.schematicName === 'ng-new'
|
||||
? {}
|
||||
: await getSchematicDefaults(
|
||||
root,
|
||||
opts.collectionName,
|
||||
opts.schematicName
|
||||
);
|
||||
const record = { loggingQueue: [] as string[], error: false };
|
||||
workflow.reporter.subscribe(createRecorder(record, logger));
|
||||
|
||||
const schematicOptions = normalizeOptions(
|
||||
opts.schematicOptions,
|
||||
flattenedSchema
|
||||
);
|
||||
|
||||
if (schematicOptions['--'] && !allowAdditionalArgs) {
|
||||
schematicOptions['--'].forEach((unmatched) => {
|
||||
const message =
|
||||
`Could not match option '${unmatched.name}' to the ${opts.collectionName}:${opts.schematicName} schema.` +
|
||||
(unmatched.possible.length > 0
|
||||
? ` Possible matches : ${unmatched.possible.join()}`
|
||||
: '');
|
||||
logger.fatal(message);
|
||||
});
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
await workflow
|
||||
.execute({
|
||||
collection: opts.collectionName,
|
||||
schematic: opts.schematicName,
|
||||
options: { ...defaults, ...schematicOptions },
|
||||
debug: opts.debug,
|
||||
logger,
|
||||
})
|
||||
.toPromise();
|
||||
|
||||
if (!record.error) {
|
||||
record.loggingQueue.forEach((log) => logger.info(log));
|
||||
}
|
||||
|
||||
if (opts.dryRun) {
|
||||
logger.warn(`\nNOTE: The "dryRun" flag means no changes were made.`);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
function isTTY(): boolean {
|
||||
return !!process.stdout.isTTY && process.env['CI'] !== 'true';
|
||||
}
|
||||
|
||||
export async function generate(
|
||||
logger: logging.Logger,
|
||||
root: string,
|
||||
opts: GenerateOptions
|
||||
) {
|
||||
const fsHost = new virtualFs.ScopedHost(
|
||||
new NodeJsSyncHost(),
|
||||
normalize(root)
|
||||
);
|
||||
const workflow = await createWorkflow(fsHost, root, opts);
|
||||
const collection = getCollection(workflow, opts.collectionName);
|
||||
const schematic = collection.createSchematic(opts.schematicName, true);
|
||||
return runSchematic(
|
||||
root,
|
||||
workflow,
|
||||
logger,
|
||||
{ ...opts, schematicName: schematic.description.name },
|
||||
schematic
|
||||
);
|
||||
}
|
||||
|
||||
export async function invokeNew(
|
||||
logger: logging.Logger,
|
||||
root: string,
|
||||
opts: GenerateOptions
|
||||
) {
|
||||
const fsHost = new virtualFs.ScopedHost(
|
||||
new NodeJsSyncHost(),
|
||||
normalize(root)
|
||||
);
|
||||
const workflow = await createWorkflow(fsHost, root, opts);
|
||||
const collection = getCollection(workflow, opts.collectionName);
|
||||
const schematic = collection.createSchematic(
|
||||
opts.schematicOptions.cli === 'ng' ? 'ng-new' : 'tao-new',
|
||||
true
|
||||
);
|
||||
const allowAdditionalArgs = true; // we can't yet know the schema to validate against
|
||||
return runSchematic(
|
||||
root,
|
||||
workflow,
|
||||
logger,
|
||||
{ ...opts, schematicName: schematic.description.name },
|
||||
schematic,
|
||||
allowAdditionalArgs
|
||||
);
|
||||
}
|
||||
|
||||
@ -1,8 +1,7 @@
|
||||
import * as chalk from 'chalk';
|
||||
import * as minimist from 'minimist';
|
||||
import { getLogger } from '../shared/logger';
|
||||
import {
|
||||
combineOptions,
|
||||
combineOptionsForBuilder,
|
||||
convertToCamelCase,
|
||||
handleErrors,
|
||||
Options,
|
||||
@ -10,6 +9,7 @@ import {
|
||||
} from '../shared/params';
|
||||
import { commandName, printHelp } from '../shared/print-help';
|
||||
import { WorkspaceDefinition, Workspaces } from '../shared/workspace';
|
||||
const chalk = require('chalk');
|
||||
|
||||
export interface RunOptions {
|
||||
project: string;
|
||||
@ -142,8 +142,8 @@ export async function run(root: string, args: string[], isVerbose: boolean) {
|
||||
const target =
|
||||
workspaceDefinition.projects[opts.project].architect[opts.target];
|
||||
if (ws.isNxBuilder(target)) {
|
||||
const schema = ws.readBuilderSchema(target);
|
||||
const combinedOptions = combineOptions(
|
||||
const { schema, implementation } = ws.readBuilder(target);
|
||||
const combinedOptions = combineOptionsForBuilder(
|
||||
opts.runOptions,
|
||||
opts.configuration,
|
||||
target,
|
||||
@ -153,8 +153,7 @@ export async function run(root: string, args: string[], isVerbose: boolean) {
|
||||
printRunHelp(opts, schema, logger);
|
||||
return 0;
|
||||
}
|
||||
const builderFn = ws.readBuilderFunction(target);
|
||||
return await builderFn(combinedOptions);
|
||||
return await implementation(combinedOptions);
|
||||
} else {
|
||||
return (await import('./ngcli-adapter')).run(logger, root, opts);
|
||||
}
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { ParsedArgs } from 'minimist';
|
||||
import {
|
||||
coerceTypes,
|
||||
coerceTypesInOptions,
|
||||
convertAliases,
|
||||
convertToCamelCase,
|
||||
lookupUnmatched,
|
||||
@ -12,14 +12,17 @@ import {
|
||||
describe('params', () => {
|
||||
describe('coerceTypes', () => {
|
||||
it('should handle booleans', () => {
|
||||
const opts = coerceTypes({ a: true, b: 'true', c: false, d: 'true' }, {
|
||||
properties: {
|
||||
a: { type: 'boolean' },
|
||||
b: { type: 'boolean' },
|
||||
c: { type: 'boolean' },
|
||||
d: { type: 'string' },
|
||||
},
|
||||
} as Schema);
|
||||
const opts = coerceTypesInOptions(
|
||||
{ a: true, b: 'true', c: false, d: 'true' },
|
||||
{
|
||||
properties: {
|
||||
a: { type: 'boolean' },
|
||||
b: { type: 'boolean' },
|
||||
c: { type: 'boolean' },
|
||||
d: { type: 'string' },
|
||||
},
|
||||
} as Schema
|
||||
);
|
||||
|
||||
expect(opts).toEqual({
|
||||
a: true,
|
||||
@ -30,7 +33,7 @@ describe('params', () => {
|
||||
});
|
||||
|
||||
it('should handle numbers', () => {
|
||||
const opts = coerceTypes({ a: 1, b: '2', c: '3' }, {
|
||||
const opts = coerceTypesInOptions({ a: 1, b: '2', c: '3' }, {
|
||||
properties: {
|
||||
a: { type: 'number' },
|
||||
b: { type: 'number' },
|
||||
@ -46,7 +49,7 @@ describe('params', () => {
|
||||
});
|
||||
|
||||
it('should handle arrays', () => {
|
||||
const opts = coerceTypes({ a: 'one,two', b: 'three,four' }, {
|
||||
const opts = coerceTypesInOptions({ a: 'one,two', b: 'three,four' }, {
|
||||
properties: {
|
||||
a: { type: 'array' },
|
||||
b: { type: 'string' },
|
||||
@ -222,7 +225,8 @@ describe('params', () => {
|
||||
default: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
expect(opts).toEqual({ b: true, c: false });
|
||||
@ -246,11 +250,32 @@ describe('params', () => {
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
expect(opts).toEqual({ a: [{ key: 'inner' }, { key: 'inner' }] });
|
||||
});
|
||||
|
||||
it('should set defaults from argv', () => {
|
||||
const opts = setDefaults(
|
||||
{},
|
||||
{
|
||||
properties: {
|
||||
a: {
|
||||
type: 'string',
|
||||
$default: {
|
||||
$source: 'argv',
|
||||
index: 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
['argv-value']
|
||||
);
|
||||
|
||||
expect(opts).toEqual({ a: 'argv-value' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateOptsAgainstSchema', () => {
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import { strings } from '@angular-devkit/core';
|
||||
import { ParsedArgs } from 'minimist';
|
||||
import { TargetDefinition } from './workspace';
|
||||
import { TargetDefinition, WorkspaceDefinition } from './workspace';
|
||||
import * as inquirer from 'inquirer';
|
||||
|
||||
type Properties = {
|
||||
[p: string]: {
|
||||
@ -11,6 +12,8 @@ type Properties = {
|
||||
alias?: string;
|
||||
description?: string;
|
||||
default?: string | number | boolean | string[];
|
||||
$default?: { $source: 'argv'; index: number };
|
||||
'x-prompt'?: string | { message: string; type: string; items: any[] };
|
||||
};
|
||||
};
|
||||
export type Schema = {
|
||||
@ -73,19 +76,28 @@ export function convertToCamelCase(parsed: ParsedArgs): Options {
|
||||
* @param schema The schema definition with types to check against
|
||||
*
|
||||
*/
|
||||
export function coerceTypes(opts: Options, schema: Schema): Options {
|
||||
export function coerceTypesInOptions(opts: Options, schema: Schema): Options {
|
||||
Object.keys(opts).forEach((k) => {
|
||||
if (schema.properties[k] && schema.properties[k].type == 'boolean') {
|
||||
opts[k] = opts[k] === true || opts[k] === 'true';
|
||||
} else if (schema.properties[k] && schema.properties[k].type == 'number') {
|
||||
opts[k] = Number(opts[k]);
|
||||
} else if (schema.properties[k] && schema.properties[k].type == 'array') {
|
||||
opts[k] = opts[k].toString().split(',');
|
||||
}
|
||||
opts[k] = coerceType(
|
||||
schema.properties[k] ? schema.properties[k].type : 'unknown',
|
||||
opts[k]
|
||||
);
|
||||
});
|
||||
return opts;
|
||||
}
|
||||
|
||||
function coerceType(type: string, value: any) {
|
||||
if (type == 'boolean') {
|
||||
return value === true || value == 'true';
|
||||
} else if (type == 'number') {
|
||||
return Number(value);
|
||||
} else if (type == 'array') {
|
||||
return value.toString().split(',');
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts any options passed in with short aliases to their full names if found
|
||||
* Unmatched options are added to opts['--']
|
||||
@ -196,29 +208,38 @@ function throwInvalidSchema(propName: string, schema: any) {
|
||||
);
|
||||
}
|
||||
|
||||
export function setDefaults(opts: { [k: string]: any }, schema: Schema) {
|
||||
setDefaultsInObject(opts, schema.properties);
|
||||
export function setDefaults(
|
||||
opts: { [k: string]: any },
|
||||
schema: Schema,
|
||||
argv: string[]
|
||||
) {
|
||||
setDefaultsInObject(opts, schema.properties, argv);
|
||||
return opts;
|
||||
}
|
||||
|
||||
function setDefaultsInObject(
|
||||
opts: { [k: string]: any },
|
||||
properties: Properties
|
||||
properties: Properties,
|
||||
argv: string[]
|
||||
) {
|
||||
Object.keys(properties).forEach((p) => {
|
||||
setPropertyDefault(opts, p, properties[p]);
|
||||
setPropertyDefault(opts, p, properties[p], argv);
|
||||
});
|
||||
}
|
||||
|
||||
function setPropertyDefault(
|
||||
opts: { [k: string]: any },
|
||||
propName: string,
|
||||
schema: any
|
||||
schema: any,
|
||||
argv: string[]
|
||||
) {
|
||||
if (schema.type !== 'object' && schema.type !== 'array') {
|
||||
if (opts[propName] === undefined && schema.default !== undefined) {
|
||||
opts[propName] = schema.default;
|
||||
}
|
||||
if (opts[propName] === undefined && schema.$default !== undefined) {
|
||||
opts[propName] = coerceType(schema.type, argv[schema.$default.index]);
|
||||
}
|
||||
} else if (schema.type === 'array') {
|
||||
const items = schema.items || {};
|
||||
if (
|
||||
@ -227,29 +248,101 @@ function setPropertyDefault(
|
||||
items.type === 'object'
|
||||
) {
|
||||
opts[propName].forEach((valueInArray) =>
|
||||
setDefaultsInObject(valueInArray, items.properties || {})
|
||||
setDefaultsInObject(valueInArray, items.properties || {}, argv)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
setDefaultsInObject(opts[propName], schema.properties);
|
||||
setDefaultsInObject(opts[propName], schema.properties, argv);
|
||||
}
|
||||
}
|
||||
|
||||
export function combineOptions(
|
||||
export function combineOptionsForBuilder(
|
||||
commandLineOpts: Options,
|
||||
config: string,
|
||||
target: TargetDefinition,
|
||||
schema: Schema
|
||||
) {
|
||||
const r = convertAliases(coerceTypes(commandLineOpts, schema), schema, false);
|
||||
const r = convertAliases(
|
||||
coerceTypesInOptions(commandLineOpts, schema),
|
||||
schema,
|
||||
false
|
||||
);
|
||||
const configOpts =
|
||||
config && target.configurations ? target.configurations[config] || {} : {};
|
||||
const combined = { ...target.options, ...configOpts, ...r };
|
||||
setDefaults(combined, schema);
|
||||
setDefaults(combined, schema, (commandLineOpts['_'] as string[]) || []);
|
||||
validateOptsAgainstSchema(combined, schema);
|
||||
return combined;
|
||||
}
|
||||
|
||||
export async function combineOptionsForSchematic(
|
||||
commandLineOpts: Options,
|
||||
collectionName: string,
|
||||
schematicName: string,
|
||||
ws: WorkspaceDefinition,
|
||||
schema: Schema,
|
||||
isInteractive: boolean
|
||||
) {
|
||||
const schematicDefaults =
|
||||
ws.schematics &&
|
||||
ws.schematics[collectionName] &&
|
||||
ws.schematics[collectionName][schematicName]
|
||||
? ws.schematics[collectionName][schematicName]
|
||||
: {};
|
||||
let combined = convertAliases(
|
||||
coerceTypesInOptions({ ...schematicDefaults, ...commandLineOpts }, schema),
|
||||
schema,
|
||||
false
|
||||
);
|
||||
if (isInteractive) {
|
||||
combined = await promptForValues(combined, schema);
|
||||
}
|
||||
setDefaults(combined, schema, (commandLineOpts['_'] as string[]) || []);
|
||||
validateOptsAgainstSchema(combined, schema);
|
||||
return combined;
|
||||
}
|
||||
|
||||
async function promptForValues(opts: Options, schema: Schema) {
|
||||
const prompts = [];
|
||||
Object.entries(schema.properties).forEach(([k, v]) => {
|
||||
if (v['x-prompt'] && opts[k] === undefined) {
|
||||
const question = {
|
||||
name: k,
|
||||
message: v['x-prompt'],
|
||||
default: v.default,
|
||||
} as any;
|
||||
|
||||
if (typeof v['x-prompt'] === 'string') {
|
||||
question.type = v.type;
|
||||
} else if (
|
||||
v['x-prompt'].type == 'confirmation' ||
|
||||
v['x-prompt'].type == 'confirm'
|
||||
) {
|
||||
question.type = 'confirm';
|
||||
} else {
|
||||
question.type = 'list';
|
||||
question.choices =
|
||||
v['x-prompt'].items &&
|
||||
v['x-prompt'].items.map((item) => {
|
||||
if (typeof item == 'string') {
|
||||
return item;
|
||||
} else {
|
||||
return {
|
||||
name: item.label,
|
||||
value: item.value,
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
prompts.push(question);
|
||||
}
|
||||
});
|
||||
|
||||
return await inquirer
|
||||
.prompt(prompts)
|
||||
.then((values) => ({ ...opts, ...values }));
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to find what the user meant by unmatched commands
|
||||
*
|
||||
|
||||
215
packages/tao/src/shared/tree.spec.ts
Normal file
215
packages/tao/src/shared/tree.spec.ts
Normal file
@ -0,0 +1,215 @@
|
||||
import { rmdirSync } from 'fs-extra';
|
||||
import { lstatSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { dirSync } from 'tmp';
|
||||
import * as path from 'path';
|
||||
import { mkdirpSync } from 'fs-extra';
|
||||
import { FileChange, FsTree, flushChanges } from './tree';
|
||||
|
||||
describe('tree', () => {
|
||||
describe('FsTree', () => {
|
||||
let dir;
|
||||
let tree: FsTree;
|
||||
beforeEach(() => {
|
||||
dir = dirSync().name;
|
||||
mkdirpSync(path.join(dir, 'parent/child'));
|
||||
writeFileSync(path.join(dir, 'root-file.txt'), 'root content');
|
||||
writeFileSync(
|
||||
path.join(dir, 'parent', 'parent-file.txt'),
|
||||
'parent content'
|
||||
);
|
||||
writeFileSync(
|
||||
path.join(dir, 'parent', 'child', 'child-file.txt'),
|
||||
'child content'
|
||||
);
|
||||
|
||||
tree = new FsTree(dir, false, console);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmdirSync(dir, { recursive: true });
|
||||
});
|
||||
|
||||
it('should return no changes, when no changes are made', () => {
|
||||
expect(tree.listChanges()).toEqual([]);
|
||||
});
|
||||
|
||||
it('should be able to read and write files', () => {
|
||||
expect(tree.read('parent/parent-file.txt').toString()).toEqual(
|
||||
'parent content'
|
||||
);
|
||||
|
||||
tree.write('parent/parent-file.txt', 'new content');
|
||||
|
||||
expect(tree.read('parent/parent-file.txt').toString()).toEqual(
|
||||
'new content'
|
||||
);
|
||||
|
||||
expect(s(tree.listChanges())).toEqual([
|
||||
{
|
||||
path: 'parent/parent-file.txt',
|
||||
type: 'UPDATE',
|
||||
content: 'new content',
|
||||
},
|
||||
]);
|
||||
|
||||
flushChanges(dir, tree.listChanges());
|
||||
|
||||
expect(
|
||||
readFileSync(path.join(dir, 'parent/parent-file.txt')).toString()
|
||||
).toEqual('new content');
|
||||
});
|
||||
|
||||
it('should be able to create files', () => {
|
||||
tree.write('parent/new-parent-file.txt', 'new parent content');
|
||||
tree.write('parent/new-child/new-child-file.txt', 'new child content');
|
||||
|
||||
expect(tree.read('parent/new-parent-file.txt').toString()).toEqual(
|
||||
'new parent content'
|
||||
);
|
||||
expect(
|
||||
tree.read('parent/new-child/new-child-file.txt').toString()
|
||||
).toEqual('new child content');
|
||||
|
||||
expect(s(tree.listChanges())).toEqual([
|
||||
{
|
||||
path: 'parent/new-parent-file.txt',
|
||||
type: 'CREATE',
|
||||
content: 'new parent content',
|
||||
},
|
||||
{
|
||||
path: 'parent/new-child/new-child-file.txt',
|
||||
type: 'CREATE',
|
||||
content: 'new child content',
|
||||
},
|
||||
]);
|
||||
|
||||
flushChanges(dir, tree.listChanges());
|
||||
|
||||
expect(
|
||||
readFileSync(path.join(dir, 'parent/new-parent-file.txt')).toString()
|
||||
).toEqual('new parent content');
|
||||
expect(
|
||||
readFileSync(
|
||||
path.join(dir, 'parent/new-child/new-child-file.txt')
|
||||
).toString()
|
||||
).toEqual('new child content');
|
||||
});
|
||||
|
||||
it('should be able to delete files', () => {
|
||||
tree.delete('parent/parent-file.txt');
|
||||
tree.write('parent/new-child/new-child-file.txt', 'new child content');
|
||||
tree.delete('parent/new-child/new-child-file.txt');
|
||||
|
||||
expect(tree.read('parent/parent-file.txt')).toEqual(null);
|
||||
expect(tree.read('parent/new-child/new-child-file.txt')).toEqual(null);
|
||||
|
||||
expect(s(tree.listChanges())).toEqual([
|
||||
{ path: 'parent/parent-file.txt', type: 'DELETE', content: null },
|
||||
]);
|
||||
|
||||
flushChanges(dir, tree.listChanges());
|
||||
|
||||
try {
|
||||
lstatSync(path.join(dir, 'parent/parent-file.txt')).isFile();
|
||||
fail('Should not reach');
|
||||
} catch (e) {}
|
||||
});
|
||||
|
||||
it('should be able to rename files', () => {
|
||||
tree.write('parent/new-child/new-child-file.txt', 'new child content');
|
||||
tree.rename(
|
||||
'parent/new-child/new-child-file.txt',
|
||||
'renamed-new-child-file.txt'
|
||||
);
|
||||
tree.rename('root-file.txt', 'renamed-root-file.txt');
|
||||
|
||||
expect(tree.read('parent/new-child/new-child-file.txt')).toEqual(null);
|
||||
expect(tree.read('root-file.txt')).toEqual(null);
|
||||
expect(tree.read('renamed-new-child-file.txt').toString()).toEqual(
|
||||
'new child content'
|
||||
);
|
||||
expect(tree.read('renamed-root-file.txt').toString()).toEqual(
|
||||
'root content'
|
||||
);
|
||||
|
||||
expect(s(tree.listChanges())).toEqual([
|
||||
{
|
||||
path: 'renamed-new-child-file.txt',
|
||||
type: 'CREATE',
|
||||
content: 'new child content',
|
||||
},
|
||||
{ path: 'root-file.txt', type: 'DELETE', content: null },
|
||||
{
|
||||
path: 'renamed-root-file.txt',
|
||||
type: 'CREATE',
|
||||
content: 'root content',
|
||||
},
|
||||
]);
|
||||
|
||||
flushChanges(dir, tree.listChanges());
|
||||
|
||||
expect(
|
||||
readFileSync(path.join(dir, 'renamed-new-child-file.txt')).toString()
|
||||
).toEqual('new child content');
|
||||
expect(
|
||||
readFileSync(path.join(dir, 'renamed-root-file.txt')).toString()
|
||||
).toEqual('root content');
|
||||
});
|
||||
|
||||
it('should be able to delete dirs', () => {
|
||||
tree.write('parent/new-child/new-child-file.txt', 'new child content');
|
||||
|
||||
tree.delete('parent/new-child');
|
||||
tree.delete('parent/child');
|
||||
|
||||
expect(s(tree.listChanges())).toEqual([
|
||||
{ path: 'parent/child', type: 'DELETE', content: null },
|
||||
]);
|
||||
|
||||
flushChanges(dir, tree.listChanges());
|
||||
|
||||
try {
|
||||
const q = lstatSync(path.join(dir, 'parent/child')).isDirectory();
|
||||
console.log(q);
|
||||
fail('Should not reach');
|
||||
} catch (e) {}
|
||||
|
||||
try {
|
||||
lstatSync(path.join(dir, 'parent/new-child')).isDirectory();
|
||||
fail('Should not reach');
|
||||
} catch (e) {}
|
||||
});
|
||||
|
||||
it('should return the list of children of a dir', () => {
|
||||
tree.write('parent/new-child/new-child-file.txt', 'new child content');
|
||||
|
||||
expect(tree.children('parent/child')).toEqual(['child-file.txt']);
|
||||
expect(tree.children('parent/new-child')).toEqual(['new-child-file.txt']);
|
||||
|
||||
tree.rename(
|
||||
'parent/child/child-file.txt',
|
||||
'parent/child/renamed-child-file.txt'
|
||||
);
|
||||
tree.rename(
|
||||
'parent/new-child/new-child-file.txt',
|
||||
'parent/new-child/renamed-new-child-file.txt'
|
||||
);
|
||||
|
||||
expect(tree.children('parent/child')).toEqual(['renamed-child-file.txt']);
|
||||
expect(tree.children('parent/new-child')).toEqual([
|
||||
'renamed-new-child-file.txt',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should be able to rename dirs', () => {
|
||||
// not supported yet
|
||||
});
|
||||
});
|
||||
|
||||
function s(changes: FileChange[]) {
|
||||
return changes.map((f) => {
|
||||
if (f.content) (f as any).content = f.content.toString();
|
||||
return f;
|
||||
});
|
||||
}
|
||||
});
|
||||
245
packages/tao/src/shared/tree.ts
Normal file
245
packages/tao/src/shared/tree.ts
Normal file
@ -0,0 +1,245 @@
|
||||
import * as path from 'path';
|
||||
import {
|
||||
readdirSync,
|
||||
readFileSync,
|
||||
statSync,
|
||||
unlinkSync,
|
||||
writeFileSync,
|
||||
} from 'fs';
|
||||
import { mkdirpSync, rmdirSync } from 'fs-extra';
|
||||
const chalk = require('chalk');
|
||||
|
||||
export interface Tree {
|
||||
read(filePath: string): Buffer | null;
|
||||
|
||||
write(filePath: string, content: Buffer | string): void;
|
||||
|
||||
exists(filePath: string): boolean;
|
||||
|
||||
delete(filePath: string): void;
|
||||
|
||||
rename(from: string, to: string): void;
|
||||
|
||||
isFile(filePath: string): boolean;
|
||||
|
||||
children(dirPath: string): string[];
|
||||
}
|
||||
|
||||
export interface FileChange {
|
||||
path: string;
|
||||
type: 'CREATE' | 'DELETE' | 'UPDATE';
|
||||
content: Buffer | null;
|
||||
}
|
||||
|
||||
export class FsTree implements Tree {
|
||||
private recordedChanges: {
|
||||
[path: string]: { content: Buffer | null; isDeleted: boolean };
|
||||
} = {};
|
||||
|
||||
constructor(
|
||||
private readonly root: string | null,
|
||||
private readonly isVerbose: boolean,
|
||||
private readonly logger: Console
|
||||
) {}
|
||||
|
||||
read(filePath: string): Buffer | null {
|
||||
try {
|
||||
if (this.recordedChanges[this.rp(filePath)]) {
|
||||
return this.recordedChanges[this.rp(filePath)].content;
|
||||
} else {
|
||||
return this.fsReadFile(filePath);
|
||||
}
|
||||
} catch (e) {
|
||||
if (this.isVerbose) {
|
||||
this.logger.error(e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
write(filePath: string, content: Buffer | string): void {
|
||||
try {
|
||||
this.recordedChanges[this.rp(filePath)] = {
|
||||
content: Buffer.from(content),
|
||||
isDeleted: false,
|
||||
};
|
||||
} catch (e) {
|
||||
if (this.isVerbose) {
|
||||
this.logger.error(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
overwrite(filePath: string, content: Buffer | string): void {
|
||||
this.write(filePath, content);
|
||||
}
|
||||
|
||||
exists(filePath: string): boolean {
|
||||
try {
|
||||
if (this.recordedChanges[this.rp(filePath)]) {
|
||||
return !this.recordedChanges[this.rp(filePath)].isDeleted;
|
||||
} else if (this.filesForDir(this.rp(filePath)).length > 0) {
|
||||
return true;
|
||||
} else {
|
||||
return this.fsExists(filePath);
|
||||
}
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
delete(filePath: string): void {
|
||||
if (this.filesForDir(this.rp(filePath)).length > 0) {
|
||||
this.filesForDir(this.rp(filePath)).forEach(
|
||||
(f) => (this.recordedChanges[f] = { content: null, isDeleted: true })
|
||||
);
|
||||
}
|
||||
this.recordedChanges[this.rp(filePath)] = {
|
||||
content: null,
|
||||
isDeleted: true,
|
||||
};
|
||||
}
|
||||
|
||||
rename(from: string, to: string): void {
|
||||
const content = this.read(this.rp(from));
|
||||
this.recordedChanges[this.rp(from)] = { content: null, isDeleted: true };
|
||||
this.recordedChanges[this.rp(to)] = { content: content, isDeleted: false };
|
||||
}
|
||||
|
||||
isFile(filePath: string): boolean {
|
||||
try {
|
||||
if (this.recordedChanges[this.rp(filePath)]) {
|
||||
return !this.recordedChanges[this.rp(filePath)].isDeleted;
|
||||
} else {
|
||||
return this.fsIsFile(filePath);
|
||||
}
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
children(dirPath: string): string[] {
|
||||
let res = this.fsReadDir(dirPath);
|
||||
|
||||
res = [...res, ...this.directChildrenOfDir(this.rp(dirPath))];
|
||||
return res.filter((q) => {
|
||||
const r = this.recordedChanges[path.join(this.rp(dirPath), q)];
|
||||
if (r && r.isDeleted) return false;
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
listChanges(): FileChange[] {
|
||||
const res = [] as FileChange[];
|
||||
Object.keys(this.recordedChanges).forEach((f) => {
|
||||
if (this.recordedChanges[f].isDeleted) {
|
||||
if (this.fsExists(f)) {
|
||||
res.push({ path: f, type: 'DELETE', content: null });
|
||||
}
|
||||
} else {
|
||||
if (this.fsExists(f)) {
|
||||
res.push({
|
||||
path: f,
|
||||
type: 'UPDATE',
|
||||
content: this.recordedChanges[f].content,
|
||||
});
|
||||
} else {
|
||||
res.push({
|
||||
path: f,
|
||||
type: 'CREATE',
|
||||
content: this.recordedChanges[f].content,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
||||
private fsReadDir(dirPath: string) {
|
||||
if (!this.delegateToFs) return [];
|
||||
try {
|
||||
return readdirSync(path.join(this.root, dirPath));
|
||||
} catch (e) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
private fsIsFile(filePath: string) {
|
||||
if (!this.delegateToFs) return false;
|
||||
const stat = statSync(path.join(this.root, filePath));
|
||||
return stat.isFile();
|
||||
}
|
||||
|
||||
private fsReadFile(filePath: string) {
|
||||
if (!this.delegateToFs) return null;
|
||||
return readFileSync(path.join(this.root, filePath));
|
||||
}
|
||||
|
||||
private fsExists(filePath: string): boolean {
|
||||
if (!this.delegateToFs) return false;
|
||||
try {
|
||||
const stat = statSync(path.join(this.root, filePath));
|
||||
return stat.isFile() || stat.isDirectory();
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private delegateToFs(): boolean {
|
||||
return this.root !== null;
|
||||
}
|
||||
|
||||
private filesForDir(path: string): string[] {
|
||||
return Object.keys(this.recordedChanges).filter(
|
||||
(f) => f.startsWith(path + '/') && !this.recordedChanges[f].isDeleted
|
||||
);
|
||||
}
|
||||
|
||||
private directChildrenOfDir(path: string): string[] {
|
||||
const res = {};
|
||||
Object.keys(this.recordedChanges).forEach((f) => {
|
||||
if (f.startsWith(path + '/')) {
|
||||
const [_, file] = f.split(path + '/');
|
||||
res[file.split('/')[0]] = true;
|
||||
}
|
||||
});
|
||||
return Object.keys(res);
|
||||
}
|
||||
|
||||
private rp(pp: string) {
|
||||
return pp.startsWith('/') ? pp.substring(1) : pp;
|
||||
}
|
||||
}
|
||||
|
||||
export function flushChanges(root: string, fileChanges: FileChange[]) {
|
||||
fileChanges.forEach((f) => {
|
||||
const fpath = path.join(root, f.path);
|
||||
if (f.type === 'CREATE') {
|
||||
mkdirpSync(path.dirname(fpath));
|
||||
writeFileSync(fpath, f.content);
|
||||
} else if (f.type === 'UPDATE') {
|
||||
writeFileSync(fpath, f.content);
|
||||
} else if (f.type === 'DELETE') {
|
||||
try {
|
||||
const stat = statSync(fpath);
|
||||
if (stat.isDirectory()) {
|
||||
rmdirSync(fpath, { recursive: true });
|
||||
} else {
|
||||
unlinkSync(fpath);
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function printChanges(fileChanges: FileChange[]) {
|
||||
fileChanges.forEach((f) => {
|
||||
if (f.type === 'CREATE') {
|
||||
console.log(`${chalk.green('CREATE')} ${f.path}`);
|
||||
} else if (f.type === 'UPDATE') {
|
||||
console.log(`${chalk.white('UPDATE')} ${f.path}`);
|
||||
} else if (f.type === 'DELETE') {
|
||||
console.log(`${chalk.yellow('DELETE')} ${f.path}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -4,6 +4,8 @@ import * as path from 'path';
|
||||
export interface WorkspaceDefinition {
|
||||
projects: { [projectName: string]: ProjectDefinition };
|
||||
defaultProject: string | undefined;
|
||||
schematics: { [collectionName: string]: { [schematicName: string]: any } };
|
||||
cli: { defaultCollection: string };
|
||||
}
|
||||
|
||||
export interface ProjectDefinition {
|
||||
@ -38,35 +40,52 @@ export class Workspaces {
    return buildersJson['$schema'] === '@nrwl/tao/src/builders-schema.json';
  }

  readBuilderSchema(target: TargetDefinition) {
  isNxSchematic(collectionName: string, schematicName: string) {
    const schema = this.readSchematic(collectionName, schematicName).schema;
    return schema['$schema'] === '@nrwl/tao/src/schematic-schema.json';
  }

  readBuilder(target: TargetDefinition) {
    try {
      const { builder, buildersFilePath, buildersJson } = this.readBuildersJson(
        target
      );
      const schemaPath = path.join(
        path.dirname(buildersFilePath),
        buildersJson.builders[builder].schema || ''
      );
      return JSON.parse(
        fs.readFileSync(require.resolve(schemaPath)).toString()
      );
      const builderDir = path.dirname(buildersFilePath);
      const buildConfig = buildersJson.builders[builder];
      const schemaPath = path.join(builderDir, buildConfig.schema || '');
      const schema = JSON.parse(fs.readFileSync(schemaPath).toString());
      const module = require(path.join(builderDir, buildConfig.implementation));
      const implementation = module.default;
      return { schema, implementation };
    } catch (e) {
      throw new Error(`Unable to resolve ${target.builder}.\n${e.message}`);
    }
  }

  readBuilderFunction(target: TargetDefinition) {
  readSchematic(collectionName: string, schematicName: string) {
    try {
      const { builder, buildersFilePath, buildersJson } = this.readBuildersJson(
        target
      );
      const {
        schematicsFilePath,
        schematicsJson,
        normalizedSchematicName,
      } = this.readSchematicsJson(collectionName, schematicName);
      const schematicsDir = path.dirname(schematicsFilePath);
      const schematicConfig =
        schematicsJson.schematics[normalizedSchematicName];
      const schemaPath = path.join(schematicsDir, schematicConfig.schema || '');
      const schema = JSON.parse(fs.readFileSync(schemaPath).toString());
      const module = require(path.join(
        path.dirname(buildersFilePath),
        buildersJson.builders[builder].implementation
        schematicsDir,
        schematicConfig.implementation
          ? schematicConfig.implementation
          : schematicConfig.factory
      ));
      return module.default;
      const implementation = module.default;
      return { schema, implementation };
    } catch (e) {
      throw new Error(`Unable to resolve ${target.builder}.\n${e.message}`);
      throw new Error(
        `Unable to resolve ${collectionName}:${schematicName}.\n${e.message}`
      );
    }
  }

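Both readers now return the parsed `schema` together with the default-exported `implementation`, so a caller can validate options and invoke the factory in one place. A minimal sketch of such a call site, assuming the factory follows the `factory(options)(tree)` shape used by the run-commands schematic below; the validation step is only hinted at:

```ts
// Sketch: resolving and invoking an Nx-native schematic.
// `workspaces` is an instance of the Workspaces class from this file.
async function invokeSchematic(
  workspaces: Workspaces,
  collectionName: string,
  schematicName: string,
  options: { [k: string]: any },
  host: any // the in-memory tree the factory will mutate
) {
  const { schema, implementation } = workspaces.readSchematic(
    collectionName,
    schematicName
  );
  // `schema` is the parsed schema.json; a real runner would validate
  // `options` against it before calling the factory (omitted here).
  const rule = implementation(options); // factory returns a function over the tree
  await rule(host);
}
```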
@ -88,4 +107,44 @@ export class Workspaces {
    }
    return { builder, buildersFilePath, buildersJson };
  }

  private readSchematicsJson(collectionName: string, schematic: string) {
    const packageJsonPath = require.resolve(`${collectionName}/package.json`);
    const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
    const schematicsFile = packageJson.schematics;
    const schematicsFilePath = require.resolve(
      path.join(path.dirname(packageJsonPath), schematicsFile)
    );
    const schematicsJson = JSON.parse(
      fs.readFileSync(schematicsFilePath).toString()
    );

    let normalizedSchematicName;
    for (let k of Object.keys(schematicsJson.schematics)) {
      if (k === schematic) {
        normalizedSchematicName = k;
        break;
      }
      if (
        schematicsJson.schematics[k].aliases &&
        schematicsJson.schematics[k].aliases.indexOf(schematic) > -1
      ) {
        normalizedSchematicName = k;
        break;
      }
    }

    if (!normalizedSchematicName) {
      for (let parent of schematicsJson.extends || []) {
        try {
          return this.readSchematicsJson(parent, schematic);
        } catch (e) {}
      }

      throw new Error(
        `Cannot find schematic '${schematic}' in ${schematicsFilePath}.`
      );
    }
    return { schematicsFilePath, schematicsJson, normalizedSchematicName };
  }
}

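To make the lookup concrete, here is a hypothetical collection manifest, written as a TypeScript literal purely for illustration (the paths and names are invented): `readSchematicsJson` matches a schematic either by its key or by an entry in `aliases`, and falls back to each collection listed in `extends` before giving up.

```ts
// Hypothetical shape of the schematicsJson resolved above.
const exampleSchematicsJson = {
  extends: ['@nrwl/workspace'], // parents searched when the name is not found here
  schematics: {
    'run-commands': {
      factory: './src/schematics/run-commands/run-commands', // invented path
      schema: './src/schematics/run-commands/schema.json', // invented path
      aliases: ['rc'], // `rc` resolves to 'run-commands' via the alias scan
    },
  },
};
```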
@ -1,30 +1,23 @@
import { Tree } from '@angular-devkit/schematics';
import { createEmptyWorkspace } from '@nrwl/workspace/testing';
import { runSchematic } from '../../utils/testing';
import { readJsonInTree } from '../../utils/ast-utils';
import { createTreeWithEmptyWorkspace } from '@nrwl/devkit/testing';
import runCommands from './run-commands';

describe('run-commands', () => {
  let appTree: Tree;
  it('should generate a target', async () => {
    const tree = createTreeWithEmptyWorkspace();
    const opts = {
      name: 'custom',
      project: 'lib',
      command: 'echo 1',
      cwd: '/packages/foo',
      outputs: '/dist/a, /dist/b, /dist/c',
    };

  beforeEach(async () => {
    const t = createEmptyWorkspace(Tree.empty());
    appTree = await runSchematic('lib', { name: 'lib' }, t);
  });
    tree.write('workspace.json', JSON.stringify({ projects: { lib: {} } }));
    await runCommands(opts)(tree);

  it('should generate files', async () => {
    const tree = await runSchematic(
      'run-commands',
      {
        name: 'custom',
        project: 'lib',
        command: 'echo 1',
        cwd: '/packages/foo',
        outputs: '/dist/a, /dist/b, /dist/c',
      },
      appTree
    );
    const workspaceJson = readJsonInTree(tree, '/workspace.json');
    expect(workspaceJson.projects['lib'].architect['custom']).toEqual({
    const customTarget = JSON.parse(tree.read('workspace.json').toString())
      .projects['lib'].architect['custom'];
    expect(customTarget).toEqual({
      builder: '@nrwl/workspace:run-commands',
      outputs: ['/dist/a', '/dist/b', '/dist/c'],
      options: {

@ -1,8 +1,7 @@
import { Rule } from '@angular-devkit/schematics';
import { Schema } from './schema';
import { updateWorkspaceInTree } from '@nrwl/workspace';

export default function (schema: Schema): Rule {
export default function (schema: Schema) {
  return updateWorkspaceInTree((json) => {
    const project = json.projects[schema.project];
    if (!project) {
@ -17,8 +16,9 @@ export default function (schema: Schema): Rule {
      options: {
        command: schema.command,
        cwd: schema.cwd,
        envFile: schema.envFile,
      },
    };
    return json;
  });
  }) as any;
}

@ -4,4 +4,5 @@ export interface Schema {
  project: string;
  cwd?: string;
  outputs?: string;
  envFile?: string;
}

@ -36,6 +36,10 @@
    "outputs": {
      "description": "Allows you to specify where the build artifacts are stored. This allows Nx Cloud to pick them up correctly, in the case that the build artifacts are placed somewhere other than the top level dist folder.",
      "type": "string"
    },
    "envFile": {
      "description": "Env files to be loaded before executing the commands",
      "type": "string"
    }
  },
  "required": ["name", "command", "project"]

@ -513,7 +513,7 @@ export function updateJsonInTree<T = any, O = T>(
export function updateWorkspaceInTree<T = any, O = T>(
  callback: (json: T, context: SchematicContext, host: Tree) => O
): Rule {
  return (host: Tree, context: SchematicContext): Tree => {
  return (host: Tree, context: SchematicContext = undefined): Tree => {
    const path = getWorkspacePath(host);
    host.overwrite(
      path,

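Giving `context` a default of `undefined` lets the rule produced by `updateWorkspaceInTree` be called directly against a tree, without the Angular schematic engine supplying a `SchematicContext`; this is what the rewritten run-commands spec relies on (together with the `as any` cast in run-commands.ts). A minimal sketch, with the invented `renameDefaultProject` rule and the loosely typed `tree` flagged as illustrative:

```ts
// Sketch: one rule, two invocation styles.
import { updateWorkspaceInTree } from '@nrwl/workspace';

const renameDefaultProject = updateWorkspaceInTree((json: any) => {
  json.defaultProject = 'lib'; // illustrative edit
  return json;
});

// 1. Under the schematic engine, which supplies host and context:
//      chain([renameDefaultProject])
// 2. Directly against a tree, now that `context` defaults to undefined:
declare const tree: any; // e.g. a tree built in a test
(renameDefaultProject as any)(tree);
```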
@ -149,6 +149,7 @@ const pkgFiles = [
  'build/npm/workspace/package.json',
  'build/npm/cli/package.json',
  'build/npm/tao/package.json',
  'build/npm/devkit/package.json',
  'build/npm/eslint-plugin-nx/package.json',
  'build/npm/linter/package.json',
  'build/npm/nx-plugin/package.json',

@ -18,7 +18,7 @@ cd build/packages

if [[ "$OSTYPE" == "darwin"* ]]; then
  sed -i "" "s|exports.nxVersion = '\*';|exports.nxVersion = '$NX_VERSION';|g" {react,next,web,jest,node,express,nest,cypress,storybook,angular,workspace}/src/utils/versions.js
  sed -i "" "s|\*|$NX_VERSION|g" {react,next,web,jest,node,express,nest,cypress,storybook,angular,workspace,cli,linter,tao,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
  sed -i "" "s|\*|$NX_VERSION|g" {react,next,web,jest,node,express,nest,cypress,storybook,angular,workspace,cli,linter,tao,devkit,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
  sed -i "" "s|NX_VERSION|$NX_VERSION|g" create-nx-workspace/bin/create-nx-workspace.js
  sed -i "" "s|ANGULAR_CLI_VERSION|$ANGULAR_CLI_VERSION|g" create-nx-workspace/bin/create-nx-workspace.js
  sed -i "" "s|TYPESCRIPT_VERSION|$TYPESCRIPT_VERSION|g" create-nx-workspace/bin/create-nx-workspace.js
@ -29,7 +29,7 @@ if [[ "$OSTYPE" == "darwin"* ]]; then
  sed -i "" "s|PRETTIER_VERSION|$PRETTIER_VERSION|g" create-nx-plugin/bin/create-nx-plugin.js
else
  sed -i "s|exports.nxVersion = '\*';|exports.nxVersion = '$NX_VERSION';|g" {react,next,web,jest,node,express,nest,cypress,storybook,angular,workspace}/src/utils/versions.js
  sed -i "s|\*|$NX_VERSION|g" {react,next,web,jest,node,express,nest,cypress,storybook,angular,workspace,cli,linter,tao,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
  sed -i "s|\*|$NX_VERSION|g" {react,next,web,jest,node,express,nest,cypress,storybook,angular,workspace,cli,linter,tao,devkit,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
  sed -i "s|NX_VERSION|$NX_VERSION|g" create-nx-workspace/bin/create-nx-workspace.js
  sed -i "s|ANGULAR_CLI_VERSION|$ANGULAR_CLI_VERSION|g" create-nx-workspace/bin/create-nx-workspace.js
  sed -i "s|TYPESCRIPT_VERSION|$TYPESCRIPT_VERSION|g" create-nx-workspace/bin/create-nx-workspace.js
@ -42,9 +42,9 @@ fi

if [[ $NX_VERSION == "*" ]]; then
  if [[ "$OSTYPE" == "darwin"* ]]; then
    sed -E -i "" "s|\"@nrwl\/([^\"]+)\": \"\\*\"|\"@nrwl\/\1\": \"file:$PWD\/\1\"|" {jest,web,react,next,node,express,nest,cypress,storybook,angular,workspace,linter,cli,tao,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
    sed -E -i "" "s|\"@nrwl\/([^\"]+)\": \"\\*\"|\"@nrwl\/\1\": \"file:$PWD\/\1\"|" {jest,web,react,next,node,express,nest,cypress,storybook,angular,workspace,linter,cli,tao,devkit,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
  else
    echo $PWD
    sed -E -i "s|\"@nrwl\/([^\"]+)\": \"\\*\"|\"@nrwl\/\1\": \"file:$PWD\/\1\"|" {jest,web,react,next,node,express,nest,cypress,storybook,angular,workspace,linter,cli,tao,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
    sed -E -i "s|\"@nrwl\/([^\"]+)\": \"\\*\"|\"@nrwl\/\1\": \"file:$PWD\/\1\"|" {jest,web,react,next,node,express,nest,cypress,storybook,angular,workspace,linter,cli,tao,devkit,eslint-plugin-nx,create-nx-workspace,create-nx-plugin,nx-plugin}/package.json
  fi
fi

@ -18,6 +18,8 @@
    "paths": {
      "@nrwl/tao": ["./packages/tao"],
      "@nrwl/tao/*": ["./packages/tao/*"],
      "@nrwl/devkit": ["./packages/devkit"],
      "@nrwl/devkit/*": ["./packages/devkit/*"],
      "@nrwl/workspace": ["./packages/workspace"],
      "@nrwl/workspace/*": ["./packages/workspace/*"],
      "@nrwl/cli": ["./packages/cli"],

@ -150,6 +150,82 @@
        }
      }
    },
    "devkit": {
      "root": "packages/devkit",
      "sourceRoot": "packages/devkit",
      "projectType": "library",
      "schematics": {},
      "architect": {
        "test": {
          "builder": "@nrwl/jest:jest",
          "options": {
            "jestConfig": "packages/devkit/jest.config.js",
            "passWithNoTests": true
          }
        },
        "build-base": {
          "builder": "@nrwl/node:package",
          "options": {
            "outputPath": "build/packages/devkit",
            "tsConfig": "packages/devkit/tsconfig.lib.json",
            "packageJson": "packages/devkit/package.json",
            "main": "packages/devkit/index.ts",
            "updateBuildableProjectDepsInPackageJson": false,
            "assets": [
              {
                "input": "packages/devkit",
                "glob": "**/files/**",
                "output": "/"
              },
              {
                "input": "packages/devkit",
                "glob": "**/files/**/.gitkeep",
                "output": "/"
              },
              {
                "input": "packages/devkit",
                "glob": "**/*.json",
                "output": "/"
              },
              {
                "input": "packages/devkit",
                "glob": "**/*.js",
                "output": "/"
              },
              "LICENSE"
            ]
          }
        },
        "build": {
          "builder": "@nrwl/workspace:run-commands",
          "outputs": ["build/packages/devkit"],
          "options": {
            "commands": [
              {
                "command": "nx build-base devkit"
              },
              {
                "command": "node ./scripts/copy-readme.js devkit"
              }
            ],
            "parallel": false
          }
        },
        "lint": {
          "builder": "@nrwl/linter:eslint",
          "options": {
            "lintFilePatterns": [
              "packages/devkit/**/*.ts",
              "packages/devkit/**/*.spec.ts",
              "packages/devkit/**/*.spec.tsx",
              "packages/devkit/**/*.spec.js",
              "packages/devkit/**/*.spec.jsx",
              "packages/devkit/**/*.d.ts"
            ]
          }
        }
      }
    },
    "workspace": {
      "root": "packages/workspace",
      "sourceRoot": "packages/workspace",
