diff --git a/packages/nx/src/migrations/update-15-0-0/prefix-outputs.ts b/packages/nx/src/migrations/update-15-0-0/prefix-outputs.ts index f1cf4da50eeea..3c5f281670bf8 100644 --- a/packages/nx/src/migrations/update-15-0-0/prefix-outputs.ts +++ b/packages/nx/src/migrations/update-15-0-0/prefix-outputs.ts @@ -28,11 +28,7 @@ export default async function (tree: Tree) { continue; } - try { - validateOutputs(target.outputs); - } catch (e) { - target.outputs = transformLegacyOutputs(project.root, e); - } + target.outputs = transformLegacyOutputs(project.root, target.outputs); } try { updateProjectConfiguration(tree, projectName, project); @@ -44,11 +40,10 @@ export default async function (tree: Tree) { (json) => { for (const target of Object.values(json.nx?.targets ?? {})) { if (target.outputs) { - try { - validateOutputs(target.outputs); - } catch (e) { - target.outputs = transformLegacyOutputs(project.root, e); - } + target.outputs = transformLegacyOutputs( + project.root, + target.outputs + ); } } @@ -64,11 +59,7 @@ export default async function (tree: Tree) { if (!target.outputs) { continue; } - try { - validateOutputs(target.outputs); - } catch (e: any) { - target.outputs = transformLegacyOutputs('{projectRoot}', e); - } + target.outputs = transformLegacyOutputs('{projectRoot}', target.outputs); } updateNxJson(tree, nxJson); diff --git a/packages/nx/src/native/cache/expand_outputs.rs b/packages/nx/src/native/cache/expand_outputs.rs index 6a91769831a9e..2f51b5213ac9a 100644 --- a/packages/nx/src/native/cache/expand_outputs.rs +++ b/packages/nx/src/native/cache/expand_outputs.rs @@ -1,9 +1,11 @@ +use hashbrown::HashMap; use std::path::{Path, PathBuf}; use tracing::trace; -use crate::native::glob::{build_glob_set, contains_glob_pattern}; +use crate::native::glob::{build_glob_set, contains_glob_pattern, glob_transform::partition_glob}; +use crate::native::logger::enable_logger; use crate::native::utils::Normalize; -use crate::native::walker::nx_walker_sync; +use 
crate::native::walker::{nx_walker, nx_walker_sync}; #[napi] pub fn expand_outputs(directory: String, entries: Vec) -> anyhow::Result> { @@ -70,6 +72,24 @@ where Ok(found_paths) } +fn partition_globs_into_map(globs: Vec) -> anyhow::Result>> { + globs + .iter() + .map(|glob| partition_glob(glob)) + // Right now we have an iterator where each item is (root: String, patterns: String[]). + // We want a singular root, with the patterns mapped to it. + .fold( + Ok(HashMap::>::new()), + |map_result, parsed_glob| { + let mut map = map_result?; + let (root, patterns) = parsed_glob?; + let entry = map.entry(root).or_insert(vec![]); + entry.extend(patterns); + Ok(map) + }, + ) +} + #[napi] /// Expands the given outputs into a list of existing files. /// This is used when hashing outputs @@ -77,6 +97,8 @@ pub fn get_files_for_outputs( directory: String, entries: Vec, ) -> anyhow::Result> { + enable_logger(); + let directory: PathBuf = directory.into(); let mut globs: Vec = vec![]; @@ -86,7 +108,9 @@ pub fn get_files_for_outputs( let path = directory.join(&entry); if !path.exists() { - globs.push(entry); + if contains_glob_pattern(&entry) { + globs.push(entry); + } } else if path.is_dir() { directories.push(entry); } else { @@ -95,28 +119,41 @@ pub fn get_files_for_outputs( } if !globs.is_empty() { - // todo(jcammisuli): optimize this as nx_walker_sync is very slow on the root directory. 
We need to change this to only search smaller directories - let glob_set = build_glob_set(&globs)?; - let found_paths = nx_walker_sync(&directory, None).filter_map(|path| { - if glob_set.is_match(&path) { - Some(path.to_normalized_string()) - } else { - None - } - }); + let partitioned_globs = partition_globs_into_map(globs)?; + for (root, patterns) in partitioned_globs { + let root_path = directory.join(&root); + let glob_set = build_glob_set(&patterns)?; + trace!("walking directory: {:?}", root_path); + + let found_paths: Vec = nx_walker(&root_path, false) + .filter_map(|file| { + if glob_set.is_match(&file.normalized_path) { + Some( + // root_path contains full directory, + // root is only the leading dirs from glob + PathBuf::from(&root) + .join(&file.normalized_path) + .to_normalized_string(), + ) + } else { + None + } + }) + .collect(); - files.extend(found_paths); + files.extend(found_paths); + } } if !directories.is_empty() { for dir in directories { let dir = PathBuf::from(dir); let dir_path = directory.join(&dir); - let files_in_dir = nx_walker_sync(&dir_path, None).filter_map(|e| { - let path = dir_path.join(&e); + let files_in_dir = nx_walker(&dir_path, false).filter_map(|e| { + let path = dir_path.join(&e.normalized_path); if path.is_file() { - Some(dir.join(e).to_normalized_string()) + Some(dir.join(e.normalized_path).to_normalized_string()) } else { None } @@ -221,4 +258,24 @@ mod test { ] ); } + + #[test] + fn should_get_files_for_outputs_with_glob() { + let temp = setup_fs(); + let entries = vec![ + "packages/nx/src/native/*.node".to_string(), + "folder/nested-folder".to_string(), + "test.txt".to_string(), + ]; + let mut result = get_files_for_outputs(temp.display().to_string(), entries).unwrap(); + result.sort(); + assert_eq!( + result, + vec![ + "folder/nested-folder", + "packages/nx/src/native/nx.darwin-arm64.node", + "test.txt" + ] + ); + } } diff --git a/packages/nx/src/native/cache/mod.rs b/packages/nx/src/native/cache/mod.rs index 
76c528ada9de2..e682df36301fb 100644 --- a/packages/nx/src/native/cache/mod.rs +++ b/packages/nx/src/native/cache/mod.rs @@ -1,3 +1,4 @@ +pub mod cache; pub mod expand_outputs; pub mod file_ops; -pub mod cache; +pub mod validate_outputs; diff --git a/packages/nx/src/native/cache/validate_outputs.rs b/packages/nx/src/native/cache/validate_outputs.rs new file mode 100644 index 0000000000000..18171ab612e63 --- /dev/null +++ b/packages/nx/src/native/cache/validate_outputs.rs @@ -0,0 +1,77 @@ +use itertools::Itertools; +use regex::Regex; + +use crate::native::glob::{contains_glob_pattern, glob_transform::partition_glob}; + +const ALLOWED_WORKSPACE_ROOT_OUTPUT_PREFIXES: [&str; 2] = ["!{workspaceRoot}", "{workspaceRoot}"]; + +fn is_missing_prefix(output: &str) -> bool { + let re = Regex::new(r"^!?\{[\s\S]+\}").expect("Output pattern regex should compile"); + + !re.is_match(output) +} + +#[napi] +pub fn validate_outputs(outputs: Vec) -> anyhow::Result<()> { + let outputs_len = outputs.len(); + let mut missing_prefix = Vec::with_capacity(outputs_len); + let mut workspace_globs = Vec::with_capacity(outputs_len); + + for output in outputs.iter() { + if is_missing_prefix(output) { + missing_prefix.push(output); + } else { + for prefix in ALLOWED_WORKSPACE_ROOT_OUTPUT_PREFIXES.iter() { + if let Some(trimmed) = output.strip_prefix(prefix) { + if contains_glob_pattern(&trimmed) { + let (root, _) = partition_glob(&trimmed)?; + if root.is_empty() { + workspace_globs.push(output); + } + } + } + } + } + } + + if workspace_globs.is_empty() && missing_prefix.is_empty() { + return Ok(()); + } + + let mut error_message = String::new(); + if !missing_prefix.is_empty() { + error_message.push_str(&format!( + "The following outputs are invalid: \n - {}\n\nRun `nx repair` to fix this.", + missing_prefix.iter().join("\n - ") + )); + } + if !workspace_globs.is_empty() { + error_message.push_str(&format!( + "The following outputs are defined by a glob pattern from the workspace root: \n - 
{}\n\nThese can be slow, replace them with a more specific pattern.", + workspace_globs.iter().join("\n - ") + )); + } + + Err(anyhow::anyhow!(error_message)) +} + +#[napi] +pub fn get_transformable_outputs(outputs: Vec) -> Vec { + outputs + .into_iter() + .filter(|output| is_missing_prefix(output)) + .collect() +} + +#[cfg(test)] +mod test { + use super::is_missing_prefix; + + #[test] + fn test_is_missing_prefix() { + assert!(is_missing_prefix("dist")); + assert!(is_missing_prefix("!dist")); + assert!(!is_missing_prefix("{workspaceRoot}/dist")); + assert!(!is_missing_prefix("!{workspaceRoot}/dist")); + } +} diff --git a/packages/nx/src/native/glob.rs b/packages/nx/src/native/glob.rs index 16eab6b07b947..d91ab482d294c 100644 --- a/packages/nx/src/native/glob.rs +++ b/packages/nx/src/native/glob.rs @@ -1,6 +1,6 @@ mod glob_group; mod glob_parser; -mod glob_transform; +pub mod glob_transform; use crate::native::glob::glob_transform::convert_glob; use globset::{GlobBuilder, GlobSet, GlobSetBuilder}; diff --git a/packages/nx/src/native/glob/glob_transform.rs b/packages/nx/src/native/glob/glob_transform.rs index 9ebd7e303bd32..2644f72feb646 100644 --- a/packages/nx/src/native/glob/glob_transform.rs +++ b/packages/nx/src/native/glob/glob_transform.rs @@ -2,6 +2,8 @@ use crate::native::glob::glob_group::GlobGroup; use crate::native::glob::glob_parser::parse_glob; use itertools::Itertools; use std::collections::HashSet; +use itertools::Either::{Left, Right}; +use super::contains_glob_pattern; #[derive(Debug)] enum GlobType { @@ -9,8 +11,7 @@ enum GlobType { Positive(String), } -pub fn convert_glob(glob: &str) -> anyhow::Result> { - let (negated, parsed) = parse_glob(glob)?; +fn convert_glob_segments(negated: bool, parsed: Vec>) -> Vec { let mut built_segments: Vec> = Vec::new(); for (index, glob_segment) in parsed.iter().enumerate() { let is_last = index == parsed.len() - 1; @@ -58,7 +59,12 @@ pub fn convert_glob(glob: &str) -> anyhow::Result> { .into_iter() .collect::>(); 
globs.sort(); - Ok(globs) + globs +} + +pub fn convert_glob(glob: &str) -> anyhow::Result> { + let (negated, parsed) = parse_glob(glob)?; + Ok(convert_glob_segments(negated, parsed)) } fn build_segment( @@ -107,6 +113,31 @@ fn build_segment( } } +pub fn partition_glob(glob: &str) -> anyhow::Result<(String, Vec)> { + let (negated, groups) = parse_glob(glob)?; + // Partition glob into leading directories and patterns that should be matched + let mut has_patterns = false; + let (leading_dir_segments, pattern_segments): (Vec, _) = groups + .into_iter() + .filter(|group| !group.is_empty()) + .partition_map(|group| { + match &group[0] { + GlobGroup::NonSpecial(value) if !contains_glob_pattern(&value) && !has_patterns => { + Left(value.to_string()) + } + _ => { + has_patterns = true; + Right(group) + } + } + }); + + Ok(( + leading_dir_segments.join("/"), + convert_glob_segments(negated, pattern_segments), + )) +} + #[cfg(test)] mod test { use super::convert_glob; @@ -233,4 +264,32 @@ mod test { ] ); } + + #[test] + fn should_partition_glob_with_leading_dirs() { + let (leading_dirs, globs) = super::partition_glob("dist/app/**/!(README|LICENSE).(js|ts)").unwrap(); + assert_eq!(leading_dirs, "dist/app"); + assert_eq!(globs, ["!**/{README,LICENSE}.{js,ts}", "**/*.{js,ts}",]); + } + + #[test] + fn should_partition_glob_with_leading_dirs_and_simple_patterns() { + let (leading_dirs, globs) = super::partition_glob("dist/app/**/*.css").unwrap(); + assert_eq!(leading_dirs, "dist/app"); + assert_eq!(globs, ["**/*.css"]); + } + + #[test] + fn should_partition_glob_with_leading_dirs_dirs_and_patterns() { + let (leading_dirs, globs) = super::partition_glob("dist/app/**/js/*.js").unwrap(); + assert_eq!(leading_dirs, "dist/app"); + assert_eq!(globs, ["**/js/*.js"]); + } + + #[test] + fn should_partition_glob_with_leading_dirs_and_no_patterns() { + let (leading_dirs, globs) = super::partition_glob("dist/app/").unwrap(); + assert_eq!(leading_dirs, "dist/app"); + assert_eq!(globs, [] as 
[String; 0]); + } } diff --git a/packages/nx/src/native/index.d.ts b/packages/nx/src/native/index.d.ts index 3c3c3e5bc20c2..2ab95f69f98a0 100644 --- a/packages/nx/src/native/index.d.ts +++ b/packages/nx/src/native/index.d.ts @@ -150,6 +150,8 @@ export declare export function getBinaryTarget(): string */ export declare export function getFilesForOutputs(directory: string, entries: Array): Array +export declare export function getTransformableOutputs(outputs: Array): Array + export declare export function hashArray(input: Array): string export interface HashDetails { @@ -263,6 +265,8 @@ export interface UpdatedWorkspaceFiles { externalReferences: NxWorkspaceFilesExternals } +export declare export function validateOutputs(outputs: Array): void + export interface WatchEvent { path: string type: EventType diff --git a/packages/nx/src/native/native-bindings.js b/packages/nx/src/native/native-bindings.js index 5a259bf472715..cd921e8e8710f 100644 --- a/packages/nx/src/native/native-bindings.js +++ b/packages/nx/src/native/native-bindings.js @@ -378,10 +378,12 @@ module.exports.expandOutputs = nativeBinding.expandOutputs module.exports.findImports = nativeBinding.findImports module.exports.getBinaryTarget = nativeBinding.getBinaryTarget module.exports.getFilesForOutputs = nativeBinding.getFilesForOutputs +module.exports.getTransformableOutputs = nativeBinding.getTransformableOutputs module.exports.hashArray = nativeBinding.hashArray module.exports.hashFile = nativeBinding.hashFile module.exports.IS_WASM = nativeBinding.IS_WASM module.exports.remove = nativeBinding.remove module.exports.testOnlyTransferFileMap = nativeBinding.testOnlyTransferFileMap module.exports.transferProjectGraph = nativeBinding.transferProjectGraph +module.exports.validateOutputs = nativeBinding.validateOutputs module.exports.WorkspaceErrors = nativeBinding.WorkspaceErrors diff --git a/packages/nx/src/native/plugins/js/ts_import_locators.rs b/packages/nx/src/native/plugins/js/ts_import_locators.rs 
index 109e03dafee4f..8a0ac034135db 100644 --- a/packages/nx/src/native/plugins/js/ts_import_locators.rs +++ b/packages/nx/src/native/plugins/js/ts_import_locators.rs @@ -1480,7 +1480,7 @@ import(myTag`react@${version}`); let root = PathBuf::from(ancestors.next().unwrap()); let glob = build_glob_set(&["**/*.[jt]s"]).unwrap(); - let files = nx_walker(root.clone()) + let files = nx_walker(root.clone(), true) .filter(|file| glob.is_match(&file.full_path)) .map(|file| file.full_path) .collect::>(); diff --git a/packages/nx/src/native/walker.rs b/packages/nx/src/native/walker.rs index 5375f6de794b4..3ae7f57368135 100644 --- a/packages/nx/src/native/walker.rs +++ b/packages/nx/src/native/walker.rs @@ -1,9 +1,10 @@ +use ignore::WalkBuilder; use std::fmt::Debug; use std::path::{Path, PathBuf}; -use ignore::{WalkBuilder}; use crate::native::glob::build_glob_set; +use crate::native::logger::enable_logger; use crate::native::utils::{get_mod_time, Normalize}; use walkdir::WalkDir; @@ -93,7 +94,7 @@ where /// Walk the directory and ignore files from .gitignore and .nxignore #[cfg(not(target_arch = "wasm32"))] -pub fn nx_walker

<P>(directory: P) -> impl Iterator<Item = NxFile> +pub fn nx_walker<P>

(directory: P, use_ignores: bool) -> impl Iterator where P: AsRef, { @@ -102,9 +103,10 @@ where use crossbeam_channel::unbounded; use tracing::trace; + enable_logger(); let directory = directory.as_ref(); - let mut walker = create_walker(directory); + let mut walker = create_walker(directory, use_ignores); let cpus = available_parallelism().map_or(2, |n| n.get()) - 1; @@ -139,7 +141,7 @@ where normalized_path: file_path.to_normalized_string(), mod_time: get_mod_time(&metadata), }) - .ok(); + .ok(); Continue }) @@ -151,9 +153,9 @@ where receiver_thread.join().unwrap() } -fn create_walker

<P>(directory: P) -> WalkBuilder +fn create_walker<P>

(directory: P, use_ignores: bool) -> WalkBuilder where - P: AsRef + P: AsRef, { let directory: PathBuf = directory.as_ref().into(); @@ -164,12 +166,15 @@ where "**/.nx/workspace-data", "**/.yarn/cache", ]) - .expect("These static ignores always build"); + .expect("These static ignores always build"); let mut walker = WalkBuilder::new(&directory); walker.require_git(false); walker.hidden(false); - walker.add_custom_ignore_filename(".nxignore"); + walker.git_ignore(use_ignores); + if use_ignores { + walker.add_custom_ignore_filename(".nxignore"); + } // We should make sure to always ignore node_modules and the .git folder walker.filter_entry(move |entry| { @@ -210,12 +215,12 @@ mod test { #[test] fn it_walks_a_directory() { // handle empty workspaces - let content = nx_walker("/does/not/exist").collect::>(); + let content = nx_walker("/does/not/exist", true).collect::>(); assert!(content.is_empty()); let temp_dir = setup_fs(); - let mut content = nx_walker(&temp_dir).collect::>(); + let mut content = nx_walker(&temp_dir, true).collect::>(); content.sort(); let content = content .into_iter() @@ -282,7 +287,7 @@ nested/child-two/ ) .unwrap(); - let mut file_names = nx_walker(temp_dir) + let mut file_names = nx_walker(temp_dir, true) .map( |NxFile { normalized_path: relative_path, diff --git a/packages/nx/src/native/workspace/files_hashing.rs b/packages/nx/src/native/workspace/files_hashing.rs index b4f836a99e0c1..087d4cedd43fa 100644 --- a/packages/nx/src/native/workspace/files_hashing.rs +++ b/packages/nx/src/native/workspace/files_hashing.rs @@ -10,7 +10,7 @@ use crate::native::walker::{nx_walker, NxFile}; use crate::native::workspace::files_archive::{NxFileHashed, NxFileHashes}; pub fn full_files_hash(workspace_root: &Path) -> NxFileHashes { - let files = nx_walker(workspace_root).collect::>(); + let files = nx_walker(workspace_root, true).collect::>(); trace!("Found {} files", files.len()); hash_files(files).into_iter().collect() } @@ -19,7 +19,7 @@ pub fn 
selective_files_hash( workspace_root: &Path, mut archived_files: NxFileHashes, ) -> NxFileHashes { - let files = nx_walker(workspace_root).collect::>(); + let files = nx_walker(workspace_root, true).collect::>(); let mut archived = vec![]; let mut not_archived = vec![]; let now = std::time::Instant::now(); diff --git a/packages/nx/src/tasks-runner/utils.spec.ts b/packages/nx/src/tasks-runner/utils.spec.ts index eb0bc01b54ffc..a5d6bb7c3953b 100644 --- a/packages/nx/src/tasks-runner/utils.spec.ts +++ b/packages/nx/src/tasks-runner/utils.spec.ts @@ -405,47 +405,28 @@ describe('utils', () => { describe('transformLegacyOutputs', () => { it('should prefix paths with {workspaceRoot}', () => { const outputs = ['dist']; - try { - validateOutputs(outputs); - } catch (e) { - const result = transformLegacyOutputs('myapp', e); - expect(result).toEqual(['{workspaceRoot}/dist']); - } - expect.assertions(1); + const result = transformLegacyOutputs('myapp', outputs); + expect(result).toEqual(['{workspaceRoot}/dist']); }); it('should prefix unix-absolute paths with {workspaceRoot}', () => { const outputs = ['/dist']; - try { - validateOutputs(outputs); - } catch (e) { - const result = transformLegacyOutputs('myapp', e); - expect(result).toEqual(['{workspaceRoot}/dist']); - } - expect.assertions(1); + const result = transformLegacyOutputs('myapp', outputs); + expect(result).toEqual(['{workspaceRoot}/dist']); }); }); it('should prefix relative paths with {projectRoot}', () => { - const outputs = ['/dist']; - try { - validateOutputs(outputs); - } catch (e) { - const result = transformLegacyOutputs('myapp', e); - expect(result).toEqual(['{workspaceRoot}/dist']); - } - expect.assertions(1); + const outputs = ['./dist']; + const result = transformLegacyOutputs('myapp', outputs); + expect(result).toEqual(['{projectRoot}/dist']); }); it('should prefix paths within the project with {projectRoot}', () => { const outputs = ['myapp/dist']; - try { - validateOutputs(outputs); - } catch (e) { - 
const result = transformLegacyOutputs('myapp', e); - expect(result).toEqual(['{projectRoot}/dist']); - } - expect.assertions(1); + + const result = transformLegacyOutputs('myapp', outputs); + expect(result).toEqual(['{projectRoot}/dist']); }); describe('expandDependencyConfigSyntaxSugar', () => { @@ -772,6 +753,36 @@ describe('utils', () => { "The 'outputs' field must contain only strings, but received types: [string, number, object, boolean, object, object]" ); }); + + it('throws an error if the output is a glob pattern from the workspace root', () => { + expect(() => validateOutputs(['{workspaceRoot}/**/dist/*.js'])) + .toThrowErrorMatchingInlineSnapshot(` + "The following outputs are defined by a glob pattern from the workspace root: + - {workspaceRoot}/**/dist/*.js + + These can be slow, replace them with a more specific pattern." + `); + }); + + it("shouldn't throw an error if the output is a glob pattern from the project root", () => { + expect(() => validateOutputs(['{projectRoot}/*.js'])).not.toThrow(); + }); + + it("shouldn't throw an error if the pattern is a glob based in a subdirectory of workspace root", () => { + expect(() => + validateOutputs(['{workspaceRoot}/dist/**/*.js']) + ).not.toThrow(); + }); + + it("throws an error if the output doesn't start with a prefix", () => { + expect(() => validateOutputs(['dist'])) + .toThrowErrorMatchingInlineSnapshot(` + "The following outputs are invalid: + - dist + + Run \`nx repair\` to fix this." 
+ `); + }); }); }); diff --git a/packages/nx/src/tasks-runner/utils.ts b/packages/nx/src/tasks-runner/utils.ts index 938d80404a73d..660c054b39184 100644 --- a/packages/nx/src/tasks-runner/utils.ts +++ b/packages/nx/src/tasks-runner/utils.ts @@ -18,6 +18,10 @@ import { readProjectsConfigurationFromProjectGraph } from '../project-graph/proj import { findMatchingProjects } from '../utils/find-matching-projects'; import { minimatch } from 'minimatch'; import { isGlobPattern } from '../utils/globs'; +import { + getTransformableOutputs, + validateOutputs as nativeValidateOutputs, +} from '../native'; export type NormalizedTargetDependencyConfig = TargetDependencyConfig & { projects: string[]; @@ -253,24 +257,16 @@ function assertOutputsAreValidType(outputs: unknown) { export function validateOutputs(outputs: string[]) { assertOutputsAreValidType(outputs); - const invalidOutputs = new Set(); - - for (const output of outputs) { - if (!/^!?{[\s\S]+}/.test(output)) { - invalidOutputs.add(output); - } - } - if (invalidOutputs.size > 0) { - throw new InvalidOutputsError(outputs, invalidOutputs); - } + nativeValidateOutputs(outputs); } -export function transformLegacyOutputs( - projectRoot: string, - error: InvalidOutputsError -) { - return error.outputs.map((output) => { - if (!error.invalidOutputs.has(output)) { +export function transformLegacyOutputs(projectRoot: string, outputs: string[]) { + const transformableOutputs = new Set(getTransformableOutputs(outputs)); + if (transformableOutputs.size === 0) { + return outputs; + } + return outputs.map((output) => { + if (!transformableOutputs.has(output)) { return output; } diff --git a/packages/nx/src/utils/collapse-expanded-outputs.spec.ts b/packages/nx/src/utils/collapse-expanded-outputs.spec.ts index a0ef185527473..45267e2b98aca 100644 --- a/packages/nx/src/utils/collapse-expanded-outputs.spec.ts +++ b/packages/nx/src/utils/collapse-expanded-outputs.spec.ts @@ -89,10 +89,10 @@ describe('collapseExpandedOutputs', () => { 
it('should collapse long lists of files in nested directories', async () => { const outputs = []; // Create dist/apps/app1/n/m.js + dist/apps/app1/n/m.d.ts - for (let i = 0; i < 6; i++) { + for (let i = 0; i < 6000; i++) { outputs.push(`dist/apps/app1/${i}.js`); outputs.push(`dist/apps/app1/${i}.d.ts`); - for (let j = 0; j < 6; j++) { + for (let j = 0; j < 600; j++) { outputs.push(`dist/apps/app1/${i}/${j}.js`); outputs.push(`dist/apps/app1/${i}/${j}.d.ts`); } diff --git a/packages/storybook/src/generators/convert-to-inferred/convert-to-inferred.spec.ts b/packages/storybook/src/generators/convert-to-inferred/convert-to-inferred.spec.ts index 88009f5474dee..46f21b380b451 100644 --- a/packages/storybook/src/generators/convert-to-inferred/convert-to-inferred.spec.ts +++ b/packages/storybook/src/generators/convert-to-inferred/convert-to-inferred.spec.ts @@ -236,7 +236,7 @@ describe('Storybook - Convert To Inferred', () => { }, "outputs": [ "{projectRoot}/{options.output-dir}", - "{workspaceRoot}/{projectRoot}/storybook-static", + "{projectRoot}/storybook-static", "{options.output-dir}", "{options.outputDir}", "{options.o}", @@ -312,7 +312,7 @@ describe('Storybook - Convert To Inferred', () => { }, "outputs": [ "{projectRoot}/{options.output-dir}", - "{workspaceRoot}/{projectRoot}/storybook-static", + "{projectRoot}/storybook-static", "{options.output-dir}", "{options.outputDir}", "{options.o}", @@ -387,7 +387,7 @@ describe('Storybook - Convert To Inferred', () => { }, "outputs": [ "{projectRoot}/{options.output-dir}", - "{workspaceRoot}/{projectRoot}/storybook-static", + "{projectRoot}/storybook-static", "{options.output-dir}", "{options.outputDir}", "{options.o}", @@ -446,7 +446,7 @@ describe('Storybook - Convert To Inferred', () => { }, "outputs": [ "{projectRoot}/{options.output-dir}", - "{workspaceRoot}/{projectRoot}/storybook-static", + "{projectRoot}/storybook-static", "{options.output-dir}", "{options.outputDir}", "{options.o}", @@ -565,7 +565,7 @@ 
describe('Storybook - Convert To Inferred', () => { }, "outputs": [ "{projectRoot}/{options.output-dir}", - "{workspaceRoot}/{projectRoot}/storybook-static", + "{projectRoot}/storybook-static", "{options.output-dir}", "{options.outputDir}", "{options.o}", @@ -597,7 +597,7 @@ describe('Storybook - Convert To Inferred', () => { }, "outputs": [ "{projectRoot}/{options.output-dir}", - "{workspaceRoot}/{projectRoot}/storybook-static", + "{projectRoot}/storybook-static", "{options.output-dir}", "{options.outputDir}", "{options.o}", diff --git a/packages/storybook/src/plugins/plugin.spec.ts b/packages/storybook/src/plugins/plugin.spec.ts index 7288be2f5c423..89be68b87a141 100644 --- a/packages/storybook/src/plugins/plugin.spec.ts +++ b/packages/storybook/src/plugins/plugin.spec.ts @@ -74,7 +74,7 @@ describe('@nx/storybook/plugin', () => { }, cache: true, outputs: [ - '{workspaceRoot}/{projectRoot}/storybook-static', + '{projectRoot}/storybook-static', '{options.output-dir}', '{options.outputDir}', '{options.o}', @@ -127,7 +127,7 @@ describe('@nx/storybook/plugin', () => { }, cache: true, outputs: [ - '{workspaceRoot}/{projectRoot}/storybook-static', + '{projectRoot}/storybook-static', '{options.output-dir}', '{options.outputDir}', '{options.o}', @@ -192,7 +192,7 @@ describe('@nx/storybook/plugin', () => { }, cache: true, outputs: [ - '{workspaceRoot}/{projectRoot}/storybook-static', + '{projectRoot}/storybook-static', '{options.output-dir}', '{options.outputDir}', '{options.o}', diff --git a/packages/storybook/src/plugins/plugin.ts b/packages/storybook/src/plugins/plugin.ts index abb1352a2e425..7fc597cb24b92 100644 --- a/packages/storybook/src/plugins/plugin.ts +++ b/packages/storybook/src/plugins/plugin.ts @@ -2,16 +2,16 @@ import { CreateDependencies, CreateNodes, CreateNodesContext, - TargetConfiguration, detectPackageManager, joinPathFragments, parseJson, readJsonFile, + TargetConfiguration, writeJsonFile, } from '@nx/devkit'; import { dirname, join } from 'path'; 
import { getNamedInputs } from '@nx/devkit/src/utils/get-named-inputs'; -import { existsSync, readFileSync, readdirSync } from 'fs'; +import { existsSync, readdirSync, readFileSync } from 'fs'; import { calculateHashForCreateNodes } from '@nx/devkit/src/utils/calculate-hash-for-create-nodes'; import { workspaceDataDirectory } from 'nx/src/utils/cache-directory'; import { getLockFileName } from '@nx/js'; @@ -109,7 +109,7 @@ async function buildStorybookTargets( context: CreateNodesContext, projectName: string ) { - const buildOutputs = getOutputs(projectRoot); + const buildOutputs = getOutputs(); const namedInputs = getNamedInputs(projectRoot, context); @@ -274,11 +274,9 @@ async function getStorybookFramework( return typeof framework === 'string' ? framework : framework.name; } -function getOutputs(projectRoot: string): string[] { - const normalizedOutputPath = normalizeOutputPath(projectRoot); - +function getOutputs(): string[] { const outputs = [ - normalizedOutputPath, + `{projectRoot}/storybook-static`, `{options.output-dir}`, `{options.outputDir}`, `{options.o}`, @@ -287,14 +285,6 @@ function getOutputs(projectRoot: string): string[] { return outputs; } -function normalizeOutputPath(projectRoot: string): string | undefined { - if (projectRoot === '.') { - return `{projectRoot}/storybook-static`; - } else { - return `{workspaceRoot}/{projectRoot}/storybook-static`; - } -} - function normalizeOptions( options: StorybookPluginOptions ): StorybookPluginOptions {