diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 02c4526369558..0a5a3f1652e19 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -156,7 +156,7 @@ jobs: with: PATTERNS: | examples/** - turborepo-tests/examples/** + examples-tests/** !**.md !**.mdx @@ -306,8 +306,8 @@ jobs: - name: E2E Tests run: turbo run test --filter=turborepo-tests-e2e - go_examples: - name: Go Cli Examples + turborepo_examples: + name: Turborepo Examples needs: determine_jobs if: needs.determine_jobs.outputs.examples == 'true' timeout-minutes: 30 @@ -320,43 +320,9 @@ jobs: runner: ubuntu-latest - name: macos runner: macos-latest - manager: [yarn, npm] - example: [with-yarn, with-npm, non-monorepo] - include: - - os: - name: ubuntu - runner: ubuntu-latest - manager: pnpm - example: basic - - os: - name: macos - runner: macos-latest - manager: pnpm - example: basic - - os: - name: ubuntu - runner: ubuntu-latest - manager: pnpm - example: kitchen-sink - - os: - name: macos - runner: macos-latest - manager: pnpm - example: kitchen-sink - - os: - name: ubuntu - runner: ubuntu-latest - manager: pnpm - example: with-svelte - - os: - name: macos - runner: macos-latest - manager: pnpm - example: with-svelte - runs-on: ${{ matrix.os.runner }} steps: - # Used by scripts/check-examples.sh + # Used by examples-tests/setup.sh - name: Install Sponge shell: bash run: | @@ -404,12 +370,12 @@ jobs: cache: ${{ matrix.manager }} cache-dependency-path: package.json - - name: Check "${{ matrix.example }}" example with "${{ matrix.manager }}" + - name: Check examples shell: bash # Note: using CLI flags instead of env vars because # env vars would apply to the actual tests that exercise turbo also, # making test output non-deterministic. - run: turbo run test --color --remote-only --token=${{ secrets.TURBO_TOKEN }} --team=${{ vars.TURBO_TEAM }} --filter="turborepo-tests-examples" -- "${{ matrix.example }}" "${{ matrix.manager }}" + run: turbo run example-test --continue --color --remote-only --token=${{ secrets.TURBO_TOKEN }} --team=${{ vars.TURBO_TEAM }} --filter="!@turborepo-examples-tests/pnpm-gatsby" # Re-enable corepack, actions/setup-node invokes other package managers and # that causes corepack to throw an error, so we disable it here.
The "Post" step @@ -459,7 +425,7 @@ jobs: - name: Run tests run: | - turbo run check-types test --filter=...[${{ github.event.pull_request.base.sha || 'HEAD^1' }}] --filter="./packages/*" --color + turbo run check-types test --filter={./packages/*}...[${{ github.event.pull_request.base.sha || 'HEAD^1' }}] --color turbopack_typescript: name: Turbopack TypeScript files @@ -1289,7 +1255,7 @@ jobs: - determine_jobs - go_lint - go_unit - - go_examples + - turborepo_examples - go_e2e - go_integration - js_packages @@ -1327,7 +1293,7 @@ jobs: subjob ${{needs.determine_jobs.result}} "Determining jobs" subjob ${{needs.go_lint.result}} "Go lints" subjob ${{needs.go_unit.result}} "Go unit tests" - subjob ${{needs.go_examples.result}} "Go examples" + subjob ${{needs.turborepo_examples.result}} "Turborepo examples" subjob ${{needs.go_e2e.result}} "Go e2e tests" subjob ${{needs.go_integration.result}} "Go integration tests" subjob ${{needs.js_packages.result}} "JS Package tests" @@ -1441,7 +1407,7 @@ jobs: - determine_jobs - go_lint - go_unit - - go_examples + - turborepo_examples - go_e2e - go_integration - rust_prepare @@ -1482,7 +1448,7 @@ jobs: subjob ${{needs.determine_jobs.result}} "Determining jobs" subjob ${{needs.go_lint.result}} "Go lints" subjob ${{needs.go_unit.result}} "Go unit tests" - subjob ${{needs.go_examples.result}} "Go examples" + subjob ${{needs.turborepo_examples.result}} "Turborepo examples" subjob ${{needs.go_e2e.result}} "Go e2e tests" subjob ${{needs.go_integration.result}} "Go integration tests" subjob ${{needs.rust_prepare.result}} "Rust prepare" diff --git a/.gitignore b/.gitignore index 5883dcd9c8841..7eb13c8cf4e67 100644 --- a/.gitignore +++ b/.gitignore @@ -56,3 +56,6 @@ cli/internal/ffi/libturborepo_ffi*.a # generated by tonic file_descriptor_set.bin + +# include .env files for tests +!**/__fixtures__/**/*.env diff --git a/.vscode/turborepo.code-workspace b/.vscode/turborepo.code-workspace new file mode 100644 index 0000000000000..c32d6a13ff1ad --- /dev/null +++ b/.vscode/turborepo.code-workspace @@ -0,0 +1,11 @@ +{ + "folders": [ + { + "name": "turbo", + "path": "../" + }, + { + "path": "../packages/eslint-plugin-turbo" + } + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e10e60c7c8b31..67dee0284ed13 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -264,6 +264,10 @@ Install `cargo-nextest` (https://nexte.st/): `cargo install cargo-nextest` +Then, install dependencies for testcases: + +`pnpm install -r --side-effects-cache -C crates/turbopack/tests/node-file-trace` + Run via: ```shell diff --git a/Cargo.lock b/Cargo.lock index a694efb1ef94d..5c5d84eec84a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -111,12 +111,6 @@ dependencies = [ "vte", ] -[[package]] -name = "anstyle" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d" - [[package]] name = "any_ascii" version = "0.1.7" @@ -125,9 +119,9 @@ checksum = "70033777eb8b5124a81a1889416543dddef2de240019b674c81285a2635a7e1e" [[package]] name = "anyhow" -version = "1.0.71" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" dependencies = [ "backtrace", ] @@ -182,12 +176,11 @@ dependencies = [ [[package]] name = "assert_cmd" -version = "2.0.11" +version = "2.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d6b683edf8d1119fe420a94f8a7e389239666aa72e65495d91c00462510151" +checksum = "9834fcc22e0874394a010230586367d4a3e9f11b560f469262678547e1d2575e" dependencies = [ - "anstyle", - "bstr 1.4.0", + "bstr", "doc-comment", "predicates", "predicates-core", @@ -325,7 +318,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -391,21 +384,21 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] name = "async-tungstenite" -version = "0.17.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b71b31561643aa8e7df3effe284fa83ab1a840e52294c5f4bd7bfd8b2becbb" +checksum = "8e6acf7e4a267eecbb127ed696bb2d50572c22ba7f586a646321e1798d8336a1" dependencies = [ "futures-io", "futures-util", "log", "pin-project-lite", "tokio", - "tungstenite 0.17.3", + "tungstenite", ] [[package]] @@ -662,15 +655,15 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.18", + "syn 2.0.15", "which", ] [[package]] name = "binding_macros" -version = "0.50.39" +version = "0.50.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d883eaeed1bedd5399cc1e2dfc680a237732ea27a019a2d43804b7a0037c112c" +checksum = "ef3a809ca2ce465f8c44acb578f9730234a0faf9199590cd5e00e537bb2affa2" dependencies = [ "anyhow", "console_error_panic_hook", @@ -763,15 +756,6 @@ dependencies = [ "futures-lite", ] -[[package]] -name = "brownstone" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5839ee4f953e811bfdcf223f509cb2c6a3e1447959b0bff459405575bc17f22" -dependencies = [ - "arrayvec 0.7.2", -] - [[package]] name = "browserslist-rs" version = "0.12.3" @@ -796,17 +780,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "bstr" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", -] - [[package]] name = "bstr" version = "1.4.0" @@ -993,15 +966,16 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chromiumoxide" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5506e432f602b1747e8a0d60ac6607c6977af4ee9720237764170305323e62" +checksum = "1fbef58698a487c253c55c3d17bb1efbe268d2961a2c8278e3f86fff721355fc" dependencies = [ "async-tungstenite", - "base64 0.13.1", + "base64 0.21.0", "cfg-if 1.0.0", "chromiumoxide_cdp", "chromiumoxide_types", + "dunce", "fnv", "futures", "futures-timer", @@ -1018,9 +992,9 @@ dependencies = [ [[package]] name = "chromiumoxide_cdp" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b6988af5c6bbf097999e7db879729dd7b27a62010c482d4922fddeb4f220d4" +checksum = "902b90e019dff479bf5a36ed3961e955afa48c35fb2d4245d0b193e7746d50b9" dependencies = [ "chromiumoxide_pdl", "chromiumoxide_types", @@ -1030,9 +1004,9 @@ dependencies = [ [[package]] name = "chromiumoxide_pdl" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cdf6513e24d260548345a5ef13a04110f5915b7764c274933e10f9363a43e3b" +checksum = 
"cc9319fb29ecce08ac90dd5a798c391f6a8ae1d7c90aff71f3fa27cb3cdfc3ec" dependencies = [ "chromiumoxide_types", "either", @@ -1047,9 +1021,9 @@ dependencies = [ [[package]] name = "chromiumoxide_types" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1af9c183b5aac7f09639cc7b4ddde8a8551850d2c9bf36530830cb10e28e676f" +checksum = "0c9187058637b8e555690935a6d25a1f7af1d71b377fc45b4257712efb34551f" dependencies = [ "serde", "serde_json", @@ -1767,17 +1741,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd4056f63fce3b82d852c3da92b08ea59959890813a7f4ce9c0ff85b10cf301b" dependencies = [ "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] name = "ctrlc" -version = "3.3.0" +version = "3.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04d778600249295e82b6ab12e291ed9029407efee0cfb7baf67157edc65964df" +checksum = "bbcf33c2a618cbe41ee43ae6e9f2e48368cd9f9db2896f10167d8d762679f639" dependencies = [ "nix", - "windows-sys 0.48.0", + "windows-sys 0.45.0", ] [[package]] @@ -1841,7 +1815,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -1858,7 +1832,7 @@ checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -1911,9 +1885,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.4.0" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" +checksum = "23d8666cb01533c39dde32bcbab8e227b4ed6679b2c925eba05feabea39508fb" [[package]] name = "dav1d" @@ -1970,9 +1944,9 @@ dependencies = [ [[package]] name = "dialoguer" -version = "0.10.4" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59c6f2989294b9a498d3ad5491a79c6deb604617378e1cdc4bfc1c1361fe2f87" +checksum = "af3c796f3b0b408d9fd581611b47fa850821fcb84aa640b83a3c1a5be2d691f2" dependencies = [ "console", "fuzzy-matcher", @@ -2001,9 +1975,9 @@ checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" [[package]] name = "digest" -version = "0.10.7" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" dependencies = [ "block-buffer", "crypto-common", @@ -2263,13 +2237,13 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0" dependencies = [ "errno-dragonfly", "libc", - "windows-sys 0.48.0", + "windows-sys 0.45.0", ] [[package]] @@ -2585,7 +2559,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -2764,7 +2738,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" dependencies = [ "aho-corasick 0.7.20", - "bstr 1.4.0", + "bstr", "fnv", "log", "regex", @@ -2781,7 +2755,7 @@ dependencies = [ "thiserror", "turbopath", "walkdir", - "wax 0.5.0", 
+ "wax", ] [[package]] @@ -3111,7 +3085,7 @@ dependencies = [ "pin-project", "tokio", "tokio-tungstenite", - "tungstenite 0.18.0", + "tungstenite", ] [[package]] @@ -3230,12 +3204,6 @@ dependencies = [ "quote", ] -[[package]] -name = "indent_write" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cfe9645a18782869361d9c8732246be7b410ad4e919d3609ebabdac00ba12c3" - [[package]] name = "indexmap" version = "1.9.3" @@ -3344,9 +3312,9 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "1.0.11" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" dependencies = [ "hermit-abi 0.3.1", "libc", @@ -3481,12 +3449,6 @@ dependencies = [ "libc", ] -[[package]] -name = "joinery" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72167d68f5fce3b8655487b8038691a3c9984ee769590f93f2a631f4ad64e4f5" - [[package]] name = "jpeg-decoder" version = "0.3.0" @@ -3698,9 +3660,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.144" +version = "0.2.142" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" +checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317" [[package]] name = "libfuzzer-sys" @@ -3807,9 +3769,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.3.8" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "9b085a4f2cde5781fc4b1717f2e86c62f5cda49de7ba99a7c2eae02b61c9064c" [[package]] name = "lock_api" @@ -3823,10 +3785,11 @@ dependencies = [ [[package]] name = "log" -version = "0.4.18" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ + "cfg-if 1.0.0", "value-bag", ] @@ -4020,7 +3983,7 @@ checksum = "4c65c625186a9bcce6699394bee511e1b1aec689aa7e3be1bf4e996e75834153" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -4034,9 +3997,9 @@ dependencies = [ [[package]] name = "mimalloc-rust" -version = "0.2.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb726c8298efb4010b2c46d8050e4be36cf807b9d9e98cb112f830914fc9bbe" +checksum = "6973866e0bc6504c03a16b6817b7e70839cc8a1dbd5d6dab00c65d8034868d8b" dependencies = [ "cty", "mimalloc-rust-sys", @@ -4044,9 +4007,9 @@ dependencies = [ [[package]] name = "mimalloc-rust-sys" -version = "1.7.9-source" +version = "1.7.6-source" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6413e13241a9809f291568133eca6694572cf528c1a6175502d090adce5dd5db" +checksum = "7a50daf45336b979a202a19f53b4b382f2c4bd50f392a8dbdb4c6c56ba5dfa64" dependencies = [ "cc", "cty", @@ -4369,19 +4332,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "nom-supreme" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bd3ae6c901f1959588759ff51c95d24b491ecb9ff91aa9c2ef4acc5b1dcab27" -dependencies = [ - "brownstone", - "indent_write", - "joinery", - "memchr", - "nom", -] - 
[[package]] name = "noop_proc_macro" version = "0.3.0" @@ -4444,9 +4394,9 @@ dependencies = [ [[package]] name = "ntapi" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" +checksum = "bc51db7b362b205941f71232e56c625156eb9a929f8cf74a428fd5bc094a4afc" dependencies = [ "winapi 0.3.9", ] @@ -4907,22 +4857,22 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.0" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c95a7476719eab1e366eaf73d0260af3021184f18177925b07f54b30089ceead" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.0" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39407670928234ebc5e6e580247dd567ad73a3578460c5990f9503df207e8f07" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 1.0.109", ] [[package]] @@ -5090,11 +5040,10 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "predicates" -version = "3.0.3" +version = "2.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09963355b9f467184c04017ced4a2ba2d75cbcb4e7462690d388233253d4b1a9" +checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" dependencies = [ - "anstyle", "difflib", "itertools", "predicates-core", @@ -5102,15 +5051,15 @@ dependencies = [ [[package]] name = "predicates-core" -version = "1.0.6" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" +checksum = "72f883590242d3c6fc5bf50299011695fa6590c2c70eac95ee1bdb9a733ad1a2" [[package]] name = "predicates-tree" -version = "1.0.9" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +checksum = "54ff541861505aabf6ea722d2131ee980b8276e10a1297b94e896dd8b621850d" dependencies = [ "predicates-core", "termtree", @@ -5148,9 +5097,9 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.1.25" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +checksum = "4ebcd279d20a4a0a2404a33056388e950504d891c855c7975b9a8fef75f3bf04" dependencies = [ "proc-macro2", "syn 1.0.109", @@ -5163,7 +5112,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ceca8aaf45b5c46ec7ed39fff75f57290368c1846d33d24a122ca81416ab058" dependencies = [ "proc-macro2", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -5218,9 +5167,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.59" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" dependencies = [ "unicode-ident", ] @@ -5248,7 +5197,7 @@ dependencies = [ "log", "multimap", "petgraph", - "prettyplease 0.1.25", + "prettyplease 0.1.24", "prost", "prost-types", "regex", @@ 
-5336,9 +5285,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quote" -version = "1.0.28" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] @@ -5557,13 +5506,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.8.3" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390" +checksum = "ac6cf59af1067a3fb53fbe5c88c053764e930f932be1d71d3ffe032cbe147f59" dependencies = [ "aho-corasick 1.0.1", "memchr", - "regex-syntax 0.7.2", + "regex-syntax 0.7.0", ] [[package]] @@ -5583,9 +5532,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +checksum = "b6868896879ba532248f33598de5181522d8b3d9d724dfd230911e1a7d4822f5" [[package]] name = "region" @@ -5850,9 +5799,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.19" +version = "0.37.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "f79bef90eb6d984c72722595b5b1348ab39275a5e5123faca6863bf07d75a4e0" dependencies = [ "bitflags 1.3.2", "errno", @@ -6026,9 +5975,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.163" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" dependencies = [ "serde_derive", ] @@ -6065,13 +6014,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -6188,9 +6137,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.21" +version = "0.9.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9d684e3ec7de3bf5466b32bd75303ac16f0736426e5a4e0d6e489559ce1249c" +checksum = "f82e6c8c047aa50a7328632d067bcae6ef38772a79e28daf32f735e0e4f3dd10" dependencies = [ "indexmap", "itoa", @@ -6683,9 +6632,9 @@ dependencies = [ [[package]] name = "swc" -version = "0.261.39" +version = "0.261.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6af9a982167495f0b84a46f27d8afe6ea2b370951843b40e7e810133878fd5f6" +checksum = "1b59f4e1f5ebb10037de0a3a5c25d2fe7691f8811f42d3549fb81f2b9047205b" dependencies = [ "ahash 0.7.6", "anyhow", @@ -6762,9 +6711,9 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.214.29" +version = "0.214.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e7d99a790a67bf61cd14e3f960c0ff0e2ca042b85fbea05f10db0a9179e4d8" +checksum = "1a22f0573fef09dffc3db7094d29ef9445b4f09b76da650bc870060ca0b8c8a4" dependencies = [ 
"ahash 0.7.6", "anyhow", @@ -6868,9 +6817,9 @@ dependencies = [ [[package]] name = "swc_core" -version = "0.76.46" +version = "0.76.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b4ef70c1c82e3392058c2a497e2fc787ed0df3b92dddbbef2e864d866cfa7f" +checksum = "05ce227c715f658e5f0367f1c75c908672d76de4dc69646e27f5f62e8380f5de" dependencies = [ "binding_macros", "swc", @@ -7156,9 +7105,9 @@ dependencies = [ [[package]] name = "swc_ecma_minifier" -version = "0.181.29" +version = "0.181.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23dcbc596127b44dc9c3a61bbc6214126d9b7fbae81043e22b64f239ee85d73c" +checksum = "0f85eb56b6c5a8ba4e91141602511b9c5a390bad4c8e454bff77d80c2033c265" dependencies = [ "ahash 0.7.6", "arrayvec 0.7.2", @@ -7212,9 +7161,9 @@ dependencies = [ [[package]] name = "swc_ecma_preset_env" -version = "0.195.25" +version = "0.195.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3457c16e26c2995582c5153c1c32e7c3d976196f424dbcea69b71902a7ef7a6" +checksum = "384802032a00bc0e67993e7c3c9a28b82af19c5685eb9d8f93655376f3f823c2" dependencies = [ "ahash 0.7.6", "anyhow", @@ -7267,9 +7216,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms" -version = "0.218.23" +version = "0.218.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d163b122ab67faa638f2e14c51954e1f8582271343585a159bec43804d559318" +checksum = "dbe5ca1c16b4ea9ece9f43a24554edce402641c46b2cc4e418be6c40897572b0" dependencies = [ "swc_atoms", "swc_common", @@ -7393,9 +7342,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_optimization" -version = "0.187.23" +version = "0.187.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07887d34a75fbacf228f93c801c9861a7eebbb1e99747cff526ae2c9c69f136b" +checksum = "3fff2641ba155b3aaa8ef15c1888d75b73e9f10431ec1cb58e66598266199322" dependencies = [ "ahash 0.7.6", "dashmap", @@ -7419,9 +7368,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_proposal" -version = "0.161.22" +version = "0.161.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cdce42d44ef775bc29f5ada3678a80ff72fa17a0ef705e14f63cfd0e0155e0e" +checksum = "593d2c321aa9b29b289d144832d985031ddc453ab1fdf59170d810e2283944f2" dependencies = [ "either", "rustc-hash", @@ -7491,9 +7440,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_typescript" -version = "0.177.23" +version = "0.177.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fe2eea4f5b8a25c93cdaa29fb1ce4108893da88a11e61e04b7f5295b5468829" +checksum = "64f65165a1ef99b7638503fcf6ead4f58b7cfbf3a37c75dfd6f541b491323041" dependencies = [ "serde", "swc_atoms", @@ -7507,9 +7456,9 @@ dependencies = [ [[package]] name = "swc_ecma_usage_analyzer" -version = "0.13.15" +version = "0.13.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62aacc5022f52ae332c6545b9248b70285be1847cf85d48b0640d05f68ff971f" +checksum = "b7da59ebce19380671e76cfe7e9b0cdd6f430b3090c65c24aefcc07ed63739f2" dependencies = [ "ahash 0.7.6", "indexmap", @@ -7781,9 +7730,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.18" +version = "2.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" dependencies = [ "proc-macro2", "quote", @@ -7955,9 +7904,9 
@@ dependencies = [ [[package]] name = "termtree" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" +checksum = "95059e91184749cb66be6dc994f67f182b6d897cb3df74a5bf66b5e709295fd8" [[package]] name = "test-case" @@ -8079,7 +8028,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -8225,9 +8174,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.28.2" +version = "1.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2" +checksum = "d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001" dependencies = [ "autocfg", "bytes", @@ -8240,7 +8189,7 @@ dependencies = [ "socket2", "tokio-macros", "tracing", - "windows-sys 0.48.0", + "windows-sys 0.45.0", ] [[package]] @@ -8255,13 +8204,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.15", ] [[package]] @@ -8305,7 +8254,7 @@ dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.18.0", + "tungstenite", ] [[package]] @@ -8404,7 +8353,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" dependencies = [ - "prettyplease 0.1.25", + "prettyplease 0.1.24", "proc-macro2", "prost-build", "quote", @@ -8483,13 +8432,13 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.24" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 1.0.109", ] [[package]] @@ -8505,9 +8454,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" dependencies = [ "once_cell", "valuable", @@ -8620,25 +8569,6 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b3e06c9b9d80ed6b745c7159c40b311ad2916abb34a49e9be2653b90db0d8dd" -[[package]] -name = "tungstenite" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" -dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha-1", - "thiserror", - "url", - "utf-8", -] - [[package]] name = "tungstenite" version = "0.18.0" @@ -8673,7 +8603,7 @@ dependencies = [ "pretty_assertions", "serde", "serde_json", - "serde_yaml 0.9.21", + "serde_yaml 0.9.19", "tiny-gradient", "tokio-util", "tracing", @@ -8965,7 +8895,7 @@ dependencies = [ 
"serde_json", "tempfile", "tokio", - "tungstenite 0.17.3", + "tungstenite", "turbo-tasks", "turbo-tasks-testing", "turbopack-create-test-app", @@ -9482,13 +9412,13 @@ name = "turbopath" version = "0.1.0" dependencies = [ "anyhow", - "bstr 1.4.0", + "bstr", "dunce", "path-clean 1.0.1", "path-slash", "serde", "thiserror", - "wax 0.5.0", + "wax", ] [[package]] @@ -9527,6 +9457,17 @@ dependencies = [ "zstd", ] +[[package]] +name = "turborepo-env" +version = "0.1.0" +dependencies = [ + "lazy_static", + "regex", + "serde", + "test-case", + "thiserror", +] + [[package]] name = "turborepo-ffi" version = "0.1.0" @@ -9539,6 +9480,7 @@ dependencies = [ "thiserror", "turbopath", "turborepo-cache", + "turborepo-env", "turborepo-fs", "turborepo-lockfiles", "turborepo-scm", @@ -9606,7 +9548,7 @@ dependencies = [ "semver 1.0.17", "serde", "serde_json", - "serde_yaml 0.9.21", + "serde_yaml 0.9.19", "sha2", "shared_child", "sysinfo", @@ -9631,10 +9573,12 @@ dependencies = [ "turbo-updater", "turbopath", "turborepo-api-client", + "turborepo-env", + "turborepo-lockfiles", "uds_windows", "url", "vercel-api-mock", - "wax 0.5.0", + "wax", "webbrowser", "which", ] @@ -9651,7 +9595,7 @@ dependencies = [ "semver 1.0.17", "serde", "serde_json", - "serde_yaml 0.9.21", + "serde_yaml 0.9.19", "test-case", "thiserror", ] @@ -9660,17 +9604,19 @@ dependencies = [ name = "turborepo-scm" version = "0.1.0" dependencies = [ - "bstr 1.4.0", + "bstr", "git2 0.16.1", + "globwalk", "hex", "ignore", + "itertools", "nom", "path-slash", "sha1 0.10.5", "tempfile", "thiserror", "turbopath", - "wax 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "wax", "which", ] @@ -9786,9 +9732,9 @@ checksum = "d70b6494226b36008c8366c288d77190b3fad2eb4c10533139c1c1f461127f1a" [[package]] name = "unicode-ident" -version = "1.0.9" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" [[package]] name = "unicode-linebreak" @@ -9829,9 +9775,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unsafe-libyaml" -version = "0.2.8" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1865806a559042e51ab5414598446a5871b561d21b6764f2eabb0dd481d880a6" +checksum = "ad2024452afd3874bf539695e04af6732ba06517424dbf958fdb16a01f3bef6c" [[package]] name = "untrusted" @@ -9909,9 +9855,13 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" -version = "1.4.0" +version = "1.0.0-alpha.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4d330786735ea358f3bc09eea4caa098569c1c93f342d9aca0514915022fe7e" +checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" +dependencies = [ + "ctor 0.1.26", + "version_check", +] [[package]] name = "vcpkg" @@ -10543,24 +10493,6 @@ dependencies = [ "walkdir", ] -[[package]] -name = "wax" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06c7a3bac6110ac062b7b422a442b7ee23e07209e2784a036654cab1e71bbafc" -dependencies = [ - "bstr 0.2.17", - "const_format", - "itertools", - "nom", - "nom-supreme", - "pori", - "regex", - "smallvec", - "thiserror", - "walkdir", -] - [[package]] name = "web-sys" version = "0.3.61" @@ -11011,9 +10943,9 @@ dependencies = [ [[package]] name = "zeroize" -version = 
"1.6.0" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" [[package]] name = "zstd" diff --git a/Cargo.toml b/Cargo.toml index 63b082d715e12..fa97d3ca7653f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -116,6 +116,7 @@ turbopath = { path = "crates/turborepo-paths" } turborepo = { path = "crates/turborepo" } turborepo-api-client = { path = "crates/turborepo-api-client" } turborepo-cache = { path = "crates/turborepo-cache" } +turborepo-env = { path = "crates/turborepo-env" } turborepo-ffi = { path = "crates/turborepo-ffi" } turborepo-fs = { path = "crates/turborepo-fs" } turborepo-lib = { path = "crates/turborepo-lib" } @@ -130,12 +131,12 @@ vercel-api-mock = { path = "crates/turborepo-vercel-api-mock" } # and some aren't buildable with rustls. reqwest = { version = "0.11.14", default-features = false } -chromiumoxide = { version = "0.4.0", features = [ +chromiumoxide = { version = "0.5.0", features = [ "tokio-runtime", ], default-features = false } # For matching on errors from chromiumoxide. Keep in # sync with chromiumoxide's tungstenite requirement. -tungstenite = "0.17.3" +tungstenite = "0.18.0" anyhow = "1.0.69" assert_cmd = "2.0.8" @@ -193,8 +194,10 @@ serde_json = "1.0.93" serde_qs = "0.11.0" serde_with = "2.3.2" serde_yaml = "0.9.17" +sha2 = "0.10.6" syn = "1.0.107" tempfile = "3.3.0" +test-case = "3.0.0" thiserror = "1.0.38" tiny-gradient = "0.1.0" tokio = "1.25.0" diff --git a/cli/cmd/turbo/version.go b/cli/cmd/turbo/version.go index df3b34821d5eb..2653287ce7a33 100644 --- a/cli/cmd/turbo/version.go +++ b/cli/cmd/turbo/version.go @@ -1,3 +1,3 @@ package main -const turboVersion = "1.10.3-canary.0" +const turboVersion = "1.10.4-canary.2" diff --git a/cli/internal/env/env.go b/cli/internal/env/env.go index f49bd3d0e7746..878cd5e006b19 100644 --- a/cli/internal/env/env.go +++ b/cli/internal/env/env.go @@ -20,7 +20,7 @@ type BySource struct { // DetailedMap contains the composite and the detailed maps of environment variables // All is used as a taskhash input (taskhash.CalculateTaskHash) -// BySoure is used to print out a Dry Run Summary +// BySource is used to print out a Dry Run Summary type DetailedMap struct { All EnvironmentVariableMap BySource BySource diff --git a/cli/internal/env/env_test.go b/cli/internal/env/env_test.go index 7f95ed2b7624a..2ee385cfb2711 100644 --- a/cli/internal/env/env_test.go +++ b/cli/internal/env/env_test.go @@ -4,6 +4,7 @@ import ( "reflect" "testing" + "github.com/vercel/turbo/cli/internal/ffi" "gotest.tools/v3/assert" ) @@ -200,7 +201,12 @@ func TestGetEnvVarsFromWildcards(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := tt.self.FromWildcards(tt.wildcardPatterns) + var rustResult EnvironmentVariableMap + rustResult, rustErr := ffi.FromWildcards(tt.self, tt.wildcardPatterns) + assert.NilError(t, rustErr, "Rust implementation failed.") assert.NilError(t, err, "Did not fail regexp compile.") + + assert.DeepEqual(t, got, rustResult) if !reflect.DeepEqual(got, tt.want) { t.Errorf("GetEnvVarsFromWildcards() = %v, want %v", got, tt.want) } diff --git a/cli/internal/ffi/bindings.h b/cli/internal/ffi/bindings.h index 82c6f24d51df7..a10de89235178 100644 --- a/cli/internal/ffi/bindings.h +++ b/cli/internal/ffi/bindings.h @@ -24,8 +24,14 @@ struct Buffer get_package_file_hashes_from_git_index(struct Buffer buffer); struct Buffer 
get_package_file_hashes_from_processing_git_ignore(struct Buffer buffer); +struct Buffer get_package_file_hashes_from_inputs(struct Buffer buffer); + struct Buffer glob(struct Buffer buffer); +struct Buffer from_wildcards(struct Buffer buffer); + +struct Buffer get_global_hashable_env_vars(struct Buffer buffer); + struct Buffer transitive_closure(struct Buffer buf); struct Buffer subgraph(struct Buffer buf); diff --git a/cli/internal/ffi/ffi.go b/cli/internal/ffi/ffi.go index d34755b4262e3..8f4cb5b157c69 100644 --- a/cli/internal/ffi/ffi.go +++ b/cli/internal/ffi/ffi.go @@ -218,6 +218,8 @@ func toPackageManager(packageManager string) ffi_proto.PackageManager { return ffi_proto.PackageManager_BERRY case "pnpm": return ffi_proto.PackageManager_PNPM + case "yarn": + return ffi_proto.PackageManager_YARN default: panic(fmt.Sprintf("Invalid package manager string: %s", packageManager)) } @@ -384,6 +386,91 @@ func GetPackageFileHashesFromProcessingGitIgnore(rootPath string, packagePath st if err := resp.GetError(); err != "" { return nil, errors.New(err) } + hashes := resp.GetHashes() return hashes.GetHashes(), nil } + +// GetPackageFileHashesFromInputs proxies to rust to walk the filesystem and use git to hash the resulting +// files +func GetPackageFileHashesFromInputs(rootPath string, packagePath string, inputs []string) (map[string]string, error) { + req := ffi_proto.GetPackageFileHashesFromInputsRequest{ + TurboRoot: rootPath, + PackagePath: packagePath, + Inputs: inputs, + } + reqBuf := Marshal(&req) + resBuf := C.get_package_file_hashes_from_inputs(reqBuf) + reqBuf.Free() + + resp := ffi_proto.GetPackageFileHashesFromInputsResponse{} + if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { + panic(err) + } + + if err := resp.GetError(); err != "" { + return nil, errors.New(err) + } + hashes := resp.GetHashes() + return hashes.GetHashes(), nil +} + +// FromWildcards returns an EnvironmentVariableMap containing the variables +// in the environment which match an array of wildcard patterns. +func FromWildcards(environmentMap map[string]string, wildcardPatterns []string) (map[string]string, error) { + if wildcardPatterns == nil { + return nil, nil + } + req := ffi_proto.FromWildcardsRequest{ + EnvVars: &ffi_proto.EnvVarMap{ + Map: environmentMap, + }, + WildcardPatterns: wildcardPatterns, + } + reqBuf := Marshal(&req) + resBuf := C.from_wildcards(reqBuf) + reqBuf.Free() + + resp := ffi_proto.FromWildcardsResponse{} + if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { + panic(err) + } + + if err := resp.GetError(); err != "" { + return nil, errors.New(err) + } + envVarMap := resp.GetEnvVars().GetMap() + // If the map is nil, return an empty map instead of nil + // to match with existing Go code. 
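+ // Deep-equality checks (e.g. the Go/Rust cross-check added in env_test.go above) treat a nil map and an empty map as different values, so the normalization below keeps the Rust path's result in agreement with the Go implementation's empty-map behavior.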
+ if envVarMap == nil { + return map[string]string{}, nil + } + return envVarMap, nil +} + +// GetGlobalHashableEnvVars calculates env var dependencies +func GetGlobalHashableEnvVars(envAtExecutionStart map[string]string, globalEnv []string) (*ffi_proto.DetailedMap, error) { + req := ffi_proto.GetGlobalHashableEnvVarsRequest{ + EnvAtExecutionStart: &ffi_proto.EnvVarMap{Map: envAtExecutionStart}, + GlobalEnv: globalEnv, + } + reqBuf := Marshal(&req) + resBuf := C.get_global_hashable_env_vars(reqBuf) + reqBuf.Free() + + resp := ffi_proto.GetGlobalHashableEnvVarsResponse{} + if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { + panic(err) + } + + if err := resp.GetError(); err != "" { + return nil, errors.New(err) + } + + respDetailedMap := resp.GetDetailedMap() + if respDetailedMap == nil { + return nil, nil + } + + return respDetailedMap, nil +} diff --git a/cli/internal/ffi/proto/messages.pb.go b/cli/internal/ffi/proto/messages.pb.go index 0789e259a426f..538d90d61331c 100644 --- a/cli/internal/ffi/proto/messages.pb.go +++ b/cli/internal/ffi/proto/messages.pb.go @@ -26,6 +26,7 @@ const ( PackageManager_NPM PackageManager = 0 PackageManager_BERRY PackageManager = 1 PackageManager_PNPM PackageManager = 2 + PackageManager_YARN PackageManager = 3 ) // Enum value maps for PackageManager. @@ -34,11 +35,13 @@ var ( 0: "NPM", 1: "BERRY", 2: "PNPM", + 3: "YARN", } PackageManager_value = map[string]int32{ "NPM": 0, "BERRY": 1, "PNPM": 2, + "YARN": 3, } ) @@ -2153,6 +2156,578 @@ func (*GetPackageFileHashesFromProcessingGitIgnoreResponse_Hashes) isGetPackageF func (*GetPackageFileHashesFromProcessingGitIgnoreResponse_Error) isGetPackageFileHashesFromProcessingGitIgnoreResponse_Response() { } +type GetPackageFileHashesFromInputsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + TurboRoot string `protobuf:"bytes,1,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` + PackagePath string `protobuf:"bytes,2,opt,name=package_path,json=packagePath,proto3" json:"package_path,omitempty"` + Inputs []string `protobuf:"bytes,3,rep,name=inputs,proto3" json:"inputs,omitempty"` +} + +func (x *GetPackageFileHashesFromInputsRequest) Reset() { + *x = GetPackageFileHashesFromInputsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetPackageFileHashesFromInputsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetPackageFileHashesFromInputsRequest) ProtoMessage() {} + +func (x *GetPackageFileHashesFromInputsRequest) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[33] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetPackageFileHashesFromInputsRequest.ProtoReflect.Descriptor instead. 
+func (*GetPackageFileHashesFromInputsRequest) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{33} +} + +func (x *GetPackageFileHashesFromInputsRequest) GetTurboRoot() string { + if x != nil { + return x.TurboRoot + } + return "" +} + +func (x *GetPackageFileHashesFromInputsRequest) GetPackagePath() string { + if x != nil { + return x.PackagePath + } + return "" +} + +func (x *GetPackageFileHashesFromInputsRequest) GetInputs() []string { + if x != nil { + return x.Inputs + } + return nil +} + +type GetPackageFileHashesFromInputsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *GetPackageFileHashesFromInputsResponse_Hashes + // *GetPackageFileHashesFromInputsResponse_Error + Response isGetPackageFileHashesFromInputsResponse_Response `protobuf_oneof:"response"` +} + +func (x *GetPackageFileHashesFromInputsResponse) Reset() { + *x = GetPackageFileHashesFromInputsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetPackageFileHashesFromInputsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetPackageFileHashesFromInputsResponse) ProtoMessage() {} + +func (x *GetPackageFileHashesFromInputsResponse) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[34] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetPackageFileHashesFromInputsResponse.ProtoReflect.Descriptor instead. 
+func (*GetPackageFileHashesFromInputsResponse) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{34} +} + +func (m *GetPackageFileHashesFromInputsResponse) GetResponse() isGetPackageFileHashesFromInputsResponse_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *GetPackageFileHashesFromInputsResponse) GetHashes() *FileHashes { + if x, ok := x.GetResponse().(*GetPackageFileHashesFromInputsResponse_Hashes); ok { + return x.Hashes + } + return nil +} + +func (x *GetPackageFileHashesFromInputsResponse) GetError() string { + if x, ok := x.GetResponse().(*GetPackageFileHashesFromInputsResponse_Error); ok { + return x.Error + } + return "" +} + +type isGetPackageFileHashesFromInputsResponse_Response interface { + isGetPackageFileHashesFromInputsResponse_Response() +} + +type GetPackageFileHashesFromInputsResponse_Hashes struct { + Hashes *FileHashes `protobuf:"bytes,1,opt,name=hashes,proto3,oneof"` +} + +type GetPackageFileHashesFromInputsResponse_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*GetPackageFileHashesFromInputsResponse_Hashes) isGetPackageFileHashesFromInputsResponse_Response() { +} + +func (*GetPackageFileHashesFromInputsResponse_Error) isGetPackageFileHashesFromInputsResponse_Response() { +} + +type FromWildcardsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + EnvVars *EnvVarMap `protobuf:"bytes,1,opt,name=env_vars,json=envVars,proto3" json:"env_vars,omitempty"` + WildcardPatterns []string `protobuf:"bytes,2,rep,name=wildcard_patterns,json=wildcardPatterns,proto3" json:"wildcard_patterns,omitempty"` +} + +func (x *FromWildcardsRequest) Reset() { + *x = FromWildcardsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FromWildcardsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FromWildcardsRequest) ProtoMessage() {} + +func (x *FromWildcardsRequest) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[35] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FromWildcardsRequest.ProtoReflect.Descriptor instead. 
+func (*FromWildcardsRequest) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{35} +} + +func (x *FromWildcardsRequest) GetEnvVars() *EnvVarMap { + if x != nil { + return x.EnvVars + } + return nil +} + +func (x *FromWildcardsRequest) GetWildcardPatterns() []string { + if x != nil { + return x.WildcardPatterns + } + return nil +} + +type FromWildcardsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *FromWildcardsResponse_EnvVars + // *FromWildcardsResponse_Error + Response isFromWildcardsResponse_Response `protobuf_oneof:"response"` +} + +func (x *FromWildcardsResponse) Reset() { + *x = FromWildcardsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[36] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FromWildcardsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FromWildcardsResponse) ProtoMessage() {} + +func (x *FromWildcardsResponse) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[36] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FromWildcardsResponse.ProtoReflect.Descriptor instead. +func (*FromWildcardsResponse) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{36} +} + +func (m *FromWildcardsResponse) GetResponse() isFromWildcardsResponse_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *FromWildcardsResponse) GetEnvVars() *EnvVarMap { + if x, ok := x.GetResponse().(*FromWildcardsResponse_EnvVars); ok { + return x.EnvVars + } + return nil +} + +func (x *FromWildcardsResponse) GetError() string { + if x, ok := x.GetResponse().(*FromWildcardsResponse_Error); ok { + return x.Error + } + return "" +} + +type isFromWildcardsResponse_Response interface { + isFromWildcardsResponse_Response() +} + +type FromWildcardsResponse_EnvVars struct { + EnvVars *EnvVarMap `protobuf:"bytes,1,opt,name=env_vars,json=envVars,proto3,oneof"` +} + +type FromWildcardsResponse_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*FromWildcardsResponse_EnvVars) isFromWildcardsResponse_Response() {} + +func (*FromWildcardsResponse_Error) isFromWildcardsResponse_Response() {} + +type EnvVarMap struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Map map[string]string `protobuf:"bytes,1,rep,name=map,proto3" json:"map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *EnvVarMap) Reset() { + *x = EnvVarMap{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnvVarMap) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnvVarMap) ProtoMessage() {} + +func (x *EnvVarMap) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnvVarMap.ProtoReflect.Descriptor instead. +func (*EnvVarMap) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{37} +} + +func (x *EnvVarMap) GetMap() map[string]string { + if x != nil { + return x.Map + } + return nil +} + +type DetailedMap struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + All map[string]string `protobuf:"bytes,1,rep,name=all,proto3" json:"all,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + BySource *BySource `protobuf:"bytes,2,opt,name=by_source,json=bySource,proto3" json:"by_source,omitempty"` +} + +func (x *DetailedMap) Reset() { + *x = DetailedMap{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DetailedMap) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DetailedMap) ProtoMessage() {} + +func (x *DetailedMap) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DetailedMap.ProtoReflect.Descriptor instead. +func (*DetailedMap) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{38} +} + +func (x *DetailedMap) GetAll() map[string]string { + if x != nil { + return x.All + } + return nil +} + +func (x *DetailedMap) GetBySource() *BySource { + if x != nil { + return x.BySource + } + return nil +} + +type BySource struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Explicit map[string]string `protobuf:"bytes,1,rep,name=explicit,proto3" json:"explicit,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Matching map[string]string `protobuf:"bytes,2,rep,name=matching,proto3" json:"matching,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *BySource) Reset() { + *x = BySource{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[39] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BySource) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BySource) ProtoMessage() {} + +func (x *BySource) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[39] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BySource.ProtoReflect.Descriptor instead. 
+func (*BySource) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{39} +} + +func (x *BySource) GetExplicit() map[string]string { + if x != nil { + return x.Explicit + } + return nil +} + +func (x *BySource) GetMatching() map[string]string { + if x != nil { + return x.Matching + } + return nil +} + +type GetGlobalHashableEnvVarsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + EnvAtExecutionStart *EnvVarMap `protobuf:"bytes,1,opt,name=env_at_execution_start,json=envAtExecutionStart,proto3" json:"env_at_execution_start,omitempty"` + GlobalEnv []string `protobuf:"bytes,2,rep,name=global_env,json=globalEnv,proto3" json:"global_env,omitempty"` +} + +func (x *GetGlobalHashableEnvVarsRequest) Reset() { + *x = GetGlobalHashableEnvVarsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[40] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetGlobalHashableEnvVarsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetGlobalHashableEnvVarsRequest) ProtoMessage() {} + +func (x *GetGlobalHashableEnvVarsRequest) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[40] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetGlobalHashableEnvVarsRequest.ProtoReflect.Descriptor instead. +func (*GetGlobalHashableEnvVarsRequest) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{40} +} + +func (x *GetGlobalHashableEnvVarsRequest) GetEnvAtExecutionStart() *EnvVarMap { + if x != nil { + return x.EnvAtExecutionStart + } + return nil +} + +func (x *GetGlobalHashableEnvVarsRequest) GetGlobalEnv() []string { + if x != nil { + return x.GlobalEnv + } + return nil +} + +type GetGlobalHashableEnvVarsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *GetGlobalHashableEnvVarsResponse_DetailedMap + // *GetGlobalHashableEnvVarsResponse_Error + Response isGetGlobalHashableEnvVarsResponse_Response `protobuf_oneof:"response"` +} + +func (x *GetGlobalHashableEnvVarsResponse) Reset() { + *x = GetGlobalHashableEnvVarsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[41] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetGlobalHashableEnvVarsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetGlobalHashableEnvVarsResponse) ProtoMessage() {} + +func (x *GetGlobalHashableEnvVarsResponse) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[41] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetGlobalHashableEnvVarsResponse.ProtoReflect.Descriptor instead. 
+func (*GetGlobalHashableEnvVarsResponse) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{41} +} + +func (m *GetGlobalHashableEnvVarsResponse) GetResponse() isGetGlobalHashableEnvVarsResponse_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *GetGlobalHashableEnvVarsResponse) GetDetailedMap() *DetailedMap { + if x, ok := x.GetResponse().(*GetGlobalHashableEnvVarsResponse_DetailedMap); ok { + return x.DetailedMap + } + return nil +} + +func (x *GetGlobalHashableEnvVarsResponse) GetError() string { + if x, ok := x.GetResponse().(*GetGlobalHashableEnvVarsResponse_Error); ok { + return x.Error + } + return "" +} + +type isGetGlobalHashableEnvVarsResponse_Response interface { + isGetGlobalHashableEnvVarsResponse_Response() +} + +type GetGlobalHashableEnvVarsResponse_DetailedMap struct { + DetailedMap *DetailedMap `protobuf:"bytes,1,opt,name=detailed_map,json=detailedMap,proto3,oneof"` +} + +type GetGlobalHashableEnvVarsResponse_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*GetGlobalHashableEnvVarsResponse_DetailedMap) isGetGlobalHashableEnvVarsResponse_Response() {} + +func (*GetGlobalHashableEnvVarsResponse_Error) isGetGlobalHashableEnvVarsResponse_Response() {} + var File_turborepo_ffi_messages_proto protoreflect.FileDescriptor var file_turborepo_ffi_messages_proto_rawDesc = []byte{ @@ -2392,12 +2967,88 @@ var file_turborepo_ffi_messages_proto_rawDesc = []byte{ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x48, 0x00, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x81, 0x01, 0x0a, 0x25, 0x47, 0x65, 0x74, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x46, 0x69, + 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x46, 0x72, 0x6f, 0x6d, 0x49, 0x6e, 0x70, 0x75, + 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x74, 0x75, 0x72, + 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x74, + 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x73, 0x22, 0x73, 0x0a, 0x26, 0x47, 0x65, 0x74, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, + 0x65, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x46, 0x72, 0x6f, 0x6d, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, + 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, + 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x48, 0x00, 0x52, 0x06, 0x68, 0x61, + 0x73, 0x68, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, + 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6a, 0x0a, 0x14, 0x46, 0x72, 0x6f, 0x6d, + 0x57, 0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 
+ 0x12, 0x25, 0x0a, 0x08, 0x65, 0x6e, 0x76, 0x5f, 0x76, 0x61, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, 0x52, 0x07, + 0x65, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x77, 0x69, 0x6c, 0x64, 0x63, + 0x61, 0x72, 0x64, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x10, 0x77, 0x69, 0x6c, 0x64, 0x63, 0x61, 0x72, 0x64, 0x50, 0x61, 0x74, 0x74, + 0x65, 0x72, 0x6e, 0x73, 0x22, 0x64, 0x0a, 0x15, 0x46, 0x72, 0x6f, 0x6d, 0x57, 0x69, 0x6c, 0x64, + 0x63, 0x61, 0x72, 0x64, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, + 0x08, 0x65, 0x6e, 0x76, 0x5f, 0x76, 0x61, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x0a, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x07, 0x65, + 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, + 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6a, 0x0a, 0x09, 0x45, 0x6e, + 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, 0x12, 0x25, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, + 0x2e, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x6d, 0x61, 0x70, 0x1a, 0x36, + 0x0a, 0x08, 0x4d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x96, 0x01, 0x0a, 0x0b, 0x44, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x12, 0x27, 0x0a, 0x03, 0x61, 0x6c, 0x6c, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4d, 0x61, + 0x70, 0x2e, 0x41, 0x6c, 0x6c, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x61, 0x6c, 0x6c, 0x12, + 0x26, 0x0a, 0x09, 0x62, 0x79, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x08, 0x62, + 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x36, 0x0a, 0x08, 0x41, 0x6c, 0x6c, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0xee, 0x01, 0x0a, 0x08, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x33, 0x0a, 0x08, + 0x65, 0x78, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, + 0x2e, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6c, 0x69, 0x63, + 0x69, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x65, 0x78, 0x70, 0x6c, 0x69, 0x63, 0x69, + 0x74, 0x12, 0x33, 0x0a, 0x08, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x42, 0x79, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4d, + 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x61, + 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x1a, 0x3b, 0x0a, 0x0d, 0x45, 0x78, 0x70, 0x6c, 0x69, 0x63, + 0x69, 0x74, 0x45, 0x6e, 
0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x1a, 0x3b, 0x0a, 0x0d, 0x4d, 0x61, 0x74, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x22, 0x81, 0x01, 0x0a, 0x1f, 0x47, 0x65, 0x74, 0x47, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x48, 0x61, + 0x73, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x3f, 0x0a, 0x16, 0x65, 0x6e, 0x76, 0x5f, 0x61, 0x74, 0x5f, 0x65, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x4d, 0x61, 0x70, + 0x52, 0x13, 0x65, 0x6e, 0x76, 0x41, 0x74, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x5f, + 0x65, 0x6e, 0x76, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x67, 0x6c, 0x6f, 0x62, 0x61, + 0x6c, 0x45, 0x6e, 0x76, 0x22, 0x79, 0x0a, 0x20, 0x47, 0x65, 0x74, 0x47, 0x6c, 0x6f, 0x62, 0x61, + 0x6c, 0x48, 0x61, 0x73, 0x68, 0x61, 0x62, 0x6c, 0x65, 0x45, 0x6e, 0x76, 0x56, 0x61, 0x72, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, 0x0a, 0x0c, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, + 0x2e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x0b, + 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4d, 0x61, 0x70, 0x12, 0x16, 0x0a, 0x05, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, - 0x2e, 0x0a, 0x0e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, + 0x38, 0x0a, 0x0e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x72, 0x12, 0x07, 0x0a, 0x03, 0x4e, 0x50, 0x4d, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x42, 0x45, - 0x52, 0x52, 0x59, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x50, 0x4e, 0x50, 0x4d, 0x10, 0x02, 0x42, - 0x0b, 0x5a, 0x09, 0x66, 0x66, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x52, 0x52, 0x59, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x50, 0x4e, 0x50, 0x4d, 0x10, 0x02, 0x12, + 0x08, 0x0a, 0x04, 0x59, 0x41, 0x52, 0x4e, 0x10, 0x03, 0x42, 0x0b, 0x5a, 0x09, 0x66, 0x66, 0x69, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2413,7 +3064,7 @@ func file_turborepo_ffi_messages_proto_rawDescGZIP() []byte { } var file_turborepo_ffi_messages_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 37) +var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 50) var file_turborepo_ffi_messages_proto_goTypes = []interface{}{ (PackageManager)(0), // 0: PackageManager (*TurboDataDirResp)(nil), // 1: TurboDataDirResp @@ -2449,37 +3100,60 @@ var 
file_turborepo_ffi_messages_proto_goTypes = []interface{}{ (*GetPackageFileHashesFromGitIndexResponse)(nil), // 31: GetPackageFileHashesFromGitIndexResponse (*GetPackageFileHashesFromProcessingGitIgnoreRequest)(nil), // 32: GetPackageFileHashesFromProcessingGitIgnoreRequest (*GetPackageFileHashesFromProcessingGitIgnoreResponse)(nil), // 33: GetPackageFileHashesFromProcessingGitIgnoreResponse - nil, // 34: WorkspaceDependencies.DependenciesEntry - nil, // 35: TransitiveDepsRequest.WorkspacesEntry - nil, // 36: AdditionalBerryData.ResolutionsEntry - nil, // 37: FileHashes.HashesEntry + (*GetPackageFileHashesFromInputsRequest)(nil), // 34: GetPackageFileHashesFromInputsRequest + (*GetPackageFileHashesFromInputsResponse)(nil), // 35: GetPackageFileHashesFromInputsResponse + (*FromWildcardsRequest)(nil), // 36: FromWildcardsRequest + (*FromWildcardsResponse)(nil), // 37: FromWildcardsResponse + (*EnvVarMap)(nil), // 38: EnvVarMap + (*DetailedMap)(nil), // 39: DetailedMap + (*BySource)(nil), // 40: BySource + (*GetGlobalHashableEnvVarsRequest)(nil), // 41: GetGlobalHashableEnvVarsRequest + (*GetGlobalHashableEnvVarsResponse)(nil), // 42: GetGlobalHashableEnvVarsResponse + nil, // 43: WorkspaceDependencies.DependenciesEntry + nil, // 44: TransitiveDepsRequest.WorkspacesEntry + nil, // 45: AdditionalBerryData.ResolutionsEntry + nil, // 46: FileHashes.HashesEntry + nil, // 47: EnvVarMap.MapEntry + nil, // 48: DetailedMap.AllEntry + nil, // 49: BySource.ExplicitEntry + nil, // 50: BySource.MatchingEntry } var file_turborepo_ffi_messages_proto_depIdxs = []int32{ 4, // 0: GlobResp.files:type_name -> GlobRespList 7, // 1: ChangedFilesResp.files:type_name -> ChangedFilesList 10, // 2: PackageDependencyList.list:type_name -> PackageDependency - 34, // 3: WorkspaceDependencies.dependencies:type_name -> WorkspaceDependencies.DependenciesEntry + 43, // 3: WorkspaceDependencies.dependencies:type_name -> WorkspaceDependencies.DependenciesEntry 0, // 4: TransitiveDepsRequest.package_manager:type_name -> PackageManager - 35, // 5: TransitiveDepsRequest.workspaces:type_name -> TransitiveDepsRequest.WorkspacesEntry + 44, // 5: TransitiveDepsRequest.workspaces:type_name -> TransitiveDepsRequest.WorkspacesEntry 15, // 6: TransitiveDepsRequest.resolutions:type_name -> AdditionalBerryData 12, // 7: TransitiveDepsResponse.dependencies:type_name -> WorkspaceDependencies - 36, // 8: AdditionalBerryData.resolutions:type_name -> AdditionalBerryData.ResolutionsEntry + 45, // 8: AdditionalBerryData.resolutions:type_name -> AdditionalBerryData.ResolutionsEntry 16, // 9: LockfilePackageList.list:type_name -> LockfilePackage 0, // 10: SubgraphRequest.package_manager:type_name -> PackageManager 15, // 11: SubgraphRequest.resolutions:type_name -> AdditionalBerryData 0, // 12: PatchesRequest.package_manager:type_name -> PackageManager 22, // 13: PatchesResponse.patches:type_name -> Patches 0, // 14: GlobalChangeRequest.package_manager:type_name -> PackageManager - 37, // 15: FileHashes.hashes:type_name -> FileHashes.HashesEntry + 46, // 15: FileHashes.hashes:type_name -> FileHashes.HashesEntry 30, // 16: GetPackageFileHashesFromGitIndexResponse.hashes:type_name -> FileHashes 30, // 17: GetPackageFileHashesFromProcessingGitIgnoreResponse.hashes:type_name -> FileHashes - 17, // 18: WorkspaceDependencies.DependenciesEntry.value:type_name -> LockfilePackageList - 11, // 19: TransitiveDepsRequest.WorkspacesEntry.value:type_name -> PackageDependencyList - 20, // [20:20] is the sub-list for method output_type - 20, // [20:20] is the 
sub-list for method input_type - 20, // [20:20] is the sub-list for extension type_name - 20, // [20:20] is the sub-list for extension extendee - 0, // [0:20] is the sub-list for field type_name + 30, // 18: GetPackageFileHashesFromInputsResponse.hashes:type_name -> FileHashes + 38, // 19: FromWildcardsRequest.env_vars:type_name -> EnvVarMap + 38, // 20: FromWildcardsResponse.env_vars:type_name -> EnvVarMap + 47, // 21: EnvVarMap.map:type_name -> EnvVarMap.MapEntry + 48, // 22: DetailedMap.all:type_name -> DetailedMap.AllEntry + 40, // 23: DetailedMap.by_source:type_name -> BySource + 49, // 24: BySource.explicit:type_name -> BySource.ExplicitEntry + 50, // 25: BySource.matching:type_name -> BySource.MatchingEntry + 38, // 26: GetGlobalHashableEnvVarsRequest.env_at_execution_start:type_name -> EnvVarMap + 39, // 27: GetGlobalHashableEnvVarsResponse.detailed_map:type_name -> DetailedMap + 17, // 28: WorkspaceDependencies.DependenciesEntry.value:type_name -> LockfilePackageList + 11, // 29: TransitiveDepsRequest.WorkspacesEntry.value:type_name -> PackageDependencyList + 30, // [30:30] is the sub-list for method output_type + 30, // [30:30] is the sub-list for method input_type + 30, // [30:30] is the sub-list for extension type_name + 30, // [30:30] is the sub-list for extension extendee + 0, // [0:30] is the sub-list for field type_name } func init() { file_turborepo_ffi_messages_proto_init() } @@ -2884,6 +3558,114 @@ func file_turborepo_ffi_messages_proto_init() { return nil } } + file_turborepo_ffi_messages_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetPackageFileHashesFromInputsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetPackageFileHashesFromInputsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FromWildcardsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FromWildcardsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnvVarMap); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DetailedMap); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*BySource); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*GetGlobalHashableEnvVarsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GetGlobalHashableEnvVarsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_turborepo_ffi_messages_proto_msgTypes[2].OneofWrappers = []interface{}{ (*GlobResp_Files)(nil), @@ -2926,13 +3708,25 @@ func file_turborepo_ffi_messages_proto_init() { (*GetPackageFileHashesFromProcessingGitIgnoreResponse_Hashes)(nil), (*GetPackageFileHashesFromProcessingGitIgnoreResponse_Error)(nil), } + file_turborepo_ffi_messages_proto_msgTypes[34].OneofWrappers = []interface{}{ + (*GetPackageFileHashesFromInputsResponse_Hashes)(nil), + (*GetPackageFileHashesFromInputsResponse_Error)(nil), + } + file_turborepo_ffi_messages_proto_msgTypes[36].OneofWrappers = []interface{}{ + (*FromWildcardsResponse_EnvVars)(nil), + (*FromWildcardsResponse_Error)(nil), + } + file_turborepo_ffi_messages_proto_msgTypes[41].OneofWrappers = []interface{}{ + (*GetGlobalHashableEnvVarsResponse_DetailedMap)(nil), + (*GetGlobalHashableEnvVarsResponse_Error)(nil), + } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_turborepo_ffi_messages_proto_rawDesc, NumEnums: 1, - NumMessages: 37, + NumMessages: 50, NumExtensions: 0, NumServices: 0, }, diff --git a/cli/internal/graph/graph.go b/cli/internal/graph/graph.go index bda7364c1d367..c834faec3870e 100644 --- a/cli/internal/graph/graph.go +++ b/cli/internal/graph/graph.go @@ -4,13 +4,13 @@ package graph import ( gocontext "context" "fmt" - "path/filepath" "regexp" "sort" "strings" "github.com/hashicorp/go-hclog" "github.com/pyr-sh/dag" + "github.com/vercel/turbo/cli/internal/env" "github.com/vercel/turbo/cli/internal/fs" "github.com/vercel/turbo/cli/internal/nodes" "github.com/vercel/turbo/cli/internal/runsummary" @@ -123,8 +123,15 @@ func (g *CompleteGraph) GetPackageTaskVisitor( expandedInputs := g.TaskHashTracker.GetExpandedInputs(packageTask) framework := g.TaskHashTracker.GetFramework(taskID) - logFile := repoRelativeLogFile(pkgDir, taskName) - packageTask.LogFile = logFile + logFileAbsolutePath := taskLogFile(g.RepoRoot, pkgDir, taskName) + + var logFileRelativePath string + if logRelative, err := logFileAbsolutePath.RelativeTo(g.RepoRoot); err == nil { + logFileRelativePath = logRelative.ToString() + } + + // Give packageTask a string version of the logFile relative to root. 
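+ // For example (hypothetical layout): with RepoRoot "/repo" and pkgDir
+ // "apps/web", a "build" task logs to "/repo/apps/web/.turbo/turbo-build.log",
+ // so packageTask.LogFile becomes "apps/web/.turbo/turbo-build.log".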
+ packageTask.LogFile = logFileRelativePath packageTask.Command = command envVarPassThroughMap, err := g.TaskHashTracker.EnvAtExecutionStart.FromWildcards(taskDefinition.PassThroughEnv) @@ -145,7 +152,8 @@ func (g *CompleteGraph) GetPackageTaskVisitor( Dir: pkgDir.ToString(), Outputs: taskDefinition.Outputs.Inclusions, ExcludedOutputs: taskDefinition.Outputs.Exclusions, - LogFile: logFile, + LogFileRelativePath: logFileRelativePath, + LogFileAbsolutePath: logFileAbsolutePath, ResolvedTaskDefinition: taskDefinition, ExpandedInputs: expandedInputs, ExpandedOutputs: []turbopath.AnchoredSystemPath{}, @@ -158,8 +166,8 @@ func (g *CompleteGraph) GetPackageTaskVisitor( Env: specifiedEnvVarsPresentation, PassThroughEnv: taskDefinition.PassThroughEnv, }, - Configured: envVars.BySource.Explicit.ToSecretHashable(), - Inferred: envVars.BySource.Matching.ToSecretHashable(), + Configured: env.EnvironmentVariableMap(envVars.BySource.Explicit).ToSecretHashable(), + Inferred: env.EnvironmentVariableMap(envVars.BySource.Matching).ToSecretHashable(), PassThrough: envVarPassThroughMap.ToSecretHashable(), }, DotEnv: taskDefinition.DotEnv, @@ -228,10 +236,10 @@ func (g *CompleteGraph) GetPackageJSONFromWorkspace(workspaceName string) (*fs.P return nil, fmt.Errorf("No package.json for %s", workspaceName) } -// repoRelativeLogFile returns the path to the log file for this task execution as a -// relative path from the root of the monorepo. -func repoRelativeLogFile(dir turbopath.AnchoredSystemPath, taskName string) string { - return filepath.Join(dir.ToStringDuringMigration(), ".turbo", fmt.Sprintf("turbo-%v.log", taskName)) +// taskLogFile returns the path to the log file for this task execution as an absolute path +func taskLogFile(root turbopath.AbsoluteSystemPath, dir turbopath.AnchoredSystemPath, taskName string) turbopath.AbsoluteSystemPath { + pkgDir := dir.RestoreAnchor(root) + return pkgDir.UntypedJoin(".turbo", fmt.Sprintf("turbo-%v.log", taskName)) } // getTaskGraphAncestors gets all the ancestors for a given task in the graph. diff --git a/cli/internal/hashing/package_deps_hash.go b/cli/internal/hashing/package_deps_hash.go index 00ad038299f33..87580d8699738 100644 --- a/cli/internal/hashing/package_deps_hash.go +++ b/cli/internal/hashing/package_deps_hash.go @@ -13,7 +13,6 @@ import ( "github.com/pkg/errors" "github.com/vercel/turbo/cli/internal/encoding/gitoutput" "github.com/vercel/turbo/cli/internal/fs" - "github.com/vercel/turbo/cli/internal/globby" "github.com/vercel/turbo/cli/internal/turbopath" "github.com/vercel/turbo/cli/internal/util" ) @@ -27,72 +26,6 @@ type PackageDepsOptions struct { InputPatterns []string } -func getPackageFileHashesFromInputs(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { - absolutePackagePath := packagePath.RestoreAnchor(rootPath) - // Add all the checked in hashes. - - // make a copy of the inputPatterns array, because we may be appending to it later. - calculatedInputs := make([]string, len(inputs)) - copy(calculatedInputs, inputs) - - // Add in package.json and turbo.json to input patterns. Both file paths are relative to pkgPath - // - // - package.json is an input because if the `scripts` in - // the package.json change (i.e. the tasks that turbo executes), we want - // a cache miss, since any existing cache could be invalid. - // - turbo.json because it's the definition of the tasks themselves. The root turbo.json - // is similarly included in the global hash. 
This file may not exist in the workspace, but - // that is ok, because it will get ignored downstream. - calculatedInputs = append(calculatedInputs, "package.json") - calculatedInputs = append(calculatedInputs, "turbo.json") - - // The input patterns are relative to the package. - // However, we need to change the globbing to be relative to the repo root. - // Prepend the package path to each of the input patterns. - prefixedInputPatterns := []string{} - prefixedExcludePatterns := []string{} - for _, pattern := range calculatedInputs { - if len(pattern) > 0 && pattern[0] == '!' { - rerooted, err := rootPath.PathTo(absolutePackagePath.UntypedJoin(pattern[1:])) - if err != nil { - return nil, err - } - prefixedExcludePatterns = append(prefixedExcludePatterns, rerooted) - } else { - rerooted, err := rootPath.PathTo(absolutePackagePath.UntypedJoin(pattern)) - if err != nil { - return nil, err - } - prefixedInputPatterns = append(prefixedInputPatterns, rerooted) - } - } - absoluteFilesToHash, err := globby.GlobFiles(rootPath.ToStringDuringMigration(), prefixedInputPatterns, prefixedExcludePatterns) - - if err != nil { - return nil, errors.Wrapf(err, "failed to resolve input globs %v", calculatedInputs) - } - - filesToHash := make([]turbopath.AnchoredSystemPath, len(absoluteFilesToHash)) - for i, rawPath := range absoluteFilesToHash { - relativePathString, err := absolutePackagePath.RelativePathString(rawPath) - - if err != nil { - return nil, errors.Wrapf(err, "not relative to package: %v", rawPath) - } - - filesToHash[i] = turbopath.AnchoredSystemPathFromUpstream(relativePathString) - } - - // Note that in this scenario, we don't need to check git status. - // We're hashing the current state, not state at a commit. - result, err := GetHashesForFiles(absolutePackagePath, filesToHash) - if err != nil { - return nil, errors.Wrap(err, "failed hashing resolved inputs globs") - } - - return result, nil -} - // GetPackageFileHashes Builds an object containing git hashes for the files under the specified `packagePath` folder. func GetPackageFileHashes(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { if len(inputs) == 0 { @@ -264,33 +197,6 @@ func manuallyHashFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath. return hashObject, nil } -// runGitCommand provides boilerplate command handling for `ls-tree`, `ls-files`, and `status` -// Rather than doing string processing, it does stream processing of `stdout`. -func runGitCommand(cmd *exec.Cmd, commandName string, handler func(io.Reader) *gitoutput.Reader) ([][]string, error) { - stdoutPipe, pipeError := cmd.StdoutPipe() - if pipeError != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, pipeError) - } - - startError := cmd.Start() - if startError != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, startError) - } - - reader := handler(stdoutPipe) - entries, readErr := reader.ReadAll() - if readErr != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, readErr) - } - - waitErr := cmd.Wait() - if waitErr != nil { - return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, waitErr) - } - - return entries, nil -} - // getTraversePath gets the distance of the current working directory to the repository root. // This is used to convert repo-relative paths to cwd-relative paths. 
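// For example (hypothetical): if the current working directory is
// "/repo/packages/ui", the traverse path back to the repository root is "../../".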
// diff --git a/cli/internal/hashing/package_deps_hash_go.go b/cli/internal/hashing/package_deps_hash_go.go index a029d9bec0595..c7a739c0d1893 100644 --- a/cli/internal/hashing/package_deps_hash_go.go +++ b/cli/internal/hashing/package_deps_hash_go.go @@ -5,13 +5,16 @@ package hashing import ( "fmt" + "io" "os/exec" "strings" + "github.com/pkg/errors" gitignore "github.com/sabhiram/go-gitignore" "github.com/vercel/turbo/cli/internal/doublestar" "github.com/vercel/turbo/cli/internal/encoding/gitoutput" "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/globby" "github.com/vercel/turbo/cli/internal/turbopath" ) @@ -241,3 +244,96 @@ type statusCode struct { func (s statusCode) isDelete() bool { return s.x == "D" || s.y == "D" } + +func getPackageFileHashesFromInputs(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { + absolutePackagePath := packagePath.RestoreAnchor(rootPath) + // Add all the checked in hashes. + + // make a copy of the inputPatterns array, because we may be appending to it later. + calculatedInputs := make([]string, len(inputs)) + copy(calculatedInputs, inputs) + + // Add in package.json and turbo.json to input patterns. Both file paths are relative to pkgPath + // + // - package.json is an input because if the `scripts` in + // the package.json change (i.e. the tasks that turbo executes), we want + // a cache miss, since any existing cache could be invalid. + // - turbo.json because it's the definition of the tasks themselves. The root turbo.json + // is similarly included in the global hash. This file may not exist in the workspace, but + // that is ok, because it will get ignored downstream. + calculatedInputs = append(calculatedInputs, "package.json") + calculatedInputs = append(calculatedInputs, "turbo.json") + + // The input patterns are relative to the package. + // However, we need to change the globbing to be relative to the repo root. + // Prepend the package path to each of the input patterns. + prefixedInputPatterns := []string{} + prefixedExcludePatterns := []string{} + for _, pattern := range calculatedInputs { + if len(pattern) > 0 && pattern[0] == '!' { + rerooted, err := rootPath.PathTo(absolutePackagePath.UntypedJoin(pattern[1:])) + if err != nil { + return nil, err + } + prefixedExcludePatterns = append(prefixedExcludePatterns, rerooted) + } else { + rerooted, err := rootPath.PathTo(absolutePackagePath.UntypedJoin(pattern)) + if err != nil { + return nil, err + } + prefixedInputPatterns = append(prefixedInputPatterns, rerooted) + } + } + absoluteFilesToHash, err := globby.GlobFiles(rootPath.ToStringDuringMigration(), prefixedInputPatterns, prefixedExcludePatterns) + + if err != nil { + return nil, errors.Wrapf(err, "failed to resolve input globs %v", calculatedInputs) + } + + filesToHash := make([]turbopath.AnchoredSystemPath, len(absoluteFilesToHash)) + for i, rawPath := range absoluteFilesToHash { + relativePathString, err := absolutePackagePath.RelativePathString(rawPath) + + if err != nil { + return nil, errors.Wrapf(err, "not relative to package: %v", rawPath) + } + + filesToHash[i] = turbopath.AnchoredSystemPathFromUpstream(relativePathString) + } + + // Note that in this scenario, we don't need to check git status. + // We're hashing the current state, not state at a commit. 
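+ // For example (hypothetical): for inputs ["src/**"] in "packages/ui", the
+ // returned map is anchored to the package, e.g.
+ // {"src/index.ts": "<git object hash>", "package.json": "<git object hash>"}.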
+ result, err := GetHashesForFiles(absolutePackagePath, filesToHash) + if err != nil { + return nil, errors.Wrap(err, "failed hashing resolved inputs globs") + } + + return result, nil +} + +// runGitCommand provides boilerplate command handling for `ls-tree`, `ls-files`, and `status` +// Rather than doing string processing, it does stream processing of `stdout`. +func runGitCommand(cmd *exec.Cmd, commandName string, handler func(io.Reader) *gitoutput.Reader) ([][]string, error) { + stdoutPipe, pipeError := cmd.StdoutPipe() + if pipeError != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, pipeError) + } + + startError := cmd.Start() + if startError != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, startError) + } + + reader := handler(stdoutPipe) + entries, readErr := reader.ReadAll() + if readErr != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, readErr) + } + + waitErr := cmd.Wait() + if waitErr != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, waitErr) + } + + return entries, nil +} diff --git a/cli/internal/hashing/package_deps_hash_rust.go b/cli/internal/hashing/package_deps_hash_rust.go index d258fbc9b9398..dc34bc87c6f90 100644 --- a/cli/internal/hashing/package_deps_hash_rust.go +++ b/cli/internal/hashing/package_deps_hash_rust.go @@ -33,3 +33,16 @@ func getPackageFileHashesFromProcessingGitIgnore(rootPath turbopath.AbsoluteSyst } return hashes, nil } + +func getPackageFileHashesFromInputs(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath, inputs []string) (map[turbopath.AnchoredUnixPath]string, error) { + rawHashes, err := ffi.GetPackageFileHashesFromInputs(rootPath.ToString(), packagePath.ToString(), inputs) + if err != nil { + return nil, err + } + + hashes := make(map[turbopath.AnchoredUnixPath]string, len(rawHashes)) + for rawPath, hash := range rawHashes { + hashes[turbopath.AnchoredUnixPathFromUpstream(rawPath)] = hash + } + return hashes, nil +} diff --git a/cli/internal/lockfile/lockfile.go b/cli/internal/lockfile/lockfile.go index 7aaa75b746dd1..717bdfcdeb979 100644 --- a/cli/internal/lockfile/lockfile.go +++ b/cli/internal/lockfile/lockfile.go @@ -82,6 +82,9 @@ func AllTransitiveClosures( if lf, ok := lockFile.(*PnpmLockfile); ok { return rustTransitiveDeps(lf.contents, "pnpm", workspaces, nil) } + if lf, ok := lockFile.(*YarnLockfile); ok { + return rustTransitiveDeps(lf.contents, "yarn", workspaces, nil) + } g := new(errgroup.Group) c := make(chan closureMsg, len(workspaces)) diff --git a/cli/internal/lockfile/yarn_lockfile.go b/cli/internal/lockfile/yarn_lockfile.go index 99d776441a5fa..7eae477683537 100644 --- a/cli/internal/lockfile/yarn_lockfile.go +++ b/cli/internal/lockfile/yarn_lockfile.go @@ -1,82 +1,50 @@ package lockfile import ( - "bytes" - "fmt" "io" - "github.com/andybalholm/crlf" - "github.com/iseki0/go-yarnlock" - "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/ffi" "github.com/vercel/turbo/cli/internal/turbopath" ) -var _crlfLiteral = []byte("\r\n") - // YarnLockfile representation of yarn lockfile type YarnLockfile struct { - inner yarnlock.LockFile - hasCRLF bool + contents []byte } var _ Lockfile = (*YarnLockfile)(nil) // ResolvePackage Given a package and version returns the key, resolved version, and if it was found func (l *YarnLockfile) ResolvePackage(_workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) { - for _, key := range yarnPossibleKeys(name, version) 
{ - if entry, ok := (l.inner)[key]; ok { - return Package{ - Found: true, - Key: key, - Version: entry.Version, - }, nil - } - } - - return Package{}, nil + // This is only used when calculating the transitive deps, but Rust + // implementations do this calculation on the Rust side. + panic("Unreachable") } // AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package func (l *YarnLockfile) AllDependencies(key string) (map[string]string, bool) { - deps := map[string]string{} - entry, ok := (l.inner)[key] - if !ok { - return deps, false - } - - for name, version := range entry.Dependencies { - deps[name] = version - } - for name, version := range entry.OptionalDependencies { - deps[name] = version - } - - return deps, true + // This is only used when calculating the transitive deps, but Rust + // implementations do this calculation on the Rust side. + panic("Unreachable") } // Subgraph Given a list of lockfile keys returns a Lockfile based off the original one that only contains the packages given -func (l *YarnLockfile) Subgraph(_ []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) { - lockfile := make(map[string]yarnlock.LockFileEntry, len(packages)) - for _, key := range packages { - entry, ok := (l.inner)[key] - if ok { - lockfile[key] = entry - } +func (l *YarnLockfile) Subgraph(workspacePackages []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) { + workspaces := make([]string, len(workspacePackages)) + for i, workspace := range workspacePackages { + workspaces[i] = workspace.ToUnixPath().ToString() } - - return &YarnLockfile{lockfile, l.hasCRLF}, nil + contents, err := ffi.Subgraph("yarn", l.contents, workspaces, packages, nil) + if err != nil { + return nil, err + } + return &YarnLockfile{contents: contents}, nil } // Encode encode the lockfile representation and write it to the given writer func (l *YarnLockfile) Encode(w io.Writer) error { - writer := w - if l.hasCRLF { - writer = crlf.NewWriter(w) - } - if err := l.inner.Encode(writer); err != nil { - return errors.Wrap(err, "Unable to encode yarn.lock") - } - return nil + _, err := w.Write(l.contents) + return err } // Patches return a list of patches used in the lockfile @@ -86,24 +54,7 @@ func (l *YarnLockfile) Patches() []turbopath.AnchoredUnixPath { // DecodeYarnLockfile Takes the contents of a yarn lockfile and returns a struct representation func DecodeYarnLockfile(contents []byte) (*YarnLockfile, error) { - lockfile, err := yarnlock.ParseLockFileData(contents) - hasCRLF := bytes.HasSuffix(contents, _crlfLiteral) - newline := []byte("\n") - - // there's no trailing newline for this file, need to inspect more to see newline style - if !hasCRLF && !bytes.HasSuffix(contents, newline) { - firstNewline := bytes.IndexByte(contents, newline[0]) - if firstNewline != -1 && firstNewline != 0 { - byteBeforeNewline := contents[firstNewline-1] - hasCRLF = byteBeforeNewline == '\r' - } - } - - if err != nil { - return nil, errors.Wrap(err, "Unable to decode yarn.lock") - } - - return &YarnLockfile{lockfile, hasCRLF}, nil + return &YarnLockfile{contents: contents}, nil } // GlobalChange checks if there are any differences between lockfiles that would completely invalidate @@ -112,13 +63,3 @@ func (l *YarnLockfile) GlobalChange(other Lockfile) bool { _, ok := other.(*YarnLockfile) return !ok } - -func yarnPossibleKeys(name string, version string) []string { - return []string{ - fmt.Sprintf("%v@%v", name, version), - fmt.Sprintf("%v@npm:%v", name,
version), - fmt.Sprintf("%v@file:%v", name, version), - fmt.Sprintf("%v@workspace:%v", name, version), - fmt.Sprintf("%v@yarn:%v", name, version), - } -} diff --git a/cli/internal/lockfile/yarn_lockfile_test.go b/cli/internal/lockfile/yarn_lockfile_test.go deleted file mode 100644 index ef4fcb0732a27..0000000000000 --- a/cli/internal/lockfile/yarn_lockfile_test.go +++ /dev/null @@ -1,51 +0,0 @@ -package lockfile - -import ( - "bytes" - "testing" - - "gotest.tools/v3/assert" -) - -func TestRoundtrip(t *testing.T) { - content, err := getFixture(t, "yarn.lock") - if err != nil { - t.Error(err) - } - - lockfile, err := DecodeYarnLockfile(content) - if err != nil { - t.Error(err) - } - - var b bytes.Buffer - if err := lockfile.Encode(&b); err != nil { - t.Error(err) - } - - assert.DeepEqual(t, string(content), b.String()) -} - -func TestKeySplitting(t *testing.T) { - content, err := getFixture(t, "yarn.lock") - if err != nil { - t.Error(err) - } - - lockfile, err := DecodeYarnLockfile(content) - if err != nil { - t.Error(err) - } - - // @babel/types has multiple entries, these should all appear in the lockfile struct - keys := []string{ - "@babel/types@^7.18.10", - "@babel/types@^7.18.6", - "@babel/types@^7.19.0", - } - - for _, key := range keys { - _, ok := lockfile.inner[key] - assert.Assert(t, ok, "Unable to find entry for %s in parsed lockfile", key) - } -} diff --git a/cli/internal/run/global_hash.go b/cli/internal/run/global_hash.go index 2c8e15f81d643..b70fe1e67cdb5 100644 --- a/cli/internal/run/global_hash.go +++ b/cli/internal/run/global_hash.go @@ -107,39 +107,7 @@ func getGlobalHashInputs( frameworkInference bool, dotEnv turbopath.AnchoredUnixPathArray, ) (GlobalHashableInputs, error) { - // Calculate env var dependencies - - // Our "inferred" env var maps - defaultEnvVarMap, err := envAtExecutionStart.FromWildcards(_defaultEnvVars) - if err != nil { - return GlobalHashableInputs{}, err - } - userEnvVarSet, err := envAtExecutionStart.FromWildcardsUnresolved(globalEnv) - if err != nil { - return GlobalHashableInputs{}, err - } - - allEnvVarMap := env.EnvironmentVariableMap{} - allEnvVarMap.Union(userEnvVarSet.Inclusions) - allEnvVarMap.Union(defaultEnvVarMap) - allEnvVarMap.Difference(userEnvVarSet.Exclusions) - - explicitEnvVarMap := env.EnvironmentVariableMap{} - explicitEnvVarMap.Union(userEnvVarSet.Inclusions) - explicitEnvVarMap.Difference(userEnvVarSet.Exclusions) - - matchingEnvVarMap := env.EnvironmentVariableMap{} - matchingEnvVarMap.Union(defaultEnvVarMap) - matchingEnvVarMap.Difference(userEnvVarSet.Exclusions) - - globalHashableEnvVars := env.DetailedMap{ - All: allEnvVarMap, - BySource: env.BySource{ - Explicit: explicitEnvVarMap, - Matching: matchingEnvVarMap, - }, - } - + globalHashableEnvVars, err := getGlobalHashableEnvVars(envAtExecutionStart, globalEnv) if err != nil { return GlobalHashableInputs{}, err } diff --git a/cli/internal/run/global_hash_go.go b/cli/internal/run/global_hash_go.go new file mode 100644 index 0000000000000..f5fe6f035112f --- /dev/null +++ b/cli/internal/run/global_hash_go.go @@ -0,0 +1,42 @@ +//go:build go || !rust +// +build go !rust + +package run + +import "github.com/vercel/turbo/cli/internal/env" + +// `getGlobalHashableEnvVars` calculates env var dependencies +func getGlobalHashableEnvVars(envAtExecutionStart env.EnvironmentVariableMap, globalEnv []string) (env.DetailedMap, error) { + // Our "inferred" env var maps + defaultEnvVarMap, err := envAtExecutionStart.FromWildcards(_defaultEnvVars) + if err != nil { + return env.DetailedMap{}, 
err + } + userEnvVarSet, err := envAtExecutionStart.FromWildcardsUnresolved(globalEnv) + if err != nil { + return env.DetailedMap{}, err + } + + allEnvVarMap := env.EnvironmentVariableMap{} + allEnvVarMap.Union(userEnvVarSet.Inclusions) + allEnvVarMap.Union(defaultEnvVarMap) + allEnvVarMap.Difference(userEnvVarSet.Exclusions) + + explicitEnvVarMap := env.EnvironmentVariableMap{} + explicitEnvVarMap.Union(userEnvVarSet.Inclusions) + explicitEnvVarMap.Difference(userEnvVarSet.Exclusions) + + matchingEnvVarMap := env.EnvironmentVariableMap{} + matchingEnvVarMap.Union(defaultEnvVarMap) + matchingEnvVarMap.Difference(userEnvVarSet.Exclusions) + + globalHashableEnvVars := env.DetailedMap{ + All: allEnvVarMap, + BySource: env.BySource{ + Explicit: explicitEnvVarMap, + Matching: matchingEnvVarMap, + }, + } + + return globalHashableEnvVars, nil +} diff --git a/cli/internal/run/global_hash_rust.go b/cli/internal/run/global_hash_rust.go new file mode 100644 index 0000000000000..fc9b2c1d36404 --- /dev/null +++ b/cli/internal/run/global_hash_rust.go @@ -0,0 +1,38 @@ +//go:build rust +// +build rust + +package run + +import ( + "github.com/vercel/turbo/cli/internal/env" + + "github.com/vercel/turbo/cli/internal/ffi" +) + +// `getGlobalHashableEnvVars` calculates env var dependencies +func getGlobalHashableEnvVars(envAtExecutionStart env.EnvironmentVariableMap, globalEnv []string) (env.DetailedMap, error) { + respDetailedMap, err := ffi.GetGlobalHashableEnvVars(envAtExecutionStart, globalEnv) + if err != nil { + return env.DetailedMap{}, err + } + + // We set explicit and matching to empty maps if they are nil + // to preserve existing behavior from the Go code + explicit := respDetailedMap.GetBySource().GetExplicit() + if explicit == nil { + explicit = make(map[string]string) + } + + matching := respDetailedMap.GetBySource().GetMatching() + if matching == nil { + matching = make(map[string]string) + } + detailedMap := env.DetailedMap{ + All: respDetailedMap.GetAll(), + BySource: env.BySource{ + Explicit: explicit, + Matching: matching, + }, + } + return detailedMap, nil +} diff --git a/cli/internal/run/global_hash_test.go b/cli/internal/run/global_hash_test.go new file mode 100644 index 0000000000000..c1841a4e4baef --- /dev/null +++ b/cli/internal/run/global_hash_test.go @@ -0,0 +1,75 @@ +package run + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/vercel/turbo/cli/internal/env" +) + +func TestGetGlobalHashableEnvVars(t *testing.T) { + testCases := []struct { + name string + envAtExecutionStart env.EnvironmentVariableMap + globalEnv []string + expectedMap env.DetailedMap + }{ + { + name: "has default env var", + envAtExecutionStart: env.EnvironmentVariableMap{ + "VERCEL_ANALYTICS_ID": "123", + }, + globalEnv: []string{}, + expectedMap: env.DetailedMap{ + All: map[string]string{ + "VERCEL_ANALYTICS_ID": "123", + }, + BySource: env.BySource{ + Matching: map[string]string{ + "VERCEL_ANALYTICS_ID": "123", + }, + Explicit: map[string]string{}, + }, + }, + }, + { + name: "has global env wildcard", + envAtExecutionStart: env.EnvironmentVariableMap{ + "FOO_BAR": "123", + }, + globalEnv: []string{"FOO*"}, + expectedMap: env.DetailedMap{ + All: map[string]string{ + "FOO_BAR": "123", + }, + BySource: env.BySource{ + Matching: map[string]string{}, + Explicit: map[string]string{ + "FOO_BAR": "123", + }, + }, + }, + }, + { + name: "has global env wildcard but also excluded", + envAtExecutionStart: env.EnvironmentVariableMap{ + "FOO_BAR": "123", + }, + globalEnv: []string{"FOO*", 
"!FOO_BAR"}, + expectedMap: env.DetailedMap{ + BySource: env.BySource{ + Matching: map[string]string{}, + Explicit: map[string]string{}, + }, + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + result, err := getGlobalHashableEnvVars(testCase.envAtExecutionStart, testCase.globalEnv) + assert.NoError(t, err) + assert.Equal(t, testCase.expectedMap, result) + }) + } +} diff --git a/cli/internal/run/run_spec.go b/cli/internal/run/run_spec.go index 8350ee56f0c2b..d1faa37309b5f 100644 --- a/cli/internal/run/run_spec.go +++ b/cli/internal/run/run_spec.go @@ -69,6 +69,9 @@ func (o *Opts) SynthesizeCommand(tasks []string) string { cmd += " --dry" } } + if o.runOpts.Only { + cmd += " --only" + } if len(o.runOpts.PassThroughArgs) > 0 { cmd += " -- " + strings.Join(o.runOpts.PassThroughArgs, " ") } diff --git a/cli/internal/run/run_spec_test.go b/cli/internal/run/run_spec_test.go index 2bcfe2b2475d2..51cdd2917e2c1 100644 --- a/cli/internal/run/run_spec_test.go +++ b/cli/internal/run/run_spec_test.go @@ -11,6 +11,7 @@ func TestSynthesizeCommand(t *testing.T) { testCases := []struct { filterPatterns []string legacyFilter scope.LegacyFilter + only bool passThroughArgs []string parallel bool continueOnError bool @@ -24,6 +25,17 @@ func TestSynthesizeCommand(t *testing.T) { tasks: []string{"build"}, expected: "turbo run build --filter=my-app", }, + { + tasks: []string{"build"}, + only: true, + expected: "turbo run build --only", + }, + { + filterPatterns: []string{"my-app"}, + tasks: []string{"build"}, + + expected: "turbo run build --filter=my-app", + }, { filterPatterns: []string{"my-app"}, tasks: []string{"build"}, @@ -95,6 +107,7 @@ func TestSynthesizeCommand(t *testing.T) { ContinueOnError: testCase.continueOnError, DryRun: testCase.dryRun, DryRunJSON: testCase.dryRunJSON, + Only: testCase.only, }, } cmd := o.SynthesizeCommand(testCase.tasks) diff --git a/cli/internal/runsummary/format_text.go b/cli/internal/runsummary/format_text.go index 619789d025f93..022bce85bd081 100644 --- a/cli/internal/runsummary/format_text.go +++ b/cli/internal/runsummary/format_text.go @@ -82,7 +82,7 @@ func (rsm Meta) FormatAndPrintText(workspaceInfos workspace.Catalog) error { fmt.Fprintln(w, util.Sprintf(" ${GREY}Command\t=\t%s\t${RESET}", task.Command)) fmt.Fprintln(w, util.Sprintf(" ${GREY}Outputs\t=\t%s\t${RESET}", strings.Join(task.Outputs, ", "))) - fmt.Fprintln(w, util.Sprintf(" ${GREY}Log File\t=\t%s\t${RESET}", task.LogFile)) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Log File\t=\t%s\t${RESET}", task.LogFileRelativePath)) fmt.Fprintln(w, util.Sprintf(" ${GREY}Dependencies\t=\t%s\t${RESET}", strings.Join(task.Dependencies, ", "))) fmt.Fprintln(w, util.Sprintf(" ${GREY}Dependendents\t=\t%s\t${RESET}", strings.Join(task.Dependents, ", "))) fmt.Fprintln(w, util.Sprintf(" ${GREY}Inputs Files Considered\t=\t%d\t${RESET}", len(task.ExpandedInputs))) diff --git a/cli/internal/runsummary/task_summary.go b/cli/internal/runsummary/task_summary.go index 5282a9faf41d3..1973a752255df 100644 --- a/cli/internal/runsummary/task_summary.go +++ b/cli/internal/runsummary/task_summary.go @@ -1,8 +1,6 @@ package runsummary import ( - "os" - "github.com/vercel/turbo/cli/internal/cache" "github.com/vercel/turbo/cli/internal/fs" "github.com/vercel/turbo/cli/internal/turbopath" @@ -67,7 +65,8 @@ type TaskSummary struct { CommandArguments []string `json:"cliArguments"` Outputs []string `json:"outputs"` ExcludedOutputs []string `json:"excludedOutputs"` - LogFile string `json:"logFile"` + 
LogFileRelativePath string `json:"logFile"` + LogFileAbsolutePath turbopath.AbsoluteSystemPath `json:"-"` Dir string `json:"directory,omitempty"` Dependencies []string `json:"dependencies"` Dependents []string `json:"dependents"` @@ -80,9 +79,9 @@ type TaskSummary struct { Execution *TaskExecutionSummary `json:"execution,omitempty"` // omit when it's not set } -// GetLogs reads the Logfile and returns the data +// GetLogs reads the log file and returns the data func (ts *TaskSummary) GetLogs() []byte { - bytes, err := os.ReadFile(ts.LogFile) + bytes, err := ts.LogFileAbsolutePath.ReadFile() if err != nil { return []byte{} } diff --git a/cli/internal/scope/filter/filter.go b/cli/internal/scope/filter/filter.go index 60aaf1d854ba5..8df70326f3103 100644 --- a/cli/internal/scope/filter/filter.go +++ b/cli/internal/scope/filter/filter.go @@ -335,7 +335,7 @@ func (r *Resolver) filterSubtreesWithSelector(selector *TargetSelector) (util.Se for name, pkg := range r.WorkspaceInfos.PackageJSONs { if parentDir == "" { entryPackages.Add(name) - } else if matches, err := doublestar.PathMatch(parentDir.ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { + } else if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) } else if matches { entryPackages.Add(name) diff --git a/cli/internal/scope/filter/filter_test.go b/cli/internal/scope/filter/filter_test.go index a23ae1d898206..92d01dad4ad76 100644 --- a/cli/internal/scope/filter/filter_test.go +++ b/cli/internal/scope/filter/filter_test.go @@ -600,6 +600,17 @@ func Test_SCM(t *testing.T) { }, []string{"package-3"}, }, + { + "match dependency subtree", + []*TargetSelector{ + { + fromRef: "HEAD~1", + parentDir: "package-*", + matchDependencies: true, + }, + }, + []string{"package-1", "package-2"}, + }, } for _, tc := range testCases { diff --git a/cli/turbo.json b/cli/turbo.json index d6232d40421a9..228c251c8b8af 100644 --- a/cli/turbo.json +++ b/cli/turbo.json @@ -14,9 +14,11 @@ ], "inputs": [ - "**/*.go", - "**/*_test.go", - "../crates/turborepo*/**" // Rust crates + "{internal,cmd}/**/*.go", + "!**/*_test.go", + "../crates/turborepo*/**/*.rs", // Rust crates + "../crates/turborepo*/Cargo.toml", + "!../crates/**/target" ] }, "e2e": { diff --git a/crates/turbo-tasks-macros-tests/tests/value_debug.rs b/crates/turbo-tasks-macros-tests/tests/value_debug.rs index 3ff34eaaf522e..9daf7f42cb403 100644 --- a/crates/turbo-tasks-macros-tests/tests/value_debug.rs +++ b/crates/turbo-tasks-macros-tests/tests/value_debug.rs @@ -16,7 +16,7 @@ async fn ignored_indexes() { let input = IgnoredIndexes(-1, 2, -3); let debug = input.value_debug_format(usize::MAX).try_to_string().await?; assert!(!debug.contains("-1")); - assert!(debug.contains("2")); + assert!(debug.contains('2')); assert!(!debug.contains("-3")); } } diff --git a/crates/turbo-tasks-macros/src/derive/task_input_macro.rs b/crates/turbo-tasks-macros/src/derive/task_input_macro.rs index 88b514d8ed27a..144f2b39fe169 100644 --- a/crates/turbo-tasks-macros/src/derive/task_input_macro.rs +++ b/crates/turbo-tasks-macros/src/derive/task_input_macro.rs @@ -76,9 +76,9 @@ pub fn derive_task_input(input: TokenStream) -> TokenStream { let (fields_destructuring, try_from_expansion, from_expansion) = expand_fields( &variant.ident, &variant.fields, - &expand_named, - &expand_unnamed, - &expand_unit, + expand_named, + 
expand_unnamed, + expand_unit, ); variants_idents.push(variant_ident); variants_fields_len.push(variant.fields.len()); @@ -101,7 +101,7 @@ pub fn derive_task_input(input: TokenStream) -> TokenStream { }; let variants_discriminants: Vec<_> = (0..variants_idents.len()) - .map(|i| Literal::usize_unsuffixed(i)) + .map(Literal::usize_unsuffixed) .collect(); ( @@ -144,7 +144,7 @@ pub fn derive_task_input(input: TokenStream) -> TokenStream { } Data::Struct(DataStruct { fields, .. }) => { let (destructuring, try_from_expansion, from_expansion) = - expand_fields(ident, fields, &expand_named, &expand_unnamed, &expand_unit); + expand_fields(ident, fields, expand_named, expand_unnamed, expand_unit); let fields_len = fields.len(); ( diff --git a/crates/turbo-tasks/src/graph/graph_store.rs b/crates/turbo-tasks/src/graph/graph_store.rs index 77e590ffe0749..30c43b85faeee 100644 --- a/crates/turbo-tasks/src/graph/graph_store.rs +++ b/crates/turbo-tasks/src/graph/graph_store.rs @@ -58,7 +58,7 @@ where { pub fn new(store: StoreImpl) -> Self { Self { - store: store, + store, visited: Default::default(), } } diff --git a/crates/turbopack-bench/src/util/mod.rs b/crates/turbopack-bench/src/util/mod.rs index 16a95bf61c65b..9b69b8f478e3f 100644 --- a/crates/turbopack-bench/src/util/mod.rs +++ b/crates/turbopack-bench/src/util/mod.rs @@ -88,6 +88,7 @@ pub async fn create_browser() -> Browser { let with_head = read_env_bool("TURBOPACK_BENCH_WITH_HEAD"); let with_devtools = read_env_bool("TURBOPACK_BENCH_DEVTOOLS"); let mut builder = BrowserConfig::builder(); + builder = builder.no_sandbox(); if with_head { builder = builder.with_head(); } diff --git a/crates/turbopack-build/src/chunking_context.rs b/crates/turbopack-build/src/chunking_context.rs index 61f9865959944..65574e532ef86 100644 --- a/crates/turbopack-build/src/chunking_context.rs +++ b/crates/turbopack-build/src/chunking_context.rs @@ -140,7 +140,7 @@ impl BuildChunkingContextVc { let asset = self_vc.generate_evaluate_chunk( entry_chunk, - AssetsVc::cell(assets.clone()), + AssetsVc::cell(assets), evaluatable_assets, Some(module), ); @@ -172,10 +172,7 @@ impl BuildChunkingContextVc { .iter() .map({ move |evaluatable_asset| async move { - Ok(evaluatable_asset - .as_root_chunk(self.into()) - .resolve() - .await?) + evaluatable_asset.as_root_chunk(self.into()).resolve().await } }) .try_join() diff --git a/crates/turbopack-build/src/ecmascript/node/content.rs b/crates/turbopack-build/src/ecmascript/node/content.rs index e796575cb0704..a57776e15d7f2 100644 --- a/crates/turbopack-build/src/ecmascript/node/content.rs +++ b/crates/turbopack-build/src/ecmascript/node/content.rs @@ -70,8 +70,8 @@ impl EcmascriptBuildNodeChunkContentVc { .await? 
{ write!(code, "{}: ", StringifyJs(&id))?; - code.push_code(&*item_code); - write!(code, ",\n")?; + code.push_code(&item_code); + writeln!(code, ",")?; } write!(code, "\n}};")?; diff --git a/crates/turbopack-build/src/ecmascript/node/evaluate/chunk.rs b/crates/turbopack-build/src/ecmascript/node/evaluate/chunk.rs index 6774f0325da17..72fb451378417 100644 --- a/crates/turbopack-build/src/ecmascript/node/evaluate/chunk.rs +++ b/crates/turbopack-build/src/ecmascript/node/evaluate/chunk.rs @@ -96,7 +96,7 @@ impl EcmascriptBuildNodeEvaluateChunkVc { const CHUNK_PUBLIC_PATH = {}; const runtime = require({}); "#, - StringifyJs(&*chunk_public_path), + StringifyJs(chunk_public_path), StringifyJs(&*runtime_relative_path) )?; diff --git a/crates/turbopack-build/src/ecmascript/node/evaluate/runtime.rs b/crates/turbopack-build/src/ecmascript/node/evaluate/runtime.rs index 9c4fcf4e66b85..ed016523961f4 100644 --- a/crates/turbopack-build/src/ecmascript/node/evaluate/runtime.rs +++ b/crates/turbopack-build/src/ecmascript/node/evaluate/runtime.rs @@ -57,7 +57,7 @@ impl EcmascriptBuildNodeRuntimeChunkVc { r#" const RUNTIME_PUBLIC_PATH = {}; "#, - StringifyJs(&*runtime_public_path) + StringifyJs(runtime_public_path) )?; match this.chunking_context.await?.runtime_type() { diff --git a/crates/turbopack-cli/src/dev/web_entry_source.rs b/crates/turbopack-cli/src/dev/web_entry_source.rs index e030fe141f0ca..76b689cfee08a 100644 --- a/crates/turbopack-cli/src/dev/web_entry_source.rs +++ b/crates/turbopack-cli/src/dev/web_entry_source.rs @@ -293,7 +293,7 @@ pub async fn create_web_entry_source( } else if let Some(chunkable) = ChunkableAssetVc::resolve_from(module).await? { // TODO this is missing runtime code, so it's probably broken and we should also // add an ecmascript chunk with the runtime code - Ok((chunkable.into(), chunking_context, None)) + Ok((chunkable, chunking_context, None)) } else { // TODO convert into a serve-able asset Err(anyhow!( diff --git a/crates/turbopack-core/src/chunk/data.rs b/crates/turbopack-core/src/chunk/data.rs index 845b519abe0e4..9ff473c4369e9 100644 --- a/crates/turbopack-core/src/chunk/data.rs +++ b/crates/turbopack-core/src/chunk/data.rs @@ -44,7 +44,7 @@ impl ChunkDataVc { // The "path" in this case is the chunk's path, not the chunk item's path. // The difference is a chunk is a file served by the dev server, and an // item is one of several that are contained in that chunk file. 
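// For example (hypothetical): with output_root "/out" and a chunk at
// "/out/chunks/main.js", get_path_to yields Some("chunks/main.js"); a chunk
// outside output_root yields None, and no ChunkData is produced.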
- let Some(path) = output_root.get_path_to(&*path) else { + let Some(path) = output_root.get_path_to(&path) else { return Ok(ChunkDataOptionVc::cell(None)); }; let path = path.to_string(); @@ -88,7 +88,7 @@ impl ChunkDataVc { async move { let chunk_path = chunk.ident().path().await?; - Ok(output_root.get_path_to(&*chunk_path).map(|path| { + Ok(output_root.get_path_to(&chunk_path).map(|path| { ( path.to_owned(), SingleAssetReferenceVc::new( @@ -114,7 +114,7 @@ impl ChunkDataVc { path, included, excluded, - module_chunks: module_chunks, + module_chunks, references: AssetReferencesVc::cell(module_chunks_references), } .cell(), diff --git a/crates/turbopack-core/src/ident.rs b/crates/turbopack-core/src/ident.rs index f53c32b54b5f6..94d957aa6c518 100644 --- a/crates/turbopack-core/src/ident.rs +++ b/crates/turbopack-core/src/ident.rs @@ -37,7 +37,7 @@ impl AssetIdent { let root = self.path.root(); let path = self.path.await?; self.path = root - .join(&pattern.replace("*", &path.path)) + .join(&pattern.replace('*', &path.path)) .resolve() .await?; Ok(()) diff --git a/crates/turbopack-core/src/resolve/mod.rs b/crates/turbopack-core/src/resolve/mod.rs index 1562ed9121d6b..ee6a1bd37d69b 100644 --- a/crates/turbopack-core/src/resolve/mod.rs +++ b/crates/turbopack-core/src/resolve/mod.rs @@ -1347,7 +1347,7 @@ async fn resolve_package_internal_with_imports_field( *package_json_path, resolve_options, imports, - &specifier, + specifier, conditions, unspecified_conditions, ) diff --git a/crates/turbopack-dev-server/src/html.rs b/crates/turbopack-dev-server/src/html.rs index 568c59f6c2e44..bc49af84a072d 100644 --- a/crates/turbopack-dev-server/src/html.rs +++ b/crates/turbopack-dev-server/src/html.rs @@ -153,7 +153,7 @@ impl DevHtmlAssetVc { chunking_context.chunk_group(chunk) }; - Ok(assets.await?) + assets.await }) .try_join() .await? diff --git a/crates/turbopack-dev-server/src/update/server.rs b/crates/turbopack-dev-server/src/update/server.rs index 9d0af2725c740..ea3d1a1cab784 100644 --- a/crates/turbopack-dev-server/src/update/server.rs +++ b/crates/turbopack-dev-server/src/update/server.rs @@ -137,7 +137,7 @@ impl UpdateServer
{ client .send(ClientUpdateInstruction::partial( &resource, - &**partial_instruction, + partial_instruction, &issues, )) .await?; diff --git a/crates/turbopack-dev-server/src/update/stream.rs b/crates/turbopack-dev-server/src/update/stream.rs index 79eaf8c3002c8..4fa14f79eb39e 100644 --- a/crates/turbopack-dev-server/src/update/stream.rs +++ b/crates/turbopack-dev-server/src/update/stream.rs @@ -243,7 +243,7 @@ impl UpdateStream { let stream = ReceiverStream::new(rx).filter_map(move |item| { { let (has_issues, issues_changed) = - if let Some(UpdateStreamItem::Found { issues, .. }) = item.as_deref().ok() { + if let Ok(UpdateStreamItem::Found { issues, .. }) = item.as_deref() { let has_issues = !issues.is_empty(); let issues_changed = has_issues != last_had_issues; last_had_issues = has_issues; diff --git a/crates/turbopack-dev/src/chunking_context.rs b/crates/turbopack-dev/src/chunking_context.rs index fafaeebfcef0d..1cdab38e0581c 100644 --- a/crates/turbopack-dev/src/chunking_context.rs +++ b/crates/turbopack-dev/src/chunking_context.rs @@ -302,10 +302,10 @@ impl ChunkingContext for DevChunkingContext { .iter() .map({ move |evaluatable_asset| async move { - Ok(evaluatable_asset + evaluatable_asset .as_root_chunk(self_vc.into()) .resolve() - .await?) + .await } }) .try_join() diff --git a/crates/turbopack-dev/src/ecmascript/optimize.rs b/crates/turbopack-dev/src/ecmascript/optimize.rs index b189d1dcde4fd..66a58050f90f5 100644 --- a/crates/turbopack-dev/src/ecmascript/optimize.rs +++ b/crates/turbopack-dev/src/ecmascript/optimize.rs @@ -38,13 +38,11 @@ pub async fn optimize_ecmascript_chunks(chunks: EcmascriptChunksVc) -> Result Self { Self { - transition_name: transition_name.clone(), + transition_name: *transition_name, } } } diff --git a/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs b/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs index 65f15c5a35689..16dd9490a0c6f 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs @@ -20,7 +20,7 @@ pub struct ServerDirectiveTransformer { impl ServerDirectiveTransformer { pub fn new(transition_name: &StringVc) -> Self { Self { - transition_name: transition_name.clone(), + transition_name: *transition_name, } } } diff --git a/crates/turbopack-ecmascript-plugins/src/transform/relay.rs b/crates/turbopack-ecmascript-plugins/src/transform/relay.rs index 52161b1ef078f..6abf7c3858096 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/relay.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/relay.rs @@ -70,7 +70,7 @@ impl CustomTransformer for RelayTransformer { let p = std::mem::replace(program, Program::Module(Module::dummy())); *program = p.fold_with(&mut swc_relay::relay( - config.as_ref().unwrap_or_else(|| &self.config), + config.as_ref().unwrap_or(&self.config), FileName::Real(PathBuf::from(ctx.file_name_str)), root, // [TODO]: pages_dir comes through next-swc-loader diff --git a/crates/turbopack-ecmascript-plugins/src/transform/swc_ecma_transform_plugins.rs b/crates/turbopack-ecmascript-plugins/src/transform/swc_ecma_transform_plugins.rs index 877efdaa37902..6053c203036a0 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/swc_ecma_transform_plugins.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/swc_ecma_transform_plugins.rs @@ -30,9 +30,9 @@ pub struct SwcPluginModule( impl SwcPluginModule { pub fn new(plugin_name: &str, plugin_bytes: Vec) -> Self { - Self 
{ - #[cfg(feature = "swc_ecma_transform_plugin")] - 0: { + #[cfg(feature = "swc_ecma_transform_plugin")] + { + Self({ use swc_core::plugin_runner::plugin_module_bytes::{ CompiledPluginModuleBytes, RawPluginModuleBytes, }; @@ -40,9 +40,12 @@ impl SwcPluginModule { plugin_name.to_string(), plugin_bytes, )) - }, - #[cfg(not(feature = "swc_ecma_transform_plugin"))] - 0: (), + }) + } + + #[cfg(not(feature = "swc_ecma_transform_plugin"))] + { + Self(()) } } } @@ -66,9 +69,9 @@ impl Issue for UnsupportedSwcEcmaTransformPluginsIssue { #[turbo_tasks::function] async fn title(&self) -> Result { - Ok(StringVc::cell(format!( - "Unsupported SWC EcmaScript transform plugins on this platform." - ))) + Ok(StringVc::cell( + "Unsupported SWC EcmaScript transform plugins on this platform.".to_string(), + )) } #[turbo_tasks::function] diff --git a/crates/turbopack-ecmascript/src/analyzer/mod.rs b/crates/turbopack-ecmascript/src/analyzer/mod.rs index 9e29d314e32d7..0faf2e5116e8c 100644 --- a/crates/turbopack-ecmascript/src/analyzer/mod.rs +++ b/crates/turbopack-ecmascript/src/analyzer/mod.rs @@ -1145,7 +1145,7 @@ impl JsValue { "| " ) ), - JsValue::FreeVar(name) => format!("FreeVar({})", &*name), + JsValue::FreeVar(name) => format!("FreeVar({})", name), JsValue::Variable(name) => { format!("{}", name.0) } diff --git a/crates/turbopack-ecmascript/src/chunk/data.rs b/crates/turbopack-ecmascript/src/chunk/data.rs index d5fdab9c88a04..2c352c0b0ca42 100644 --- a/crates/turbopack-ecmascript/src/chunk/data.rs +++ b/crates/turbopack-ecmascript/src/chunk/data.rs @@ -27,13 +27,13 @@ impl<'a> EcmascriptChunkData<'a> { references: _, } = chunk_data; if included.is_empty() && excluded.is_empty() && module_chunks.is_empty() { - return EcmascriptChunkData::Simple(&path); + return EcmascriptChunkData::Simple(path); } EcmascriptChunkData::WithRuntimeInfo { path, - included: &included, - excluded: &excluded, - module_chunks: &module_chunks, + included, + excluded, + module_chunks, } } } diff --git a/crates/turbopack-ecmascript/src/chunk_group_files_asset.rs b/crates/turbopack-ecmascript/src/chunk_group_files_asset.rs index 8a474712880d6..2adb66120a202 100644 --- a/crates/turbopack-ecmascript/src/chunk_group_files_asset.rs +++ b/crates/turbopack-ecmascript/src/chunk_group_files_asset.rs @@ -51,7 +51,6 @@ impl ChunkGroupFilesAssetVc { this.chunking_context.evaluated_chunk_group( ecma.as_root_chunk(this.chunking_context), this.runtime_entries - .clone() .unwrap_or_else(EvaluatableAssetsVc::empty) .with_entry(ecma.into()), ) @@ -220,7 +219,7 @@ impl Introspectable for ChunkGroupFilesAsset { let mut children = IndexSet::new(); let chunk_ty = StringVc::cell("chunk".to_string()); for &chunk in self_vc.chunks().await?.iter() { - children.insert((chunk_ty, IntrospectableAssetVc::new(chunk.into()))); + children.insert((chunk_ty, IntrospectableAssetVc::new(chunk))); } children.insert(( StringVc::cell("inner asset".to_string()), diff --git a/crates/turbopack-ecmascript/src/manifest/loader_item.rs b/crates/turbopack-ecmascript/src/manifest/loader_item.rs index 5244eadab917c..a44a2255c74b8 100644 --- a/crates/turbopack-ecmascript/src/manifest/loader_item.rs +++ b/crates/turbopack-ecmascript/src/manifest/loader_item.rs @@ -100,7 +100,7 @@ impl ChunkItem for ManifestLoaderItem { impl EcmascriptChunkItem for ManifestLoaderItem { #[turbo_tasks::function] async fn chunking_context(&self) -> Result { - Ok(self.manifest.await?.chunking_context.into()) + Ok(self.manifest.await?.chunking_context) } #[turbo_tasks::function] @@ -121,7 +121,7 @@ impl 
EcmascriptChunkItem for ManifestLoaderItem { // exports a promise for all of the necessary chunk loads. let item_id = &*this .manifest - .as_chunk_item(manifest.chunking_context.into()) + .as_chunk_item(manifest.chunking_context) .id() .await?; @@ -131,7 +131,7 @@ impl EcmascriptChunkItem for ManifestLoaderItem { .await? .ok_or_else(|| anyhow!("asset is not placeable in ecmascript chunk"))?; let dynamic_id = &*placeable - .as_chunk_item(manifest.chunking_context.into()) + .as_chunk_item(manifest.chunking_context) .id() .await?; @@ -157,7 +157,7 @@ impl EcmascriptChunkItem for ManifestLoaderItem { chunks_server_data = StringifyJs( &chunks_server_data .iter() - .map(|chunk_data| EcmascriptChunkData::new(&chunk_data)) + .map(|chunk_data| EcmascriptChunkData::new(chunk_data)) .collect::>() ), item_id = StringifyJs(item_id), diff --git a/crates/turbopack-ecmascript/src/parse.rs b/crates/turbopack-ecmascript/src/parse.rs index d73ff8c19f4d1..83b2c8e158183 100644 --- a/crates/turbopack-ecmascript/src/parse.rs +++ b/crates/turbopack-ecmascript/src/parse.rs @@ -398,13 +398,10 @@ impl Issue for ReadSourceIssue { #[turbo_tasks::function] fn description(&self) -> StringVc { - StringVc::cell( - format!( - "An unexpected error happened while trying to read the source code to parse: {}", - self.error - ) - .into(), - ) + StringVc::cell(format!( + "An unexpected error happened while trying to read the source code to parse: {}", + self.error + )) } #[turbo_tasks::function] diff --git a/crates/turbopack-ecmascript/src/references/mod.rs b/crates/turbopack-ecmascript/src/references/mod.rs index 97167654322fb..6768ad006b48f 100644 --- a/crates/turbopack-ecmascript/src/references/mod.rs +++ b/crates/turbopack-ecmascript/src/references/mod.rs @@ -159,7 +159,7 @@ impl AnalyzeEcmascriptModuleResultVc { return Ok(BoolVc::cell(true)); } } - return Ok(BoolVc::cell(false)); + Ok(BoolVc::cell(false)) } } @@ -1310,12 +1310,11 @@ pub(crate) async fn analyze_ecmascript_module( if it.next().unwrap() != Cow::Borrowed(prop) { continue; } - if obj.iter_defineable_name_rev().eq(it) { - if handle_free_var_reference(ast_path, value, state, analysis) + if obj.iter_defineable_name_rev().eq(it) + && handle_free_var_reference(ast_path, value, state, analysis) .await? - { - return Ok(()); - } + { + return Ok(()); } } } @@ -1354,10 +1353,9 @@ pub(crate) async fn analyze_ecmascript_module( if var .iter_defineable_name_rev() .eq(name.iter().map(Cow::Borrowed).rev()) + && handle_free_var_reference(ast_path, value, state, analysis).await? { - if handle_free_var_reference(ast_path, value, state, analysis).await? { - return Ok(()); - } + return Ok(()); } } } @@ -1373,7 +1371,7 @@ pub(crate) async fn analyze_ecmascript_module( ) -> Result { // We don't want to replace assignments as this would lead to invalid code. 
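// A minimal sketch of why the guard below exists (stand-in enum, not the real
// swc AstParentKind): replacing a free variable that sits on the left-hand
// side of an assignment, e.g. rewriting `process = x` into `{...} = x`, would
// produce invalid code, so AST paths ending in an assignment's left side are
// skipped.
#[derive(Debug, PartialEq)]
enum ParentKind {
    AssignLeft,
    Other,
}

fn is_assignment_target(ast_path: &[ParentKind]) -> bool {
    // Same shape as the `matches!` slice pattern in the diff: only the
    // innermost (trailing) parents decide.
    matches!(ast_path, [.., ParentKind::AssignLeft])
}

fn main() {
    assert!(is_assignment_target(&[ParentKind::Other, ParentKind::AssignLeft]));
    assert!(!is_assignment_target(&[ParentKind::AssignLeft, ParentKind::Other]));
}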
if matches!( - &ast_path[..], + ast_path, [ .., AstParentKind::AssignExpr(AssignExprField::Left), diff --git a/crates/turbopack-ecmascript/src/references/require_context.rs b/crates/turbopack-ecmascript/src/references/require_context.rs index dceaa441e82d1..439c0d6b7e590 100644 --- a/crates/turbopack-ecmascript/src/references/require_context.rs +++ b/crates/turbopack-ecmascript/src/references/require_context.rs @@ -498,7 +498,7 @@ impl EcmascriptChunkItem for RequireContextChunkItem { }, cm: source_map.clone(), comments: None, - wr: JsWriter::new(source_map.clone(), "\n", &mut bytes, None), + wr: JsWriter::new(source_map, "\n", &mut bytes, None), }; emitter.emit_module(&module)?; diff --git a/crates/turbopack-ecmascript/src/transform/mod.rs b/crates/turbopack-ecmascript/src/transform/mod.rs index f33a07c05c63b..7cb9889aaa42f 100644 --- a/crates/turbopack-ecmascript/src/transform/mod.rs +++ b/crates/turbopack-ecmascript/src/transform/mod.rs @@ -215,7 +215,7 @@ impl EcmascriptInputTransform { { Program::Module(Module { span, - body: body.drain(..).map(|stmt| ModuleItem::Stmt(stmt)).collect(), + body: body.drain(..).map(ModuleItem::Stmt).collect(), shebang, }) } else { diff --git a/crates/turbopack-ecmascript/src/tree_shake/tests.rs b/crates/turbopack-ecmascript/src/tree_shake/tests.rs index d0f9eb0f73ef1..3b6c65275dce1 100644 --- a/crates/turbopack-ecmascript/src/tree_shake/tests.rs +++ b/crates/turbopack-ecmascript/src/tree_shake/tests.rs @@ -43,8 +43,8 @@ struct TestConfig { fn run(input: PathBuf) { let config = input.with_file_name("config.json"); - let config = std::fs::read_to_string(&config).unwrap_or_else(|_| "{}".into()); - let config = serde_json::from_str::(&config).unwrap_or_else(|e| { + let config = std::fs::read_to_string(config).unwrap_or_else(|_| "{}".into()); + let config = serde_json::from_str::(&config).unwrap_or_else(|_e| { panic!("failed to parse config.json: {}", config); }); diff --git a/crates/turbopack-image/src/process/svg.rs b/crates/turbopack-image/src/process/svg.rs index f27717f23b39a..80a18babaf2f6 100644 --- a/crates/turbopack-image/src/process/svg.rs +++ b/crates/turbopack-image/src/process/svg.rs @@ -79,7 +79,7 @@ fn calculate_by_viewbox( } pub fn calculate(content: &str) -> Result<(u32, u32)> { - let Some(root) = ROOT_REGEX.find(&content) else { + let Some(root) = ROOT_REGEX.find(content) else { bail!("Source code does not contain a root element"); }; let root = root.as_str(); diff --git a/crates/turbopack-node/src/debug.rs b/crates/turbopack-node/src/debug.rs index 248e722f11173..efba10bba330f 100644 --- a/crates/turbopack-node/src/debug.rs +++ b/crates/turbopack-node/src/debug.rs @@ -12,5 +12,5 @@ pub fn should_debug(operation: &str) -> bool { return false; }; - val == "*" || val.split(",").any(|part| part == operation) + val == "*" || val.split(',').any(|part| part == operation) } diff --git a/crates/turbopack-node/src/render/node_api_source.rs b/crates/turbopack-node/src/render/node_api_source.rs index 9a86e25a0aad2..f4ff8536ac660 100644 --- a/crates/turbopack-node/src/render/node_api_source.rs +++ b/crates/turbopack-node/src/render/node_api_source.rs @@ -213,7 +213,7 @@ impl Introspectable for NodeApiContentSource { StringVc::cell("intermediate asset".to_string()), IntrospectableAssetVc::new(get_intermediate_asset( entry.chunking_context, - entry.module.into(), + entry.module, entry.runtime_entries, )), )); diff --git a/crates/turbopack-node/src/render/render_proxy.rs b/crates/turbopack-node/src/render/render_proxy.rs index 645a7a938a5cc..5973a8146e172 
100644 --- a/crates/turbopack-node/src/render/render_proxy.rs +++ b/crates/turbopack-node/src/render/render_proxy.rs @@ -233,7 +233,7 @@ async fn render_stream_internal( let stream = generator! { let intermediate_asset = get_intermediate_asset( chunking_context, - module.into(), + module, runtime_entries, ); let pool = get_renderer_pool( diff --git a/crates/turbopack-node/src/transforms/postcss.rs b/crates/turbopack-node/src/transforms/postcss.rs index 2560458419638..a08866c5d9af0 100644 --- a/crates/turbopack-node/src/transforms/postcss.rs +++ b/crates/turbopack-node/src/transforms/postcss.rs @@ -143,14 +143,10 @@ async fn extra_configs( .map(|path| async move { Ok( matches!(&*path.get_type().await?, FileSystemEntryType::File).then(|| { - any_content_changed( - context - .process( - SourceAssetVc::new(path).into(), - Value::new(ReferenceType::Internal(InnerAssetsVc::empty())), - ) - .into(), - ) + any_content_changed(context.process( + SourceAssetVc::new(path).into(), + Value::new(ReferenceType::Internal(InnerAssetsVc::empty())), + )) }), ) }) diff --git a/crates/turbopack-node/src/transforms/webpack.rs b/crates/turbopack-node/src/transforms/webpack.rs index cbba61c89ad45..7c68e52e437dc 100644 --- a/crates/turbopack-node/src/transforms/webpack.rs +++ b/crates/turbopack-node/src/transforms/webpack.rs @@ -116,12 +116,10 @@ struct ProcessWebpackLoadersResult { #[turbo_tasks::function] fn webpack_loaders_executor(context: AssetContextVc) -> AssetVc { - context - .process( - SourceAssetVc::new(embed_file_path("transforms/webpack-loaders.ts")).into(), - Value::new(ReferenceType::Internal(InnerAssetsVc::empty())), - ) - .into() + context.process( + SourceAssetVc::new(embed_file_path("transforms/webpack-loaders.ts")).into(), + Value::new(ReferenceType::Internal(InnerAssetsVc::empty())), + ) } #[turbo_tasks::value_impl] diff --git a/crates/turbopack-tests/tests/snapshot.rs b/crates/turbopack-tests/tests/snapshot.rs index eacbaa7934342..90be2f1cd4839 100644 --- a/crates/turbopack-tests/tests/snapshot.rs +++ b/crates/turbopack-tests/tests/snapshot.rs @@ -320,8 +320,7 @@ async fn run_test(resource: &str) -> Result { env, ) .runtime_type(options.runtime_type) - .build() - .into(), + .build(), Runtime::Build => BuildChunkingContextVc::builder( project_root, path, @@ -411,7 +410,7 @@ async fn walk_asset( return Ok(()); } - if path.await?.is_inside(&*output_path) { + if path.await?.is_inside(output_path) { // Only consider assets that should be written to disk. 
diff(path, asset.content()).await?; } diff --git a/crates/turbopack/src/lib.rs b/crates/turbopack/src/lib.rs index 5107e29d65ba6..57db670a0940e 100644 --- a/crates/turbopack/src/lib.rs +++ b/crates/turbopack/src/lib.rs @@ -162,7 +162,7 @@ async fn apply_module_type( } } - builder.build().into() + builder.build() } ModuleType::Json => JsonModuleAssetVc::new(source).into(), diff --git a/crates/turbopack/src/module_options/mod.rs b/crates/turbopack/src/module_options/mod.rs index cb1e158d48c7a..5ac0d264cd3d3 100644 --- a/crates/turbopack/src/module_options/mod.rs +++ b/crates/turbopack/src/module_options/mod.rs @@ -101,7 +101,7 @@ impl ModuleOptionsVc { .output_transforms .iter() .cloned() - .map(|plugin| EcmascriptInputTransform::Plugin(plugin)) + .map(EcmascriptInputTransform::Plugin) .collect(), ) } else { @@ -270,7 +270,7 @@ impl ModuleOptionsVc { ]), vec![ModuleRuleEffect::ModuleType(ModuleType::Ecmascript { transforms: app_transforms, - options: ecmascript_options.clone(), + options: ecmascript_options, })], ), ModuleRule::new( @@ -301,12 +301,12 @@ impl ModuleOptionsVc { vec![if enable_types { ModuleRuleEffect::ModuleType(ModuleType::TypescriptWithTypes { transforms: ts_app_transforms, - options: ecmascript_options.clone(), + options: ecmascript_options, }) } else { ModuleRuleEffect::ModuleType(ModuleType::Typescript { transforms: ts_app_transforms, - options: ecmascript_options.clone(), + options: ecmascript_options, }) }], ), @@ -361,7 +361,7 @@ impl ModuleOptionsVc { vec![ModuleRuleEffect::ModuleType( ModuleType::TypescriptDeclaration { transforms: vendor_transforms, - options: ecmascript_options.clone(), + options: ecmascript_options, }, )], ), @@ -384,7 +384,7 @@ impl ModuleOptionsVc { ModuleRuleCondition::ResourcePathHasNoExtension, vec![ModuleRuleEffect::ModuleType(ModuleType::Ecmascript { transforms: vendor_transforms, - options: ecmascript_options.clone(), + options: ecmascript_options, })], ), ModuleRule::new( @@ -443,7 +443,7 @@ impl ModuleOptionsVc { for (glob, rule) in webpack_loaders_options.rules.await?.iter() { rules.push(ModuleRule::new( ModuleRuleCondition::All(vec![ - if !glob.contains("/") { + if !glob.contains('/') { ModuleRuleCondition::ResourceBasePathGlob(GlobVc::new(glob).await?) } else { ModuleRuleCondition::ResourcePathGlob { @@ -458,7 +458,7 @@ impl ModuleOptionsVc { // This can be overriden by specifying e. g. `as: "*.css"` in the rule. 
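// A minimal sketch of the condition split visible above (hypothetical globs;
// the string names mirror the two ModuleRuleCondition variants): a
// user-supplied loader glob without a '/' is presumably matched against file
// basenames anywhere in the tree, while a glob containing '/' is matched
// against the project-relative path.
fn condition_kind(glob: &str) -> &'static str {
    if !glob.contains('/') {
        "ResourceBasePathGlob" // e.g. "*.svg" matches any .svg file by name
    } else {
        "ResourcePathGlob" // e.g. "icons/*.svg" only matches under icons/
    }
}

fn main() {
    assert_eq!(condition_kind("*.svg"), "ResourceBasePathGlob");
    assert_eq!(condition_kind("icons/*.svg"), "ResourcePathGlob");
}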
ModuleRuleEffect::ModuleType(ModuleType::Ecmascript {
 transforms: app_transforms,
- options: ecmascript_options.clone(),
+ options: ecmascript_options,
 }),
 ModuleRuleEffect::SourceTransforms(SourceTransformsVc::cell(vec![
 WebpackLoadersVc::new(
diff --git a/crates/turborepo-cache/src/signature_authentication.rs b/crates/turborepo-cache/src/signature_authentication.rs
index 9abda46e68b11..eaf9429d9961f 100644
--- a/crates/turborepo-cache/src/signature_authentication.rs
+++ b/crates/turborepo-cache/src/signature_authentication.rs
@@ -110,7 +110,6 @@ impl ArtifactSignatureAuthenticator {
 
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
-    use os_str_bytes::OsStrBytes;
 
     use super::*;
diff --git a/crates/turborepo-env/Cargo.toml b/crates/turborepo-env/Cargo.toml
new file mode 100644
index 0000000000000..3057974148b62
--- /dev/null
+++ b/crates/turborepo-env/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "turborepo-env"
+version = "0.1.0"
+edition = "2021"
+license = "MPL-2.0"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+lazy_static = { workspace = true }
+regex = { workspace = true }
+serde = { workspace = true }
+test-case = { workspace = true }
+thiserror = { workspace = true }
diff --git a/crates/turborepo-env/src/lib.rs b/crates/turborepo-env/src/lib.rs
new file mode 100644
index 0000000000000..300786f99d367
--- /dev/null
+++ b/crates/turborepo-env/src/lib.rs
@@ -0,0 +1,264 @@
+use std::{
+    collections::HashMap,
+    env,
+    ops::{Deref, DerefMut},
+    string::ToString,
+};
+
+use regex::Regex;
+use serde::Serialize;
+use thiserror::Error;
+
+const DEFAULT_ENV_VARS: [&str; 1] = ["VERCEL_ANALYTICS_ID"];
+
+#[derive(Clone, Debug, Error)]
+pub enum Error {
+    #[error("Failed to parse regex: {0}")]
+    Regex(#[from] regex::Error),
+}
+
+// TODO: Consider using immutable data structures here
+#[derive(Clone, Debug, Default, Serialize)]
+#[serde(transparent)]
+pub struct EnvironmentVariableMap(HashMap<String, String>);
+
+// BySource contains a map of environment variables broken down by the source
+#[derive(Debug, Serialize)]
+pub struct BySource {
+    pub explicit: EnvironmentVariableMap,
+    pub matching: EnvironmentVariableMap,
+}
+
+// DetailedMap contains the composite and the detailed maps of environment
+// variables All is used as a taskhash input (taskhash.CalculateTaskHash)
+// BySource is used by dry runs and run summaries
+#[derive(Debug, Serialize)]
+pub struct DetailedMap {
+    pub all: EnvironmentVariableMap,
+    pub by_source: BySource,
+}
+
+// WildcardMaps is a pair of EnvironmentVariableMaps.
+#[derive(Debug)]
+pub struct WildcardMaps {
+    pub inclusions: EnvironmentVariableMap,
+    pub exclusions: EnvironmentVariableMap,
+}
+
+impl WildcardMaps {
+    // Resolve collapses a WildcardSet into a single EnvironmentVariableMap.
+    fn resolve(self) -> EnvironmentVariableMap {
+        let mut output = self.inclusions;
+        output.difference(&self.exclusions);
+        output
+    }
+}
+
+impl From<HashMap<String, String>> for EnvironmentVariableMap {
+    fn from(map: HashMap<String, String>) -> Self {
+        EnvironmentVariableMap(map)
+    }
+}
+
+impl Deref for EnvironmentVariableMap {
+    type Target = HashMap<String, String>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl DerefMut for EnvironmentVariableMap {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl EnvironmentVariableMap {
+    pub fn infer() -> Self {
+        EnvironmentVariableMap(env::vars().collect())
+    }
+
+    pub fn into_inner(self) -> HashMap<String, String> {
+        self.0
+    }
+
+    // Takes another EnvironmentVariableMap and adds it into `self`
+    // Overwrites values if they already exist.
+    pub fn union(&mut self, another: &EnvironmentVariableMap) {
+        for (key, value) in &another.0 {
+            self.0.insert(key.clone(), value.clone());
+        }
+    }
+
+    // Takes another EnvironmentVariableMap and removes matching keys
+    // from `self`
+    pub fn difference(&mut self, another: &EnvironmentVariableMap) {
+        for key in another.0.keys() {
+            self.0.remove(key);
+        }
+    }
+
+    // returns a WildcardMaps after processing wildcards against it.
+    fn wildcard_map_from_wildcards(
+        &self,
+        wildcard_patterns: &[impl AsRef<str>],
+    ) -> Result<WildcardMaps, Error> {
+        let mut output = WildcardMaps {
+            inclusions: EnvironmentVariableMap::default(),
+            exclusions: EnvironmentVariableMap::default(),
+        };
+
+        let mut include_patterns = Vec::new();
+        let mut exclude_patterns = Vec::new();
+
+        for wildcard_pattern in wildcard_patterns {
+            let wildcard_pattern = wildcard_pattern.as_ref();
+            if let Some(rest) = wildcard_pattern.strip_prefix('!') {
+                let exclude_pattern = wildcard_to_regex_pattern(rest);
+                exclude_patterns.push(exclude_pattern);
+            } else if wildcard_pattern.starts_with("\\!") {
+                let include_pattern = wildcard_to_regex_pattern(&wildcard_pattern[1..]);
+                include_patterns.push(include_pattern);
+            } else {
+                let include_pattern = wildcard_to_regex_pattern(wildcard_pattern);
+                include_patterns.push(include_pattern);
+            }
+        }
+
+        let include_regex_string = format!("^({})$", include_patterns.join("|"));
+        let exclude_regex_string = format!("^({})$", exclude_patterns.join("|"));
+
+        let include_regex = Regex::new(&include_regex_string)?;
+        let exclude_regex = Regex::new(&exclude_regex_string)?;
+        for (env_var, env_value) in &self.0 {
+            if !include_patterns.is_empty() && include_regex.is_match(env_var) {
+                output.inclusions.insert(env_var.clone(), env_value.clone());
+            }
+            if !exclude_patterns.is_empty() && exclude_regex.is_match(env_var) {
+                output.exclusions.insert(env_var.clone(), env_value.clone());
+            }
+        }
+
+        Ok(output)
+    }
+
+    // Returns an EnvironmentVariableMap containing the variables
+    // in the environment which match an array of wildcard patterns.
+    pub fn from_wildcards(
+        &self,
+        wildcard_patterns: &[impl AsRef<str>],
+    ) -> Result<EnvironmentVariableMap, Error> {
+        if wildcard_patterns.is_empty() {
+            return Ok(EnvironmentVariableMap::default());
+        }
+
+        let resolved_set = self.wildcard_map_from_wildcards(wildcard_patterns)?;
+        Ok(resolved_set.resolve())
+    }
+
+    // FromWildcardsUnresolved returns a wildcardSet specifying the inclusions and
+    // exclusions discovered from a set of wildcard patterns. This is used to ensure
+    // that user exclusions have primacy over inferred inclusions.
+    pub fn from_wildcards_unresolved(
+        &self,
+        wildcard_patterns: &[String],
+    ) -> Result<WildcardMaps, Error> {
+        if wildcard_patterns.is_empty() {
+            return Ok(WildcardMaps {
+                inclusions: EnvironmentVariableMap::default(),
+                exclusions: EnvironmentVariableMap::default(),
+            });
+        }
+
+        self.wildcard_map_from_wildcards(wildcard_patterns)
+    }
+}
+
+const WILDCARD: char = '*';
+const WILDCARD_ESCAPE: char = '\\';
+const REGEX_WILDCARD_SEGMENT: &str = ".*";
+
+fn wildcard_to_regex_pattern(pattern: &str) -> String {
+    let mut regex_string = Vec::new();
+    let mut previous_index = 0;
+    let mut previous_char: Option<char> = None;
+
+    for (i, char) in pattern.chars().enumerate() {
+        if char == WILDCARD {
+            if previous_char == Some(WILDCARD_ESCAPE) {
+                // Found a literal *
+                // Replace the trailing "\*" with just "*" before adding the segment.
+                regex_string.push(regex::escape(&format!(
+                    "{}*",
+                    &pattern[previous_index..(i - 1)]
+                )));
+            } else {
+                // Found a wildcard
+                // Add in the static segment since the last wildcard. Can be zero length.
+                regex_string.push(regex::escape(&pattern[previous_index..i]));
+
+                // Add a dynamic segment if it isn't adjacent to another dynamic segment.
+                if let Some(last_segment) = regex_string.last() {
+                    if last_segment != REGEX_WILDCARD_SEGMENT {
+                        regex_string.push(REGEX_WILDCARD_SEGMENT.to_string());
+                    }
+                }
+            }
+
+            // Advance the pointer.
+            previous_index = i + 1;
+        }
+        previous_char = Some(char);
+    }
+
+    // Add the last static segment. Can be zero length.
+    regex_string.push(regex::escape(&pattern[previous_index..]));
+
+    regex_string.join("")
+}
+
+pub fn get_global_hashable_env_vars(
+    env_at_execution_start: EnvironmentVariableMap,
+    global_env: &[String],
+) -> Result<DetailedMap, Error> {
+    let default_env_var_map = env_at_execution_start.from_wildcards(&DEFAULT_ENV_VARS[..])?;
+
+    let user_env_var_set = env_at_execution_start.from_wildcards_unresolved(global_env)?;
+
+    let mut all_env_var_map = EnvironmentVariableMap::default();
+    all_env_var_map.union(&user_env_var_set.inclusions);
+    all_env_var_map.union(&default_env_var_map);
+    all_env_var_map.difference(&user_env_var_set.exclusions);
+
+    let mut explicit_env_var_map = EnvironmentVariableMap::default();
+    explicit_env_var_map.union(&user_env_var_set.inclusions);
+    explicit_env_var_map.difference(&user_env_var_set.exclusions);
+
+    let mut matching_env_var_map = EnvironmentVariableMap::default();
+    matching_env_var_map.union(&default_env_var_map);
+    matching_env_var_map.difference(&explicit_env_var_map);
+
+    Ok(DetailedMap {
+        all: all_env_var_map,
+        by_source: BySource {
+            explicit: explicit_env_var_map,
+            matching: matching_env_var_map,
+        },
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use test_case::test_case;
+
+    #[test_case("LITERAL_\\*", "LITERAL_\\*" ; "literal star")]
+    #[test_case("\\*LEADING", "\\*LEADING" ; "leading literal star")]
+    #[test_case("\\!LEADING", "\\\\!LEADING" ; "leading literal bang")]
+    #[test_case("!LEADING", "!LEADING" ; "leading bang")]
+    #[test_case("*LEADING", ".*LEADING" ; "leading star")]
+    fn test_wildcard_to_regex_pattern(pattern: &str, expected: &str) {
+        let actual = super::wildcard_to_regex_pattern(pattern);
+        assert_eq!(actual, expected);
+    }
+}
diff --git a/crates/turborepo-ffi/Cargo.toml b/crates/turborepo-ffi/Cargo.toml
index 417073327668f..b8128c0a66430 100644
--- a/crates/turborepo-ffi/Cargo.toml
+++ b/crates/turborepo-ffi/Cargo.toml
@@ -14,6 +14,7 @@ prost = "0.11.6"
 thiserror = { workspace = true }
 turbopath = { workspace = true }
 turborepo-cache = { workspace = true }
+turborepo-env = { workspace = true }
turborepo-fs = { workspace = true } turborepo-lockfiles = { workspace = true } turborepo-scm = { workspace = true } diff --git a/crates/turborepo-ffi/messages.proto b/crates/turborepo-ffi/messages.proto index f0009d01eda02..fede8153d3c89 100644 --- a/crates/turborepo-ffi/messages.proto +++ b/crates/turborepo-ffi/messages.proto @@ -58,6 +58,7 @@ enum PackageManager { NPM = 0; BERRY = 1; PNPM = 2; + YARN = 3; } message PackageDependency { @@ -194,3 +195,55 @@ message GetPackageFileHashesFromProcessingGitIgnoreResponse { string error = 2; } } + +message GetPackageFileHashesFromInputsRequest { + string turbo_root = 1; + string package_path = 2; + repeated string inputs = 3; +} + +message GetPackageFileHashesFromInputsResponse { + oneof response { + FileHashes hashes = 1; + string error = 2; + } +} + + +message FromWildcardsRequest { + EnvVarMap env_vars = 1; + repeated string wildcard_patterns = 2; +} + +message FromWildcardsResponse { + oneof response { + EnvVarMap env_vars = 1; + string error = 2; + } +} + +message EnvVarMap { + map map = 1; +} + +message DetailedMap { + map all = 1; + BySource by_source = 2; +} + +message BySource { + map explicit = 1; + map matching = 2; +} + +message GetGlobalHashableEnvVarsRequest { + EnvVarMap env_at_execution_start = 1; + repeated string global_env = 2; +} + +message GetGlobalHashableEnvVarsResponse { + oneof response { + DetailedMap detailed_map = 1; + string error = 2; + } +} diff --git a/crates/turborepo-ffi/src/lib.rs b/crates/turborepo-ffi/src/lib.rs index 7458c0d121b4f..996c02d502439 100644 --- a/crates/turborepo-ffi/src/lib.rs +++ b/crates/turborepo-ffi/src/lib.rs @@ -6,9 +6,10 @@ mod lockfile; use std::{collections::HashMap, mem::ManuallyDrop, path::PathBuf}; -use globwalk::{globwalk, WalkError}; +use globwalk::globwalk; pub use lockfile::{patches, subgraph, transitive_closure}; use turbopath::{AbsoluteSystemPathBuf, AnchoredSystemPathBuf}; +use turborepo_env::EnvironmentVariableMap; mod proto { include!(concat!(env!("OUT_DIR"), "/_.rs")); @@ -273,14 +274,14 @@ pub extern "C" fn get_package_file_hashes_from_git_index(buffer: Buffer) -> Buff to_return.insert(filename, hash); } let file_hashes = proto::FileHashes { hashes: to_return }; - let resp = proto::GetPackageFileHashesFromGitIndexResponse { + + proto::GetPackageFileHashesFromGitIndexResponse { response: Some( proto::get_package_file_hashes_from_git_index_response::Response::Hashes( file_hashes, ), ), - }; - resp + } } Err(err) => { let resp = proto::GetPackageFileHashesFromGitIndexResponse { @@ -358,14 +359,14 @@ pub extern "C" fn get_package_file_hashes_from_processing_git_ignore(buffer: Buf to_return.insert(filename, hash); } let file_hashes = proto::FileHashes { hashes: to_return }; - let resp = proto::GetPackageFileHashesFromProcessingGitIgnoreResponse { + + proto::GetPackageFileHashesFromProcessingGitIgnoreResponse { response: Some( proto::get_package_file_hashes_from_processing_git_ignore_response::Response::Hashes( file_hashes, ), ), - }; - resp + } } Err(err) => { let resp = proto::GetPackageFileHashesFromProcessingGitIgnoreResponse { @@ -381,6 +382,91 @@ pub extern "C" fn get_package_file_hashes_from_processing_git_ignore(buffer: Buf response.into() } +#[no_mangle] +pub extern "C" fn get_package_file_hashes_from_inputs(buffer: Buffer) -> Buffer { + let req: proto::GetPackageFileHashesFromInputsRequest = match buffer.into_proto() { + Ok(req) => req, + Err(err) => { + let resp = proto::GetPackageFileHashesFromInputsResponse { + response: Some( + 
proto::get_package_file_hashes_from_inputs_response::Response::Error( + err.to_string(), + ), + ), + }; + return resp.into(); + } + }; + let turbo_root = match AbsoluteSystemPathBuf::new(req.turbo_root) { + Ok(turbo_root) => turbo_root, + Err(err) => { + let resp = proto::GetPackageFileHashesFromInputsResponse { + response: Some( + proto::get_package_file_hashes_from_inputs_response::Response::Error( + err.to_string(), + ), + ), + }; + return resp.into(); + } + }; + let package_path = match AnchoredSystemPathBuf::from_raw(req.package_path) { + Ok(package_path) => package_path, + Err(err) => { + let resp = proto::GetPackageFileHashesFromInputsResponse { + response: Some( + proto::get_package_file_hashes_from_inputs_response::Response::Error( + err.to_string(), + ), + ), + }; + return resp.into(); + } + }; + let inputs = req.inputs.as_slice(); + let response = match turborepo_scm::package_deps::get_package_file_hashes_from_inputs( + &turbo_root, + &package_path, + inputs, + ) { + Ok(hashes) => { + let mut to_return = HashMap::new(); + for (filename, hash) in hashes { + let filename = match filename.as_str() { + Ok(s) => s.to_owned(), + Err(err) => { + let resp = proto::GetPackageFileHashesFromInputsResponse { + response: Some(proto::get_package_file_hashes_from_inputs_response::Response::Error(err.to_string())) + }; + return resp.into(); + } + }; + to_return.insert(filename, hash); + } + let file_hashes = proto::FileHashes { hashes: to_return }; + let resp = proto::GetPackageFileHashesFromInputsResponse { + response: Some( + proto::get_package_file_hashes_from_inputs_response::Response::Hashes( + file_hashes, + ), + ), + }; + resp + } + Err(err) => { + let resp = proto::GetPackageFileHashesFromInputsResponse { + response: Some( + proto::get_package_file_hashes_from_inputs_response::Response::Error( + err.to_string(), + ), + ), + }; + return resp.into(); + } + }; + response.into() +} + #[no_mangle] pub extern "C" fn glob(buffer: Buffer) -> Buffer { let req: proto::GlobReq = match buffer.into_proto() { @@ -397,13 +483,13 @@ pub extern "C" fn glob(buffer: Buffer) -> Buffer { false => globwalk::WalkType::All, }; - let mut iter = match globwalk( + let files = match globwalk( &AbsoluteSystemPathBuf::new(req.base_path).expect("absolute"), &req.include_patterns, &req.exclude_patterns, walk_type, ) { - Ok(iter) => iter, + Ok(files) => files, Err(err) => { let resp = proto::GlobResp { response: Some(proto::glob_resp::Response::Error(err.to_string())), @@ -412,17 +498,8 @@ pub extern "C" fn glob(buffer: Buffer) -> Buffer { } }; - let paths = match iter.collect::, WalkError>>() { - Ok(paths) => paths, - Err(err) => { - let resp = proto::GlobResp { - response: Some(proto::glob_resp::Response::Error(err.to_string())), - }; - return resp.into(); - } - }; // TODO: is to_string_lossy the right thing to do here? We could error... 
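// A minimal sketch of the trade-off flagged in the TODO above (unix-only,
// hypothetical bytes): to_string_lossy never fails, it silently replaces
// non-UTF-8 bytes with U+FFFD, so a returned path may not round-trip back to
// the original file name.
#[cfg(unix)]
fn main() {
    use std::ffi::OsStr;
    use std::os::unix::ffi::OsStrExt;

    let raw = OsStr::from_bytes(&[b'f', b'o', 0xFF]); // "fo" plus an invalid byte
    assert_eq!(raw.to_string_lossy(), "fo\u{FFFD}");
}

#[cfg(not(unix))]
fn main() {}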
- let files: Vec<_> = paths + let files: Vec<_> = files .into_iter() .map(|path| path.to_string_lossy().to_string()) .collect(); @@ -434,6 +511,88 @@ pub extern "C" fn glob(buffer: Buffer) -> Buffer { .into() } +#[no_mangle] +pub extern "C" fn from_wildcards(buffer: Buffer) -> Buffer { + let req: proto::FromWildcardsRequest = match buffer.into_proto() { + Ok(req) => req, + Err(err) => { + let resp = proto::FromWildcardsResponse { + response: Some(proto::from_wildcards_response::Response::Error( + err.to_string(), + )), + }; + return resp.into(); + } + }; + + let env_var_map: EnvironmentVariableMap = req.env_vars.unwrap().map.into(); + match env_var_map.from_wildcards(&req.wildcard_patterns) { + Ok(map) => { + let resp = proto::FromWildcardsResponse { + response: Some(proto::from_wildcards_response::Response::EnvVars( + proto::EnvVarMap { + map: map.into_inner(), + }, + )), + }; + resp.into() + } + Err(err) => { + let resp = proto::FromWildcardsResponse { + response: Some(proto::from_wildcards_response::Response::Error( + err.to_string(), + )), + }; + resp.into() + } + } +} + +#[no_mangle] +pub extern "C" fn get_global_hashable_env_vars(buffer: Buffer) -> Buffer { + let req: proto::GetGlobalHashableEnvVarsRequest = match buffer.into_proto() { + Ok(req) => req, + Err(err) => { + let resp = proto::GetGlobalHashableEnvVarsResponse { + response: Some( + proto::get_global_hashable_env_vars_response::Response::Error(err.to_string()), + ), + }; + return resp.into(); + } + }; + + match turborepo_env::get_global_hashable_env_vars( + req.env_at_execution_start.unwrap().map.into(), + &req.global_env, + ) { + Ok(map) => { + let resp = proto::GetGlobalHashableEnvVarsResponse { + response: Some( + proto::get_global_hashable_env_vars_response::Response::DetailedMap( + proto::DetailedMap { + all: map.all.into_inner(), + by_source: Some(proto::BySource { + explicit: map.by_source.explicit.into_inner(), + matching: map.by_source.matching.into_inner(), + }), + }, + ), + ), + }; + resp.into() + } + Err(err) => { + let resp = proto::GetGlobalHashableEnvVarsResponse { + response: Some( + proto::get_global_hashable_env_vars_response::Response::Error(err.to_string()), + ), + }; + resp.into() + } + } +} + #[cfg(test)] mod test { use super::*; diff --git a/crates/turborepo-ffi/src/lockfile.rs b/crates/turborepo-ffi/src/lockfile.rs index 4c5cab84ae65e..52bc6c781072a 100644 --- a/crates/turborepo-ffi/src/lockfile.rs +++ b/crates/turborepo-ffi/src/lockfile.rs @@ -4,7 +4,9 @@ use std::{ }; use thiserror::Error; -use turborepo_lockfiles::{self, BerryLockfile, LockfileData, NpmLockfile, Package, PnpmLockfile}; +use turborepo_lockfiles::{ + self, BerryLockfile, LockfileData, NpmLockfile, Package, PnpmLockfile, Yarn1Lockfile, +}; use super::{proto, Buffer}; @@ -51,6 +53,7 @@ fn transitive_closure_inner(buf: Buffer) -> Result npm_transitive_closure_inner(request), proto::PackageManager::Berry => berry_transitive_closure_inner(request), proto::PackageManager::Pnpm => pnpm_transitive_closure_inner(request), + proto::PackageManager::Yarn => yarn_transitive_closure_inner(request), } } @@ -106,6 +109,23 @@ fn pnpm_transitive_closure_inner( Ok(dependencies.into()) } +fn yarn_transitive_closure_inner( + request: proto::TransitiveDepsRequest, +) -> Result { + let proto::TransitiveDepsRequest { + contents, + workspaces, + .. 
+ } = request; + let lockfile = + Yarn1Lockfile::from_bytes(contents.as_slice()).map_err(turborepo_lockfiles::Error::from)?; + let dependencies = turborepo_lockfiles::all_transitive_closures( + &lockfile, + workspaces.into_iter().map(|(k, v)| (k, v.into())).collect(), + )?; + Ok(dependencies.into()) +} + #[no_mangle] pub extern "C" fn subgraph(buf: Buffer) -> Buffer { use proto::subgraph_response::Response; @@ -141,6 +161,7 @@ fn subgraph_inner(buf: Buffer) -> Result, Error> { proto::PackageManager::Pnpm => { turborepo_lockfiles::pnpm_subgraph(&contents, &workspaces, &packages)? } + proto::PackageManager::Yarn => turborepo_lockfiles::yarn_subgraph(&contents, &packages)?, }; Ok(contents) } @@ -205,6 +226,7 @@ fn global_change_inner(buf: Buffer) -> Result { &request.prev_contents, &request.curr_contents, )?), + proto::PackageManager::Yarn => Ok(false), } } @@ -248,6 +270,7 @@ impl fmt::Display for proto::PackageManager { proto::PackageManager::Npm => "npm", proto::PackageManager::Berry => "berry", proto::PackageManager::Pnpm => "pnpm", + proto::PackageManager::Yarn => "yarn", }) } } diff --git a/crates/turborepo-fs/src/lib.rs b/crates/turborepo-fs/src/lib.rs index 1420d3e826628..94adf5267cf1c 100644 --- a/crates/turborepo-fs/src/lib.rs +++ b/crates/turborepo-fs/src/lib.rs @@ -45,13 +45,13 @@ pub fn recursive_copy( false }; - let suffix = AnchoredSystemPathBuf::new(src, &path)?; + let suffix = AnchoredSystemPathBuf::new(src, path)?; let target = dst.resolve(&suffix); if is_dir_or_symlink_to_dir { let src_metadata = entry.metadata()?; make_dir_copy(&target, &src_metadata)?; } else { - copy_file_with_type(&path, file_type, &target)?; + copy_file_with_type(path, file_type, &target)?; } } } @@ -130,7 +130,7 @@ mod tests { let err = copy_file(src_file, dst_file).unwrap_err(); let err = err.downcast::()?; - assert_eq!(err.is_io_error(io::ErrorKind::NotFound), true); + assert!(err.is_io_error(io::ErrorKind::NotFound)); Ok(()) } @@ -184,7 +184,7 @@ mod tests { copy_file(&src_file, &dst_file)?; assert_file_matches(&src_file, &dst_file); - assert_eq!(dst_file.is_readonly()?, true); + assert!(dst_file.is_readonly()?); Ok(()) } @@ -226,25 +226,25 @@ mod tests { let dst_child_path = dst_dir.join_component("child"); let dst_a_path = dst_child_path.join_component("a"); - assert_file_matches(&a_path, &dst_a_path); + assert_file_matches(&a_path, dst_a_path); let dst_b_path = dst_dir.join_component("b"); - assert_file_matches(&b_path, &dst_b_path); + assert_file_matches(&b_path, dst_b_path); let dst_link_path = dst_child_path.join_component("link"); - assert_target_matches(&dst_link_path, "../b"); + assert_target_matches(dst_link_path, "../b"); let dst_broken_path = dst_child_path.join_component("broken"); - assert_eq!(dst_broken_path.as_path().exists(), false); + assert!(!dst_broken_path.as_path().exists()); // Currently, we convert symlink-to-directory to empty-directory // This is very likely not ideal behavior, but leaving this test here to verify // that it is what we expect at this point in time. 
let dst_circle_path = dst_child_path.join_component("circle"); let dst_circle_metadata = fs::symlink_metadata(&dst_circle_path)?; - assert_eq!(dst_circle_metadata.is_dir(), true); + assert!(dst_circle_metadata.is_dir()); - let num_files = fs::read_dir(dst_circle_path.as_path())?.into_iter().count(); + let num_files = fs::read_dir(dst_circle_path.as_path())?.count(); assert_eq!(num_files, 0); Ok(()) diff --git a/crates/turborepo-globwalk/src/lib.rs b/crates/turborepo-globwalk/src/lib.rs index 4a3216032dbbd..864fdefc02479 100644 --- a/crates/turborepo-globwalk/src/lib.rs +++ b/crates/turborepo-globwalk/src/lib.rs @@ -4,6 +4,7 @@ mod empty_glob; use std::{ borrow::Cow, + collections::HashSet, io::ErrorKind, path::{Path, PathBuf}, }; @@ -11,7 +12,7 @@ use std::{ use empty_glob::InclusiveEmptyAny; use itertools::Itertools; use path_slash::PathExt; -use turbopath::{AbsoluteSystemPath, AbsoluteSystemPathBuf}; +use turbopath::{AbsoluteSystemPath, AbsoluteSystemPathBuf, PathError}; use wax::{Any, BuildError, Glob, Pattern}; #[derive(Debug, PartialEq, Clone, Copy)] @@ -50,6 +51,12 @@ pub enum WalkError { InvalidPath, #[error("walk error: {0}")] WalkError(#[from] walkdir::Error), + #[error(transparent)] + Path(#[from] PathError), + #[error(transparent)] + WaxWalk(#[from] wax::WalkError), + #[error("Internal error on glob {glob}: {error}")] + InternalError { glob: String, error: String }, } /// Performs a glob walk, yielding paths that _are_ included in the include list @@ -62,7 +69,7 @@ pub enum WalkError { /// - collapse the path, and calculate the new base_path, which defined as /// the longest common prefix of all the includes /// - traversing above the root of the base_path is not allowed -pub fn globwalk( +pub fn _globwalk( base_path: &AbsoluteSystemPath, include: &[String], exclude: &[String], @@ -244,8 +251,12 @@ fn collapse_path(path: &str) -> Option<(Cow, usize)> { for segment in path.trim_start_matches('/').split('/') { match segment { ".." => { - lowest_index.get_or_insert(stack.len()); stack.pop()?; + // Set this value post-pop so that we capture + // the remaining prefix, and not the segment we're + // about to remove. Note that this gets papered over + // below when we compare against the current stack length. + lowest_index.get_or_insert(stack.len()); changed = true; } "." => { @@ -273,6 +284,74 @@ fn collapse_path(path: &str) -> Option<(Cow, usize)> { } } +pub fn globwalk( + base_path: &AbsoluteSystemPath, + include: &[String], + exclude: &[String], + walk_type: WalkType, +) -> Result, WalkError> { + let (base_path_new, include_paths, exclude_paths) = + preprocess_paths_and_globs(base_path, include, exclude)?; + let inc_patterns = include_paths + .iter() + .map(|g| Glob::new(g.as_str()).map_err(|e| e.into())) + .collect::, WalkError>>()?; + let ex_patterns = exclude_paths + .iter() + .map(|g| Glob::new(g.as_str())) + .collect::, _>>()?; + + let result = inc_patterns + .into_iter() + .flat_map(|glob| { + // Check if the glob specifies an exact filename with no meta characters. + if let Some(prefix) = glob.variance().path() { + // We expect all of our globs to be absolute paths (asserted above) + assert!(prefix.is_absolute(), "Found relative glob path {}", glob); + // We're either going to return this path or nothing. 
Check if it's a directory + // and if we want directories + match AbsoluteSystemPathBuf::new(prefix).and_then(|path| { + let metadata = path.symlink_metadata()?; + Ok((path, metadata)) + }) { + Err(e) if e.is_io_error(ErrorKind::NotFound) => { + // If the file doesn't exist, it's not an error, there's just nothing to + // glob + vec![] + } + Err(e) => vec![Err(e.into())], + Ok((_, md)) if walk_type == WalkType::Files && md.is_dir() => { + vec![] + } + Ok((path, _)) => vec![Ok(path)], + } + } else { + glob.walk(&base_path_new) + .not(ex_patterns.iter().cloned()) + // Per docs, only fails if exclusion list is too large, since we're using + // pre-compiled globs + .unwrap_or_else(|e| { + panic!( + "Failed to compile exclusion globs: {:?}: {}", + ex_patterns, e, + ) + }) + .filter_map(|entry| match entry { + Ok(entry) if walk_type == WalkType::Files && entry.file_type().is_dir() => { + None + } + Ok(entry) => { + Some(AbsoluteSystemPathBuf::new(entry.path()).map_err(|e| e.into())) + } + Err(e) => Some(Err(e.into())), + }) + .collect::>() + } + }) + .collect::, WalkError>>()?; + Ok(result) +} + #[cfg(test)] mod test { use std::{collections::HashSet, path::Path}; @@ -508,7 +587,9 @@ mod test { #[test_case("**/*.txt", 1, 1 => matches None)] #[test_case("**/【*", 1, 1 => matches None)] // in the go implementation, broken-symlink is yielded, - // however in symlink mode, walkdir yields broken symlinks as errors + // however in symlink mode, walkdir yields broken symlinks as errors. + // Note that walkdir _always_ follows root symlinks. We handle this in the layer + // above wax. #[test_case("broken-symlink", 1, 1 => matches None ; "broken symlinks should be yielded")] // globs that match across a symlink should not follow the symlink #[test_case("working-symlink/c/*", 0, 0 => matches None ; "working symlink should not be followed")] @@ -552,11 +633,10 @@ mod test { let dir = setup(); let path = AbsoluteSystemPathBuf::new(dir.path()).unwrap(); - let (success, error): (Vec, Vec<_>) = - match super::globwalk(&path, &[pattern.into()], &[], crate::WalkType::All) { - Ok(e) => e.into_iter().partition_result(), - Err(e) => return Some(e), - }; + let success = match super::globwalk(&path, &[pattern.into()], &[], crate::WalkType::All) { + Ok(e) => e.into_iter(), + Err(e) => return Some(e), + }; assert_eq!( success.len(), @@ -1124,10 +1204,7 @@ mod test { (crate::WalkType::Files, expected_files), (crate::WalkType::All, expected), ] { - let (success, _): (Vec, Vec<_>) = - super::globwalk(&path, &include, &exclude, walk_type) - .unwrap() - .partition_result(); + let success = super::globwalk(&path, &include, &exclude, walk_type).unwrap(); let success = success .iter() @@ -1186,13 +1263,13 @@ mod test { )] fn glob_walk_err( files: &[&str], - base_path: &str, - include: &[&str], - exclude: &[&str], - expected: &[&str], - expected_files: &[&str], + _base_path: &str, + _include: &[&str], + _exclude: &[&str], + _expected: &[&str], + _expected_files: &[&str], ) { - let dir = setup_files(files); + let _dir = setup_files(files); } fn setup_files(files: &[&str]) -> tempdir::TempDir { @@ -1208,6 +1285,24 @@ mod test { tmp } + #[test] + fn test_directory_traversal() { + let files = &["root-file", "child/some-file"]; + let tmp = setup_files(files); + let root = AbsoluteSystemPathBuf::new(tmp.path()).unwrap(); + let child = root.join_component("child"); + let include = &["../*-file".to_string()]; + let exclude = &[]; + let iter = globwalk(&child, include, exclude, WalkType::Files) + .unwrap() + .into_iter(); + let results = 
iter + .map(|entry| root.anchor(entry).unwrap().to_str().unwrap().to_string()) + .collect::>(); + let expected = vec!["root-file".to_string()]; + assert_eq!(results, expected); + } + #[test] fn workspace_globbing() { let files = &[ @@ -1227,8 +1322,8 @@ mod test { let exclude = &["apps/ignored".to_string(), "**/node_modules/**".to_string()]; let iter = globwalk(&root, include, exclude, WalkType::Files).unwrap(); let paths = iter + .into_iter() .map(|path| { - let path = path.unwrap(); let relative = root.anchor(path).unwrap(); relative.to_str().unwrap().to_string() }) diff --git a/crates/turborepo-globwatch/src/lib.rs b/crates/turborepo-globwatch/src/lib.rs index 23746ba841dd5..5a1c24f3d7a0f 100644 --- a/crates/turborepo-globwatch/src/lib.rs +++ b/crates/turborepo-globwatch/src/lib.rs @@ -9,6 +9,7 @@ //! watch for a full round trip through the filesystem to ensure the watcher is //! up to date. +#![allow(clippy::all)] #![deny( missing_docs, missing_debug_implementations, @@ -294,7 +295,7 @@ impl WatchConfig { pub async fn include(&self, relative_to: &Path, glob: &str) -> Result<(), ConfigError> { trace!("including {:?}", glob); - glob_to_paths(&glob) + glob_to_paths(glob) .iter() .map(|p| relative_to.join(p)) .map(|p| { @@ -361,7 +362,7 @@ impl WatchConfig { pub async fn exclude(&self, relative_to: &Path, glob: &str) { trace!("excluding {:?}", glob); - for p in glob_to_paths(&glob).iter().map(|p| relative_to.join(p)) { + for p in glob_to_paths(glob).iter().map(|p| relative_to.join(p)) { // we don't care if this fails, it's just a best-effort self.watcher .lock() @@ -442,7 +443,7 @@ fn glob_to_paths(glob: &str) -> Vec { let chunks = glob_to_symbols(glob).group_by(|s| s != &GlobSymbol::PathSeperator); let chunks = chunks .into_iter() - .filter_map(|(not_sep, chunk)| (not_sep).then(|| chunk)); + .filter_map(|(not_sep, chunk)| (not_sep).then_some(chunk)); // multi cartisian product allows us to get all the possible combinations // of path components for each chunk. 
for example, if we have a glob @@ -455,10 +456,10 @@ fn glob_to_paths(glob: &str) -> Vec { chunks .map(symbols_to_combinations) // yield all the possible segments for each glob chunk .take_while(|c| c.is_some()) // if any segment has no possible paths, we can stop - .filter_map(|chunk| chunk) + .flatten() .multi_cartesian_product() // get all the possible combinations of path segments .map(|chunks| { - let prefix = if glob.starts_with("/") { "/" } else { "" }; + let prefix = if glob.starts_with('/') { "/" } else { "" }; std::iter::once(prefix) .chain(chunks.iter().map(|s| s.as_str())) .collect::() diff --git a/crates/turborepo-lib/Cargo.toml b/crates/turborepo-lib/Cargo.toml index 67995d6d38625..063d33be7a60a 100644 --- a/crates/turborepo-lib/Cargo.toml +++ b/crates/turborepo-lib/Cargo.toml @@ -27,7 +27,7 @@ pretty_assertions = { workspace = true } rand = { workspace = true } tempdir = "0.3.7" tempfile = { workspace = true } -test-case = "3.0.0" +test-case = { workspace = true } tracing-test = { version = "0.2.4", features = ["no-env-filter"] } tracing.workspace = true vercel-api-mock = { workspace = true } @@ -67,7 +67,7 @@ semver = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } serde_yaml = { workspace = true } -sha2 = "0.10.6" +sha2 = { workspace = true } shared_child = "1.0.0" sysinfo = "0.27.7" thiserror = "1.0.38" @@ -98,7 +98,9 @@ tracing.workspace = true turbo-updater = { workspace = true } turbopath = { workspace = true } turborepo-api-client = { workspace = true } -wax.workspace = true +turborepo-env = { workspace = true } +turborepo-lockfiles = { workspace = true } +wax = { workspace = true } webbrowser = { workspace = true } which = { workspace = true } diff --git a/crates/turborepo-lib/src/cli.rs b/crates/turborepo-lib/src/cli.rs index 0759c410af1bd..3546d549be918 100644 --- a/crates/turborepo-lib/src/cli.rs +++ b/crates/turborepo-lib/src/cli.rs @@ -299,7 +299,7 @@ pub enum Command { #[clap(subcommand)] #[serde(skip)] - command: Option, + command: Option>, }, /// Login to your Vercel account Login { @@ -538,7 +538,7 @@ pub enum LogPrefix { #[tokio::main] pub async fn run( repo_state: Option, - logger: &TurboSubscriber, + _logger: &TurboSubscriber, ui: UI, ) -> Result { let mut cli_args = Args::new()?; @@ -675,7 +675,10 @@ pub async fn run( generate::run(tag, command, &args)?; Ok(Payload::Rust(Ok(0))) } - Command::Daemon { command, idle_time } => { + Command::Daemon { + command, + idle_time: _, + } => { let base = CommandBase::new(cli_args.clone(), repo_root, version, ui)?; match command { diff --git a/crates/turborepo-lib/src/commands/generate.rs b/crates/turborepo-lib/src/commands/generate.rs index 97676dfcbc7ee..bf17b73a87ecf 100644 --- a/crates/turborepo-lib/src/commands/generate.rs +++ b/crates/turborepo-lib/src/commands/generate.rs @@ -31,26 +31,18 @@ fn call_turbo_gen(command: &str, tag: &String, raw_args: &str) -> Result { pub fn run( tag: &String, - command: &Option, + command: &Option>, args: &GeneratorCustomArgs, ) -> Result<()> { - match command { - // check if a subcommand was passed - Some(command) => { - if let GenerateCommand::Workspace(workspace_args) = command { - let raw_args = serde_json::to_string(&workspace_args)?; - call_turbo_gen("workspace", tag, &raw_args)?; - } else { - let raw_args = serde_json::to_string(&args)?; - call_turbo_gen("run", tag, &raw_args)?; - } - } + // check if a subcommand was passed + if let Some(box GenerateCommand::Workspace(workspace_args)) = command { + let raw_args = 
serde_json::to_string(&workspace_args)?; + call_turbo_gen("workspace", tag, &raw_args)?; + } else { // if no subcommand was passed, run the generate command as default - None => { - let raw_args = serde_json::to_string(&args)?; - call_turbo_gen("run", tag, &raw_args)?; - } - }; + let raw_args = serde_json::to_string(&args)?; + call_turbo_gen("run", tag, &raw_args)?; + } Ok(()) } diff --git a/crates/turborepo-lib/src/config/repo.rs b/crates/turborepo-lib/src/config/repo.rs index 439142694c320..2471fd8be2dd6 100644 --- a/crates/turborepo-lib/src/config/repo.rs +++ b/crates/turborepo-lib/src/config/repo.rs @@ -89,7 +89,7 @@ impl RepoConfig { } fn write_to_disk(&self) -> Result<()> { - write_to_disk(&self.path.as_path(), &self.disk_config) + write_to_disk(self.path.as_path(), &self.disk_config) } } diff --git a/crates/turborepo-lib/src/config/turbo.rs b/crates/turborepo-lib/src/config/turbo.rs index 24e9219755140..777518581d9aa 100644 --- a/crates/turborepo-lib/src/config/turbo.rs +++ b/crates/turborepo-lib/src/config/turbo.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -use crate::{opts::RemoteCacheOpts, run::pipeline::Pipeline}; +use crate::{opts::RemoteCacheOpts, task_graph::Pipeline}; #[derive(Serialize, Deserialize, Debug, Default)] #[serde(rename_all = "camelCase")] @@ -17,6 +17,7 @@ pub struct TurboJson { other: serde_json::Value, pub(crate) remote_cache_opts: Option, pub space_id: Option, + #[allow(dead_code)] pub pipeline: Pipeline, #[serde(skip_serializing_if = "Option::is_none")] pub experimental_spaces: Option, diff --git a/crates/turborepo-lib/src/config/user.rs b/crates/turborepo-lib/src/config/user.rs index 487bb1b3485fc..7df4627f4031a 100644 --- a/crates/turborepo-lib/src/config/user.rs +++ b/crates/turborepo-lib/src/config/user.rs @@ -145,7 +145,7 @@ mod test { Ok(()) } - static TOKEN_ENV_VARS: [&'static str; 2] = ["TURBO_TOKEN", "VERCEL_ARTIFACTS_TOKEN"]; + static TOKEN_ENV_VARS: [&str; 2] = ["TURBO_TOKEN", "VERCEL_ARTIFACTS_TOKEN"]; #[test] fn test_env_var_trumps_disk() -> Result<()> { diff --git a/crates/turborepo-lib/src/daemon/bump_timeout.rs b/crates/turborepo-lib/src/daemon/bump_timeout.rs index 97ced3343edda..f3a884d338460 100644 --- a/crates/turborepo-lib/src/daemon/bump_timeout.rs +++ b/crates/turborepo-lib/src/daemon/bump_timeout.rs @@ -18,6 +18,7 @@ pub struct BumpTimeout { } impl BumpTimeout { + #[allow(dead_code)] pub fn new(increment: Duration) -> Self { let start = Instant::now(); let millis = increment.as_millis(); @@ -49,12 +50,14 @@ impl BumpTimeout { .store(duration.as_millis() as u64, Ordering::Relaxed); } + #[allow(dead_code)] pub fn as_instant(&self) -> Instant { self.start + self.duration() } /// Waits until the deadline is reached, but if the deadline is /// changed while waiting, it will wait until the new deadline is reached. 
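// A minimal sketch of the bumpable deadline described in the doc comment above
// (simplified, hypothetical struct; the real BumpTimeout similarly stores
// milliseconds in an atomic and re-checks the deadline in its wait loop after
// every sleep):
use std::sync::atomic::{AtomicU64, Ordering};
use std::time::{Duration, Instant};

struct Bumpable {
    start: Instant,
    millis: AtomicU64,
}

impl Bumpable {
    fn deadline(&self) -> Instant {
        self.start + Duration::from_millis(self.millis.load(Ordering::Relaxed))
    }

    // Bumping only moves the stored deadline; an in-flight waiter observes the
    // new value on its next loop iteration and keeps waiting.
    fn bump(&self, extra: Duration) {
        self.millis
            .fetch_add(extra.as_millis() as u64, Ordering::Relaxed);
    }
}

fn main() {
    let t = Bumpable { start: Instant::now(), millis: AtomicU64::new(10) };
    let before = t.deadline();
    t.bump(Duration::from_millis(50));
    assert!(t.deadline() > before);
}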
+ #[allow(dead_code)] pub async fn wait(&self) { let mut deadline = self.as_instant(); loop { diff --git a/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs b/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs index 1cc8f5fba6447..abe8b58669e3c 100644 --- a/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs +++ b/crates/turborepo-lib/src/daemon/bump_timeout_layer.rs @@ -14,6 +14,7 @@ use super::bump_timeout::BumpTimeout; pub struct BumpTimeoutLayer(Arc); impl BumpTimeoutLayer { + #[allow(dead_code)] pub fn new(timeout: Arc) -> Self { Self(timeout) } diff --git a/crates/turborepo-lib/src/daemon/client.rs b/crates/turborepo-lib/src/daemon/client.rs index 37b6b2b8dca54..db5628bd7897a 100644 --- a/crates/turborepo-lib/src/daemon/client.rs +++ b/crates/turborepo-lib/src/daemon/client.rs @@ -147,6 +147,7 @@ pub enum DaemonError { DaemonConnect(#[from] DaemonConnectorError), /// The timeout specified was invalid. #[error("invalid timeout specified ({0})")] + #[allow(dead_code)] InvalidTimeout(String), /// The server is unable to start file watching. #[error("unable to start file watching")] diff --git a/crates/turborepo-lib/src/daemon/server.rs b/crates/turborepo-lib/src/daemon/server.rs index f6c8d2222bdad..c7f92199da971 100644 --- a/crates/turborepo-lib/src/daemon/server.rs +++ b/crates/turborepo-lib/src/daemon/server.rs @@ -47,14 +47,17 @@ use crate::{ }; pub struct DaemonServer { + #[allow(dead_code)] daemon_root: AbsoluteSystemPathBuf, log_file: AbsoluteSystemPathBuf, start_time: Instant, + #[allow(dead_code)] timeout: Arc, watcher: Arc>, shutdown: Mutex>>, + #[allow(dead_code)] shutdown_rx: Option>, running: Arc, @@ -63,6 +66,7 @@ pub struct DaemonServer { } #[derive(Debug)] +#[allow(dead_code)] pub enum CloseReason { Timeout, Shutdown, @@ -305,7 +309,7 @@ impl proto::turbod_server::Turbod for DaemonServer< match changed { Ok(changed) => Ok(tonic::Response::new(proto::GetChangedOutputsResponse { changed_output_globs: changed.into_iter().collect(), - time_saved: time_saved, + time_saved, })), Err(e) => { error!("flush directory operation failed: {:?}", e); diff --git a/crates/turborepo-lib/src/globwatcher/mod.rs b/crates/turborepo-lib/src/globwatcher/mod.rs index f618c74da0b7d..b63b39977305e 100644 --- a/crates/turborepo-lib/src/globwatcher/mod.rs +++ b/crates/turborepo-lib/src/globwatcher/mod.rs @@ -36,6 +36,7 @@ pub struct HashGlobWatcher { /// maps a glob to the hashes for which this glob hasn't changed glob_statuses: Arc>>>, + #[allow(dead_code)] watcher: Arc>>, config: WatchConfig, } @@ -269,6 +270,8 @@ impl HashGlobWatcher { /// /// note: we take a mutex guard to make sure that the mutex is dropped /// when the function returns +#[allow(dead_code)] +#[allow(clippy::type_complexity)] fn populate_hash_globs<'a>( glob_statuses: &MutexGuard>>, repo_relative_paths: impl Iterator + Clone, diff --git a/crates/turborepo-lib/src/lib.rs b/crates/turborepo-lib/src/lib.rs index ebec7236037e8..2fa4036a357ed 100644 --- a/crates/turborepo-lib/src/lib.rs +++ b/crates/turborepo-lib/src/lib.rs @@ -12,10 +12,12 @@ mod execution_state; pub(crate) mod globwatcher; mod manager; mod opts; +mod package_graph; mod package_json; mod package_manager; mod run; mod shim; +mod task_graph; mod tracing; mod ui; diff --git a/crates/turborepo-lib/src/opts.rs b/crates/turborepo-lib/src/opts.rs index fbbd0a5427dcd..228c67450fb81 100644 --- a/crates/turborepo-lib/src/opts.rs +++ b/crates/turborepo-lib/src/opts.rs @@ -71,7 +71,9 @@ pub struct RunOpts<'a> { tasks: &'a [String], concurrency: u32, parallel: bool, - 
env_mode: EnvMode, + pub(crate) env_mode: EnvMode, + // Whether or not to infer the framework for each workspace. + pub(crate) framework_inference: bool, profile: Option<&'a str>, continue_on_error: bool, passthrough_args: &'a [String], @@ -111,6 +113,7 @@ impl<'a> TryFrom<&'a RunArgs> for RunOpts<'a> { log_prefix: args.log_prefix, summarize: args.summarize, experimental_space_id: args.experimental_space_id.clone(), + framework_inference: args.framework_inference, env_mode: args.env_mode, concurrency, parallel: args.parallel, diff --git a/crates/turborepo-lib/src/run/package_graph.rs b/crates/turborepo-lib/src/package_graph/mod.rs similarity index 61% rename from crates/turborepo-lib/src/run/package_graph.rs rename to crates/turborepo-lib/src/package_graph/mod.rs index b021b799fd8d9..ada88d61e120a 100644 --- a/crates/turborepo-lib/src/run/package_graph.rs +++ b/crates/turborepo-lib/src/package_graph/mod.rs @@ -2,20 +2,28 @@ use std::rc::Rc; use anyhow::Result; use turbopath::AbsoluteSystemPathBuf; +use turborepo_lockfiles::Lockfile; -use crate::{package_json::PackageJson, run::graph::WorkspaceCatalog}; +use crate::{package_json::PackageJson, package_manager::PackageManager}; + +#[derive(Default)] +pub struct WorkspaceCatalog {} pub struct PackageGraph { pub workspace_graph: Rc>, pub workspace_infos: Rc, + pub package_manager: PackageManager, + pub lockfile: Box, } impl PackageGraph { - pub fn build_single_package_graph(_root_package_json: PackageJson) -> Result { + pub fn build_single_package_graph(_root_package_json: &PackageJson) -> Result { // TODO Ok(PackageGraph { workspace_graph: Rc::new(petgraph::Graph::new()), workspace_infos: Rc::new(WorkspaceCatalog::default()), + package_manager: PackageManager::Npm, + lockfile: Box::new(turborepo_lockfiles::NpmLockfile::default()), }) } @@ -27,6 +35,8 @@ impl PackageGraph { Ok(PackageGraph { workspace_graph: Rc::new(petgraph::Graph::new()), workspace_infos: Rc::new(WorkspaceCatalog::default()), + package_manager: PackageManager::Npm, + lockfile: Box::new(turborepo_lockfiles::NpmLockfile::default()), }) } diff --git a/crates/turborepo-lib/src/package_manager/mod.rs b/crates/turborepo-lib/src/package_manager/mod.rs index 97c919aa384f6..1eecb2e62775e 100644 --- a/crates/turborepo-lib/src/package_manager/mod.rs +++ b/crates/turborepo-lib/src/package_manager/mod.rs @@ -163,6 +163,7 @@ pub struct NoPackageManager; impl NoPackageManager { // TODO: determine how to thread through user-friendly error message and apply // our UI + #[allow(dead_code)] pub fn ui_display(&self, ui: &UI) -> String { let url = ui.apply(UNDERLINE.apply_to("https://nodejs.org/api/packages.html#packagemanager")); @@ -210,6 +211,12 @@ impl From<&PackageManager> for MissingWorkspaceError { } } +impl From for Error { + fn from(value: wax::BuildError) -> Self { + Self::Wax(Box::new(value), backtrace::Backtrace::capture()) + } +} + #[derive(Debug, Error)] pub enum Error { #[error("io error: {0}")] @@ -221,7 +228,7 @@ pub enum Error { #[error("json parsing error: {0}")] ParsingJson(#[from] serde_json::Error, #[backtrace] backtrace::Backtrace), #[error("globbing error: {0}")] - Wax(#[from] wax::BuildError, #[backtrace] backtrace::Backtrace), + Wax(Box, #[backtrace] backtrace::Backtrace), #[error(transparent)] Other(#[from] anyhow::Error), #[error(transparent)] @@ -383,22 +390,20 @@ impl PackageManager { } } + #[allow(dead_code)] pub fn get_package_jsons( &self, repo_root: &AbsoluteSystemPath, - ) -> Result, Error> { + ) -> Result, Error> { let globs = 
self.get_workspace_globs(repo_root)?; - let walker = globwalk::globwalk( + let files = globwalk::globwalk( repo_root, &globs.package_json_inclusions, &globs.raw_exclusions, globwalk::WalkType::Files, )?; - let items = walker - .map(|result| result.map_err(|e| e.into())) - .collect::, Error>>()?; - Ok(items) + Ok(files.into_iter()) } } diff --git a/crates/turborepo-lib/src/package_manager/npm.rs b/crates/turborepo-lib/src/package_manager/npm.rs index 5bf8c87b80c1b..adcee58e5aa75 100644 --- a/crates/turborepo-lib/src/package_manager/npm.rs +++ b/crates/turborepo-lib/src/package_manager/npm.rs @@ -54,7 +54,7 @@ mod tests { let repo_root_path = AbsoluteSystemPathBuf::new(repo_root.path())?; let lockfile_path = repo_root.path().join(LOCKFILE); - File::create(&lockfile_path)?; + File::create(lockfile_path)?; let package_manager = PackageManager::detect_package_manager(&repo_root_path)?; assert_eq!(package_manager, PackageManager::Npm); diff --git a/crates/turborepo-lib/src/package_manager/pnpm.rs b/crates/turborepo-lib/src/package_manager/pnpm.rs index e16375402e869..08645bd3245c5 100644 --- a/crates/turborepo-lib/src/package_manager/pnpm.rs +++ b/crates/turborepo-lib/src/package_manager/pnpm.rs @@ -59,7 +59,7 @@ mod tests { let repo_root = tempdir()?; let repo_root_path = AbsoluteSystemPathBuf::new(repo_root.path())?; let lockfile_path = repo_root.path().join(LOCKFILE); - File::create(&lockfile_path)?; + File::create(lockfile_path)?; let package_manager = PackageManager::detect_package_manager(&repo_root_path)?; assert_eq!(package_manager, PackageManager::Pnpm); diff --git a/crates/turborepo-lib/src/package_manager/yarn.rs b/crates/turborepo-lib/src/package_manager/yarn.rs index 6aa67304ffe62..933ce988d5a65 100644 --- a/crates/turborepo-lib/src/package_manager/yarn.rs +++ b/crates/turborepo-lib/src/package_manager/yarn.rs @@ -92,7 +92,7 @@ mod tests { let repo_root_path = AbsoluteSystemPathBuf::new(repo_root.path())?; let yarn_lock_path = repo_root.path().join(LOCKFILE); - File::create(&yarn_lock_path)?; + File::create(yarn_lock_path)?; let mut detector = YarnDetector::new(&repo_root_path); detector.set_version_override("1.22.10".parse()?); diff --git a/crates/turborepo-lib/src/run/global_hash.rs b/crates/turborepo-lib/src/run/global_hash.rs new file mode 100644 index 0000000000000..de92413e0e823 --- /dev/null +++ b/crates/turborepo-lib/src/run/global_hash.rs @@ -0,0 +1,69 @@ +use std::collections::HashMap; + +use anyhow::Result; +use turbopath::{AbsoluteSystemPath, RelativeUnixPathBuf}; +use turborepo_env::{BySource, DetailedMap, EnvironmentVariableMap}; +use turborepo_lockfiles::Lockfile; + +use crate::{cli::EnvMode, package_json::PackageJson, package_manager::PackageManager, ui::UI}; + +static DEFAULT_ENV_VARS: [&str; 1] = ["VERCEL_ANALYTICS_ID"]; + +#[derive(Default)] +pub struct GlobalHashableInputs { + global_cache_key: &'static str, + global_file_hash_map: HashMap, + root_external_deps_hash: String, + env: Vec, + // Only Option to allow #[derive(Default)] + resolved_env_vars: Option, + pass_through_env: Vec, + env_mode: EnvMode, + framework_inference: bool, + dot_env: Vec, +} + +pub fn get_global_hash_inputs( + _ui: &UI, + _root_path: &AbsoluteSystemPath, + _root_package_json: &PackageJson, + _package_manager: PackageManager, + _lockfile: Box, + _global_file_dependencies: Vec, + env_at_execution_start: &EnvironmentVariableMap, + global_env: Vec, + _global_pass_through_env: Vec, + _env_mode: EnvMode, + _framework_inference: bool, + _dot_env: Vec, +) -> Result { + let default_env_var_map = 
env_at_execution_start.from_wildcards(&DEFAULT_ENV_VARS[..])?; + + let user_env_var_set = env_at_execution_start.from_wildcards_unresolved(&global_env)?; + + let mut all_env_var_map = EnvironmentVariableMap::default(); + all_env_var_map.union(&user_env_var_set.inclusions); + all_env_var_map.union(&default_env_var_map); + all_env_var_map.difference(&user_env_var_set.exclusions); + + let mut explicit_env_var_map = EnvironmentVariableMap::default(); + explicit_env_var_map.union(&user_env_var_set.inclusions); + explicit_env_var_map.difference(&user_env_var_set.exclusions); + + let mut matching_env_var_map = EnvironmentVariableMap::default(); + matching_env_var_map.union(&default_env_var_map); + matching_env_var_map.difference(&user_env_var_set.exclusions); + + let global_hashable_env_vars = DetailedMap { + all: all_env_var_map, + by_source: BySource { + explicit: explicit_env_var_map, + matching: matching_env_var_map, + }, + }; + + Ok(GlobalHashableInputs { + resolved_env_vars: Some(global_hashable_env_vars), + ..GlobalHashableInputs::default() + }) +} diff --git a/crates/turborepo-lib/src/run/graph.rs b/crates/turborepo-lib/src/run/graph.rs index 41666be8097f1..3b3b81da86384 100644 --- a/crates/turborepo-lib/src/run/graph.rs +++ b/crates/turborepo-lib/src/run/graph.rs @@ -5,7 +5,8 @@ use turbopath::AbsoluteSystemPath; use crate::{ config::TurboJson, - run::pipeline::{Pipeline, TaskDefinition}, + package_graph::WorkspaceCatalog, + task_graph::{Pipeline, TaskDefinition}, }; pub struct CompleteGraph<'run> { @@ -52,8 +53,5 @@ impl<'run> CompleteGraph<'run> { } } -#[derive(Default)] -pub struct WorkspaceCatalog {} - #[derive(Default)] pub struct TaskHashTracker {} diff --git a/crates/turborepo-lib/src/run/mod.rs b/crates/turborepo-lib/src/run/mod.rs index d86095a31d21b..9b44bfebdc91a 100644 --- a/crates/turborepo-lib/src/run/mod.rs +++ b/crates/turborepo-lib/src/run/mod.rs @@ -1,22 +1,23 @@ #![allow(dead_code)] -mod graph; -mod package_graph; -pub mod pipeline; +mod global_hash; +pub mod graph; mod scope; mod task_id; use anyhow::{Context as ErrorContext, Result}; use graph::CompleteGraph; use tracing::{debug, info}; +use turborepo_env::EnvironmentVariableMap; use crate::{ commands::CommandBase, daemon::DaemonConnector, manager::Manager, opts::Opts, + package_graph::PackageGraph, package_json::PackageJson, - run::{package_graph::PackageGraph, task_id::ROOT_PKG_NAME}, + run::{global_hash::get_global_hash_inputs, task_id::ROOT_PKG_NAME}, }; #[derive(Debug)] @@ -36,7 +37,7 @@ impl Run { } fn opts(&self) -> Result { - Ok(self.base.args().try_into()?) + self.base.args().try_into() } pub async fn run(&mut self) -> Result<()> { @@ -50,7 +51,7 @@ impl Run { let _is_structured_output = opts.run_opts.graph_dot || opts.run_opts.dry_run_json; let pkg_dep_graph = if opts.run_opts.single_package { - PackageGraph::build_single_package_graph(root_package_json)? + PackageGraph::build_single_package_graph(&root_package_json)? } else { PackageGraph::build_multi_package_graph(&self.base.repo_root, &root_package_json)? 
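The `union`/`difference` calls in `get_global_hash_inputs` above are plain set algebra over name-to-value maps: all = (user inclusions ∪ defaults) − user exclusions. A toy sketch of the same shape; `EnvMap` is an invented stand-in for the real `EnvironmentVariableMap` API:

```rust
use std::collections::BTreeMap;

#[derive(Default, Debug)]
struct EnvMap(BTreeMap<String, String>);

impl EnvMap {
    fn insert(&mut self, k: &str, v: &str) {
        self.0.insert(k.to_string(), v.to_string());
    }

    // Copy every entry from `other`, overwriting duplicates.
    fn union(&mut self, other: &EnvMap) {
        for (k, v) in &other.0 {
            self.0.insert(k.clone(), v.clone());
        }
    }

    // Drop every key that appears in `other`.
    fn difference(&mut self, other: &EnvMap) {
        for k in other.0.keys() {
            self.0.remove(k);
        }
    }
}

fn main() {
    let mut defaults = EnvMap::default();
    defaults.insert("VERCEL_ANALYTICS_ID", "abc");
    let mut inclusions = EnvMap::default();
    inclusions.insert("MY_API_URL", "https://example.test");
    let mut exclusions = EnvMap::default();
    exclusions.insert("VERCEL_ANALYTICS_ID", "abc");

    // all = (user inclusions ∪ defaults) − user exclusions, as in the hunk above.
    let mut all = EnvMap::default();
    all.union(&inclusions);
    all.union(&defaults);
    all.difference(&exclusions);

    assert!(all.0.contains_key("MY_API_URL"));
    assert!(!all.0.contains_key("VERCEL_ANALYTICS_ID"));
}
```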
}; @@ -105,6 +106,24 @@ impl Run { } } + let env_at_execution_start = EnvironmentVariableMap::infer(); + + let _global_hash_inputs = get_global_hash_inputs( + &self.base.ui, + &self.base.repo_root, + &root_package_json, + pkg_dep_graph.package_manager, + pkg_dep_graph.lockfile, + // TODO: Fill in these vec![] once turbo.json is ported + vec![], + &env_at_execution_start, + vec![], + vec![], + opts.run_opts.env_mode, + opts.run_opts.framework_inference, + vec![], + )?; + Ok(()) } } diff --git a/crates/turborepo-lib/src/run/scope.rs b/crates/turborepo-lib/src/run/scope.rs index f78d97e2f0222..ba4c07a57a4e7 100644 --- a/crates/turborepo-lib/src/run/scope.rs +++ b/crates/turborepo-lib/src/run/scope.rs @@ -3,7 +3,7 @@ use std::collections::HashSet; use anyhow::Result; use tracing::warn; -use crate::{commands::CommandBase, opts::ScopeOpts, run::package_graph}; +use crate::{commands::CommandBase, opts::ScopeOpts, package_graph}; pub fn resolve_packages( _opts: &ScopeOpts, diff --git a/crates/turborepo-lib/src/shim.rs b/crates/turborepo-lib/src/shim.rs index d8d85d9165652..c19e790fdfae9 100644 --- a/crates/turborepo-lib/src/shim.rs +++ b/crates/turborepo-lib/src/shim.rs @@ -684,7 +684,21 @@ impl RepoState { let child = spawn_child(command)?; - let exit_code = child.wait()?.code().unwrap_or(2); + let exit_status = child.wait()?; + let exit_code = exit_status.code().unwrap_or_else(|| { + debug!("go-turbo failed to report exit code"); + #[cfg(unix)] + { + use std::os::unix::process::ExitStatusExt; + let signal = exit_status.signal(); + let core_dumped = exit_status.core_dumped(); + debug!( + "go-turbo caught signal {:?}. Core dumped? {}", + signal, core_dumped + ); + } + 2 + }); Ok(exit_code) } @@ -1050,7 +1064,7 @@ mod test { description: "Nested non-monorepo packages, turbo.json primacy.", infer_infos: vec![ InferInfo { - path: project_two.clone(), + path: project_two, has_package_json: true, has_turbo_json: false, workspace_globs: None, diff --git a/crates/turborepo-lib/src/run/pipeline.rs b/crates/turborepo-lib/src/task_graph/mod.rs similarity index 88% rename from crates/turborepo-lib/src/run/pipeline.rs rename to crates/turborepo-lib/src/task_graph/mod.rs index 8e4c1b9b685cc..8882b630ef08d 100644 --- a/crates/turborepo-lib/src/run/pipeline.rs +++ b/crates/turborepo-lib/src/task_graph/mod.rs @@ -33,17 +33,17 @@ struct TaskOutputs { enum TaskOutputMode { // FullTaskOutput will show all task output #[default] - FullTaskOutput, - // NoTaskOutput will hide all task output - NoTaskOutput, - // HashTaskOutput will display turbo-computed task hashes - HashTaskOutput, - // NewTaskOutput will show all new task output and turbo-computed task hashes for cached + Full, + // None will hide all task output + None, + // Hash will display turbo-computed task hashes + Hash, + // New will show all new task output and turbo-computed task hashes for cached // output - NewTaskOutput, - // ErrorTaskOutput will show task output for failures only; no cache miss/hit messages are + New, + // Error will show task output for failures only; no cache miss/hit messages are // emitted - ErrorTaskOutput, + Error, } // taskDefinitionHashable exists as a definition for PristinePipeline, which is diff --git a/crates/turborepo-lib/src/tracing.rs b/crates/turborepo-lib/src/tracing.rs index 66c1e1b548039..96f53d5bc456a 100644 --- a/crates/turborepo-lib/src/tracing.rs +++ b/crates/turborepo-lib/src/tracing.rs @@ -41,10 +41,12 @@ type DaemonLog = tracing_subscriber::fmt::Layer< type Layered = tracing_subscriber::layer::Layered; pub 
struct TurboSubscriber { + #[allow(dead_code)] update: Handle, Layered>, /// The non-blocking file logger only continues to log while this guard is /// held. We keep it here so that it doesn't get dropped. + #[allow(dead_code)] guard: Mutex>, #[cfg(feature = "tracing-chrome")] diff --git a/crates/turborepo-lockfiles/fixtures/yarn1.lock b/crates/turborepo-lockfiles/fixtures/yarn1.lock new file mode 100644 index 0000000000000..fbfae8b3b786b --- /dev/null +++ b/crates/turborepo-lockfiles/fixtures/yarn1.lock @@ -0,0 +1,50 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +nextjs@^0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/nextjs/-/nextjs-0.0.3.tgz#4f4d1d6a257be920d9b9649d4d9522c724a4e543" + integrity sha512-mYbDUo4/sRAZ8TqK63PCpYnFiLg7BICG/ot9+guOrUKd4/Fo71ZmEQ41IZbH6nqbQvG7SXTBuofJXAIWfNho0w== + +turbo-darwin-64@1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo-darwin-64/-/turbo-darwin-64-1.9.3.tgz#29470b902a1418dae8a88b2620caf917b27480bc" + integrity sha512-0dFc2cWXl82kRE4Z+QqPHhbEFEpUZho1msHXHWbz5+PqLxn8FY0lEVOHkq5tgKNNEd5KnGyj33gC/bHhpZOk5g== + +turbo-darwin-arm64@1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo-darwin-arm64/-/turbo-darwin-arm64-1.9.3.tgz#0eb404d6101ba69eab8522b16260a4eb50885e6c" + integrity sha512-1cYbjqLBA2zYE1nbf/qVnEkrHa4PkJJbLo7hnuMuGM0bPzh4+AnTNe98gELhqI1mkTWBu/XAEeF5u6dgz0jLNA== + +turbo-linux-64@1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo-linux-64/-/turbo-linux-64-1.9.3.tgz#dbce8fd50edee1319f17800ee38e7c4749ab0cb0" + integrity sha512-UuBPFefawEwpuxh5pM9Jqq3q4C8M0vYxVYlB3qea/nHQ80pxYq7ZcaLGEpb10SGnr3oMUUs1zZvkXWDNKCJb8Q== + +turbo-linux-arm64@1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo-linux-arm64/-/turbo-linux-arm64-1.9.3.tgz#636b77fde17c7a5cdef8a20616ff57f08c785345" + integrity sha512-vUrNGa3hyDtRh9W0MkO+l1dzP8Co2gKnOVmlJQW0hdpOlWlIh22nHNGGlICg+xFa2f9j4PbQlWTsc22c019s8Q== + +turbo-windows-64@1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo-windows-64/-/turbo-windows-64-1.9.3.tgz#c65625c222456161b0b4d000ec7f50e372332825" + integrity sha512-0BZ7YaHs6r+K4ksqWus1GKK3W45DuDqlmfjm/yuUbTEVc8szmMCs12vugU2Zi5GdrdJSYfoKfEJ/PeegSLIQGQ== + +turbo-windows-arm64@1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo-windows-arm64/-/turbo-windows-arm64-1.9.3.tgz#86e105692ad6ba935eff0284522bdf7728a2e517" + integrity sha512-QJUYLSsxdXOsR1TquiOmLdAgtYcQ/RuSRpScGvnZb1hY0oLc7JWU0llkYB81wVtWs469y8H9O0cxbKwCZGR4RQ== + +turbo@^1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/turbo/-/turbo-1.9.3.tgz#911012624f647f98d9788a08e25b98e38cdd48b2" + integrity sha512-ID7mxmaLUPKG/hVkp+h0VuucB1U99RPCJD9cEuSEOdIPoSIuomcIClEJtKamUsdPLhLCud+BvapBNnhgh58Nzw== + optionalDependencies: + turbo-darwin-64 "1.9.3" + turbo-darwin-arm64 "1.9.3" + turbo-linux-64 "1.9.3" + turbo-linux-arm64 "1.9.3" + turbo-windows-64 "1.9.3" + turbo-windows-arm64 "1.9.3" diff --git a/cli/internal/lockfile/testdata/yarn.lock b/crates/turborepo-lockfiles/fixtures/yarn1full.lock similarity index 100% rename from cli/internal/lockfile/testdata/yarn.lock rename to crates/turborepo-lockfiles/fixtures/yarn1full.lock diff --git a/crates/turborepo-lockfiles/src/error.rs b/crates/turborepo-lockfiles/src/error.rs index ef3326649acf0..f0fbfbacdc706 100644 --- a/crates/turborepo-lockfiles/src/error.rs +++ b/crates/turborepo-lockfiles/src/error.rs @@ -16,4 +16,6 @@ pub enum Error { UnsupportedNpmVersion, 
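The `guard` field in `TurboSubscriber` above is load-bearing: `tracing_appender`'s non-blocking writer logs on a background thread that shuts down when its `WorkerGuard` is dropped, so whoever owns the subscriber must also own the guard. A minimal sketch of the pattern, assuming the `tracing`, `tracing-subscriber`, and `tracing-appender` crates:

```rust
use tracing_appender::non_blocking::WorkerGuard;

struct Logger {
    // Dropping this guard stops the background writer thread, so we keep it
    // alive alongside the subscriber instead of letting it fall out of scope.
    _guard: WorkerGuard,
}

fn init() -> Logger {
    let (writer, guard) = tracing_appender::non_blocking(std::io::stdout());
    tracing_subscriber::fmt().with_writer(writer).init();
    Logger { _guard: guard }
}

fn main() {
    let _logger = init();
    tracing::info!("logged through the non-blocking writer");
}
```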
 #[error(transparent)]
 Pnpm(#[from] crate::pnpm::Error),
+ #[error(transparent)]
+ Yarn1(#[from] crate::yarn1::Error),
 }
diff --git a/crates/turborepo-lockfiles/src/lib.rs b/crates/turborepo-lockfiles/src/lib.rs
index c83d9c9565547..6ad08689bc4bd 100644
--- a/crates/turborepo-lockfiles/src/lib.rs
+++ b/crates/turborepo-lockfiles/src/lib.rs
@@ -4,6 +4,7 @@ mod berry;
 mod error;
 mod npm;
 mod pnpm;
+mod yarn1;
 
 use std::collections::{HashMap, HashSet};
 
@@ -12,6 +13,7 @@ pub use error::Error;
 pub use npm::*;
 pub use pnpm::{pnpm_global_change, pnpm_subgraph, PnpmLockfile};
 use serde::Serialize;
+pub use yarn1::{yarn_subgraph, Yarn1Lockfile};
 
 #[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord, Hash, Serialize)]
 pub struct Package {
diff --git a/crates/turborepo-lockfiles/src/npm.rs b/crates/turborepo-lockfiles/src/npm.rs
index dbae72cc841b1..fefb70ab362b3 100644
--- a/crates/turborepo-lockfiles/src/npm.rs
+++ b/crates/turborepo-lockfiles/src/npm.rs
@@ -10,7 +10,7 @@ type Map<K, V> = std::collections::BTreeMap<K, V>;
 // we change graph traversal now
 // resolve_package should only be used now for converting initial contents
 // of workspace package.json into a set of node ids
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct NpmLockfile {
     #[serde(rename = "lockfileVersion")]
     lockfile_version: i32,
diff --git a/crates/turborepo-lockfiles/src/yarn1/de.rs b/crates/turborepo-lockfiles/src/yarn1/de.rs
new file mode 100644
index 0000000000000..436cc6eedbf40
--- /dev/null
+++ b/crates/turborepo-lockfiles/src/yarn1/de.rs
@@ -0,0 +1,317 @@
+use std::sync::OnceLock;
+
+use nom::{
+    branch::alt,
+    bytes::complete::{escaped_transform, is_not, tag, take_till},
+    character::complete::{anychar, char as nom_char, crlf, newline, none_of, satisfy, space1},
+    combinator::{all_consuming, map, not, opt, peek, recognize, value},
+    multi::{count, many0, many1},
+    sequence::{delimited, pair, preceded, separated_pair, terminated, tuple},
+    IResult,
+};
+use regex::Regex;
+use serde_json::Value;
+
+// regex for trimming spaces from start and end
+fn pseudostring_replace() -> &'static Regex {
+    static RE: OnceLock<Regex> = OnceLock::new();
+    RE.get_or_init(|| Regex::new(r"^ *| *$").unwrap())
+}
+
+pub fn parse_syml(input: &str) -> Result<Value, super::Error> {
+    match all_consuming(property_statements(0))(input) {
+        Ok((_, value)) => Ok(value),
+        Err(e) => Err(super::Error::SymlParse(e.to_string())),
+    }
+}
+
+// Array and map types
+fn item_statements(level: usize) -> impl Fn(&str) -> IResult<&str, Value> {
+    move |i: &str| map(many0(item_statement(level)), Value::Array)(i)
+}
+
+fn item_statement(level: usize) -> impl Fn(&str) -> IResult<&str, Value> {
+    move |i: &str| {
+        let (i, _) = indent(level)(i)?;
+        let (i, _) = nom_char('-')(i)?;
+        let (i, _) = blankspace(i)?;
+        expression(level)(i)
+    }
+}
+
+fn property_statements(level: usize) -> impl Fn(&str) -> IResult<&str, Value> {
+    move |i: &str| {
+        let (i, properties) = many0(property_statement(level))(i)?;
+        let mut map = serde_json::Map::new();
+        for (key, value) in properties.into_iter().flatten() {
+            map.insert(key, value);
+        }
+        Ok((i, Value::Object(map)))
+    }
+}
+
+fn property_statement(level: usize) -> impl Fn(&str) -> IResult<&str, Vec<(String, Value)>> {
+    move |i: &str| {
+        alt((
+            value(
+                vec![],
+                tuple((
+                    opt(blankspace),
+                    opt(pair(nom_char('#'), many1(pair(not(eol), anychar)))),
+                    many1(eol_any),
+                )),
+            ),
+            map(
+                preceded(
+                    indent(level),
+                    separated_pair(name, wrapped_colon, expression(level)),
+                ),
+                |entry| vec![entry],
+            ),
+            //
legacy names + map( + preceded( + indent(level), + separated_pair(legacy_name, wrapped_colon, expression(level)), + ), + |entry| vec![entry], + ), + // legacy prop without colon + map( + preceded( + indent(level), + separated_pair( + legacy_name, + blankspace, + terminated(legacy_literal, many1(eol_any)), + ), + ), + |entry| vec![entry], + ), + multikey_property_statement(level), + ))(i) + } +} + +fn multikey_property_statement( + level: usize, +) -> impl Fn(&str) -> IResult<&str, Vec<(String, Value)>> { + move |i: &str| { + let (i, ()) = indent(level)(i)?; + let (i, property) = legacy_name(i)?; + let (i, others) = many1(preceded( + delimited(opt(blankspace), nom_char(','), opt(blankspace)), + legacy_name, + ))(i)?; + let (i, _) = wrapped_colon(i)?; + let (i, value) = expression(level)(i)?; + + Ok(( + i, + std::iter::once(property) + .chain(others.into_iter()) + .map(|key| (key, value.clone())) + .collect(), + )) + } +} + +fn wrapped_colon(i: &str) -> IResult<&str, char> { + delimited(opt(blankspace), nom_char(':'), opt(blankspace))(i) +} + +fn expression(level: usize) -> impl Fn(&str) -> IResult<&str, Value> { + move |i: &str| { + alt(( + preceded( + tuple(( + peek(tuple((eol, indent(level + 1), nom_char('-'), blankspace))), + eol_any, + )), + item_statements(level + 1), + ), + preceded(eol, property_statements(level + 1)), + terminated(literal, many1(eol_any)), + ))(i) + } +} + +fn indent(level: usize) -> impl Fn(&str) -> IResult<&str, ()> { + move |i: &str| { + let (i, _) = count(nom_char(' '), level * 2)(i)?; + Ok((i, ())) + } +} + +// Simple types + +fn name(i: &str) -> IResult<&str, String> { + alt((string, pseudostring))(i) +} + +fn legacy_name(i: &str) -> IResult<&str, String> { + alt(( + string, + map(recognize(many1(pseudostring_legacy)), |s| s.to_string()), + ))(i) +} + +fn literal(i: &str) -> IResult<&str, Value> { + alt(( + value(Value::Null, null), + map(boolean, Value::Bool), + map(string, Value::String), + map(pseudostring, Value::String), + ))(i) +} + +fn legacy_literal(i: &str) -> IResult<&str, Value> { + alt(( + value(Value::Null, null), + map(string, Value::String), + map(pseudostring_legacy, Value::String), + ))(i) +} + +fn pseudostring(i: &str) -> IResult<&str, String> { + let (i, pseudo) = recognize(pseudostring_inner)(i)?; + Ok(( + i, + pseudostring_replace().replace_all(pseudo, "").into_owned(), + )) +} + +fn pseudostring_inner(i: &str) -> IResult<&str, ()> { + let (i, _) = none_of("\r\n\t ?:,][{}#&*!|>'\"%@`-")(i)?; + let (i, _) = many0(tuple((opt(blankspace), none_of("\r\n\t ,][{}:#\"'"))))(i)?; + Ok((i, ())) +} + +fn pseudostring_legacy(i: &str) -> IResult<&str, String> { + let (i, pseudo) = recognize(pseudostring_legacy_inner)(i)?; + let replaced = pseudostring_replace().replace_all(pseudo, ""); + Ok((i, replaced.to_string())) +} + +fn pseudostring_legacy_inner(i: &str) -> IResult<&str, ()> { + let (i, _) = opt(tag("--"))(i)?; + let (i, _) = satisfy(|c| c.is_ascii_alphanumeric() || c == '/')(i)?; + let (i, _) = take_till(|c| "\r\n\t :,".contains(c))(i)?; + Ok((i, ())) +} + +// String parsing + +fn null(i: &str) -> IResult<&str, &str> { + tag("null")(i) +} + +fn boolean(i: &str) -> IResult<&str, bool> { + alt((value(true, tag("true")), value(false, tag("false"))))(i) +} + +fn string(i: &str) -> IResult<&str, String> { + alt((empty_string, delimited(tag("\""), syml_chars, tag("\""))))(i) +} + +fn empty_string(i: &str) -> IResult<&str, String> { + let (i, _) = tag(r#""""#)(i)?; + Ok((i, "".to_string())) +} + +fn syml_chars(i: &str) -> IResult<&str, String> { + // 
The SYML grammar provided by Yarn2+ includes escape sequences that weren't
+    // supported by the yarn1 parser. We diverge from the Yarn2+ provided
+    // grammar to match the actual parser used by yarn1.
+    escaped_transform(
+        is_not("\"\\"),
+        '\\',
+        alt((
+            value("\"", tag("\"")),
+            value("\\", tag("\\")),
+            value("/", tag("/")),
+            value("\n", tag("n")),
+            value("\r", tag("r")),
+            value("\t", tag("t")),
+        )),
+    )(i)
+}
+
+// Spaces
+fn blankspace(i: &str) -> IResult<&str, &str> {
+    space1(i)
+}
+
+fn eol_any(i: &str) -> IResult<&str, &str> {
+    recognize(tuple((eol, many0(tuple((opt(blankspace), eol))))))(i)
+}
+
+fn eol(i: &str) -> IResult<&str, &str> {
+    alt((crlf, value("\n", newline), value("\r", nom_char('\r'))))(i)
+}
+
+#[cfg(test)]
+mod test {
+    use serde_json::json;
+    use test_case::test_case;
+
+    use super::*;
+
+    #[test_case("null", Value::Null ; "null")]
+    #[test_case("false", Value::Bool(false) ; "literal false")]
+    #[test_case("true", Value::Bool(true) ; "literal true")]
+    #[test_case("\"\"", Value::String("".into()) ; "empty string literal")]
+    #[test_case("\"foo\"", Value::String("foo".into()) ; "quoted string literal")]
+    #[test_case("foo", Value::String("foo".into()) ; "unquoted string literal")]
+    fn test_literal(input: &str, expected: Value) {
+        let (_, actual) = literal(input).unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[test_case("name: foo", "name" ; "basic")]
+    #[test_case("technically a name: foo", "technically a name" ; "multiword name")]
+    fn test_name(input: &str, expected: &str) {
+        let (_, actual) = name(input).unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[test_case("foo@1:", "foo@1" ; "name with colon terminator")]
+    #[test_case("\"foo@1\":", "foo@1" ; "quoted name with colon terminator")]
+    #[test_case("name foo", "name" ; "name without colon terminator")]
+    fn test_legacy_name(input: &str, expected: &str) {
+        let (_, actual) = legacy_name(input).unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[test_case("null\n", Value::Null ; "null")]
+    #[test_case("\"foo\"\n", json!("foo") ; "basic string")]
+    #[test_case("\n  name: foo\n", json!({ "name": "foo" }) ; "basic object")]
+    fn test_expression(input: &str, expected: Value) {
+        let (_, actual) = expression(0)(input).unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[test_case("# a comment\n", vec![] ; "comment")]
+    #[test_case("foo: null\n", vec![("foo".into(), Value::Null)] ; "single property")]
+    #[test_case("name foo\n", vec![("name".into(), json!("foo"))] ; "legacy property")]
+    fn test_property_statement(input: &str, expected: Vec<(String, Value)>) {
+        let (_, actual) = property_statement(0)(input).unwrap();
+        assert_eq!(actual, expected);
+    }
+
+    #[test_case("name: foo\n", json!({"name": "foo"}) ; "single property object")]
+    #[test_case("\"name\": foo\n", json!({"name": "foo"}) ; "single quoted property object")]
+    #[test_case("name foo\n", json!({"name": "foo"}) ; "single property without colon object")]
+    #[test_case("# comment\nname: foo\n", json!({"name": "foo"}) ; "comment doesn't affect object")]
+    #[test_case("name foo\nversion \"1.2.3\"\n", json!({"name": "foo", "version": "1.2.3"}) ; "multi-property object")]
+    #[test_case("foo:\n  version \"1.2.3\"\n", json!({"foo": {"version": "1.2.3"}}) ; "nested object")]
+    #[test_case("foo, bar, baz:\n  version \"1.2.3\"\n", json!({
+        "foo": {"version": "1.2.3"},
+        "bar": {"version": "1.2.3"},
+        "baz": {"version": "1.2.3"},
+    }) ; "multi-key object")]
+    fn test_property_statements(input: &str, expected: Value) {
+        let (_, actual) =
property_statements(0)(input).unwrap();
+        assert_eq!(actual, expected);
+    }
+}
diff --git a/crates/turborepo-lockfiles/src/yarn1/mod.rs b/crates/turborepo-lockfiles/src/yarn1/mod.rs
new file mode 100644
index 0000000000000..089b7724a9ca1
--- /dev/null
+++ b/crates/turborepo-lockfiles/src/yarn1/mod.rs
@@ -0,0 +1,161 @@
+use std::str::FromStr;
+
+use serde::Deserialize;
+
+use crate::Lockfile;
+
+mod de;
+mod ser;
+
+type Map<K, V> = std::collections::BTreeMap<K, V>;
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error("unable to parse: {0}")]
+    SymlParse(String),
+    #[error("unable to convert to structured syml: {0}")]
+    SymlStructure(#[from] serde_json::Error),
+    #[error("unexpected non-utf8 yarn.lock")]
+    NonUTF8(#[from] std::str::Utf8Error),
+}
+
+pub struct Yarn1Lockfile {
+    inner: Map<String, Entry>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct Entry {
+    name: Option<String>,
+    version: String,
+    uid: Option<String>,
+    resolved: Option<String>,
+    integrity: Option<String>,
+    registry: Option<String>,
+    dependencies: Option<Map<String, String>>,
+    optional_dependencies: Option<Map<String, String>>,
+}
+
+impl Yarn1Lockfile {
+    pub fn from_bytes(input: &[u8]) -> Result<Self, Error> {
+        let input = std::str::from_utf8(input)?;
+        Self::from_str(input)
+    }
+
+    pub fn subgraph(&self, packages: &[String]) -> Result<Self, Error> {
+        let mut inner = Map::new();
+
+        for (key, entry) in packages.iter().filter_map(|key| {
+            let entry = self.inner.get(key)?;
+            Some((key, entry))
+        }) {
+            inner.insert(key.clone(), entry.clone());
+        }
+
+        Ok(Self { inner })
+    }
+}
+
+impl FromStr for Yarn1Lockfile {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let value = de::parse_syml(s)?;
+        let inner = serde_json::from_value(value)?;
+        Ok(Self { inner })
+    }
+}
+
+impl Lockfile for Yarn1Lockfile {
+    fn resolve_package(
+        &self,
+        _workspace_path: &str,
+        name: &str,
+        version: &str,
+    ) -> Result<Option<crate::Package>, crate::Error> {
+        for key in possible_keys(name, version) {
+            if let Some(entry) = self.inner.get(&key) {
+                return Ok(Some(crate::Package {
+                    key,
+                    version: entry.version.clone(),
+                }));
+            }
+        }
+
+        Ok(None)
+    }
+
+    fn all_dependencies(
+        &self,
+        key: &str,
+    ) -> Result<Option<std::collections::HashMap<String, String>>, crate::Error> {
+        let Some(entry) = self.inner.get(key) else {
+            return Ok(None);
+        };
+
+        let all_deps: std::collections::HashMap<_, _> = entry.dependency_entries().collect();
+        Ok(match all_deps.is_empty() {
+            false => Some(all_deps),
+            true => None,
+        })
+    }
+}
+
+pub fn yarn_subgraph(contents: &[u8], packages: &[String]) -> Result<Vec<u8>, crate::Error> {
+    let lockfile = Yarn1Lockfile::from_bytes(contents)?;
+    let pruned_lockfile = lockfile.subgraph(packages)?;
+    Ok(pruned_lockfile.to_string().into_bytes())
+}
+
+impl Entry {
+    fn dependency_entries(&self) -> impl Iterator<Item = (String, String)> + '_ {
+        self.dependencies
+            .iter()
+            .flatten()
+            .chain(self.optional_dependencies.iter().flatten())
+            .map(|(k, v)| (k.clone(), v.clone()))
+    }
+}
+
+const PROTOCOLS: &[&str] = ["", "npm:", "file:", "workspace:", "yarn:"].as_slice();
+
+fn possible_keys<'a>(name: &'a str, version: &'a str) -> impl Iterator<Item = String> + 'a {
+    PROTOCOLS
+        .iter()
+        .copied()
+        .map(move |protocol| format!("{name}@{protocol}{version}"))
+}
+
+#[cfg(test)]
+mod test {
+    use pretty_assertions::assert_eq;
+    use test_case::test_case;
+
+    use super::*;
+
+    const MINIMAL: &str = include_str!("../../fixtures/yarn1.lock");
+    const FULL: &str = include_str!("../../fixtures/yarn1full.lock");
+
+    #[test_case(MINIMAL ; "minimal lockfile")]
+    #[test_case(FULL ; "full lockfile")]
+    fn test_roundtrip(input: &str) {
+        let lockfile =
Yarn1Lockfile::from_str(input).unwrap();
+        assert_eq!(input, lockfile.to_string());
+    }
+
+    #[test]
+    fn test_key_splitting() {
+        let lockfile = Yarn1Lockfile::from_str(FULL).unwrap();
+        for key in [
+            "@babel/types@^7.18.10",
+            "@babel/types@^7.18.6",
+            "@babel/types@^7.19.0",
+        ] {
+            assert!(
+                lockfile.inner.contains_key(key),
+                "missing {} in lockfile",
+                key
+            );
+        }
+    }
+}
diff --git a/crates/turborepo-lockfiles/src/yarn1/ser.rs b/crates/turborepo-lockfiles/src/yarn1/ser.rs
new file mode 100644
index 0000000000000..57f6be81fe4ac
--- /dev/null
+++ b/crates/turborepo-lockfiles/src/yarn1/ser.rs
@@ -0,0 +1,215 @@
+use std::{
+    borrow::Cow,
+    collections::{HashMap, HashSet},
+    fmt,
+};
+
+use super::{Entry, Yarn1Lockfile};
+
+const INDENT: &str = "  ";
+
+impl Yarn1Lockfile {
+    fn reverse_lookup(&self) -> HashMap<&Entry, HashSet<&str>> {
+        let mut reverse_lookup = HashMap::new();
+        for (key, value) in self.inner.iter() {
+            let keys: &mut HashSet<&str> = reverse_lookup.entry(value).or_default();
+            keys.insert(key);
+        }
+        reverse_lookup
+    }
+}
+
+impl fmt::Display for Yarn1Lockfile {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(
+            "# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.\n# yarn lockfile \
+             v1\n\n",
+        )?;
+        let reverse_lookup = self.reverse_lookup();
+        let mut added_keys: HashSet<&str> = HashSet::with_capacity(self.inner.len());
+        for (key, entry) in self.inner.iter() {
+            if added_keys.contains(key.as_str()) {
+                continue;
+            }
+
+            let all_keys = reverse_lookup
+                .get(entry)
+                .expect("entry in lockfile should appear as a key in reverse lookup");
+            added_keys.extend(all_keys);
+            let mut keys = all_keys.iter().copied().collect::<Vec<_>>();
+            // Keys must be sorted before they get wrapped
+            keys.sort();
+
+            let wrapped_keys = keys.into_iter().map(maybe_wrap).collect::<Vec<_>>();
+            let key_line = wrapped_keys.join(", ");
+
+            f.write_fmt(format_args!("\n{}:\n{}\n", key_line, entry))?;
+        }
+        Ok(())
+    }
+}
+
+impl fmt::Display for Entry {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let mut leading = LeadingNewline::new();
+        if let Some(name) = &self.name {
+            f.write_fmt(format_args!(
+                "{}{INDENT}name {}",
+                leading.leading(),
+                maybe_wrap(name)
+            ))?;
+        }
+        f.write_fmt(format_args!(
+            "{}{INDENT}version {}",
+            leading.leading(),
+            maybe_wrap(&self.version)
+        ))?;
+        if let Some(uid) = &self.uid {
+            f.write_fmt(format_args!(
+                "{}{INDENT}uid {}",
+                leading.leading(),
+                maybe_wrap(uid)
+            ))?;
+        }
+        if let Some(resolved) = &self.resolved {
+            f.write_fmt(format_args!(
+                "{}{INDENT}resolved {}",
+                leading.leading(),
+                maybe_wrap(resolved)
+            ))?;
+        }
+        if let Some(integrity) = &self.integrity {
+            f.write_fmt(format_args!(
+                "{}{INDENT}integrity {}",
+                leading.leading(),
+                maybe_wrap(integrity)
+            ))?;
+        }
+        if let Some(registry) = &self.registry {
+            f.write_fmt(format_args!(
+                "{}{INDENT}registry {}",
+                leading.leading(),
+                maybe_wrap(registry)
+            ))?;
+        }
+        // encode deps and optional deps
+        if let Some(deps) = &self.dependencies {
+            f.write_fmt(format_args!("{}{INDENT}dependencies:", leading.leading()))?;
+            encode_map(deps.iter().map(|(k, v)| (k.as_ref(), v.as_ref())), f)?;
+        }
+        if let Some(optional_deps) = &self.optional_dependencies {
+            f.write_fmt(format_args!(
+                "{}{INDENT}optionalDependencies:",
+                leading.leading()
+            ))?;
+            encode_map(
+                optional_deps.iter().map(|(k, v)| (k.as_ref(), v.as_ref())),
+                f,
+            )?;
+        }
+        Ok(())
+    }
+}
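`resolve_package` in the mod.rs hunk above leans on `possible_keys` because a yarn1 lockfile can key the same dependency as `name@version`, `name@npm:version`, and so on; probing each prefixed form is cheaper than scanning every key. The expansion in isolation, as a standalone snippet:

```rust
const PROTOCOLS: &[&str] = &["", "npm:", "file:", "workspace:", "yarn:"];

fn possible_keys<'a>(name: &'a str, version: &'a str) -> impl Iterator<Item = String> + 'a {
    // Try the bare form first, then each protocol-prefixed form.
    PROTOCOLS
        .iter()
        .map(move |protocol| format!("{name}@{protocol}{version}"))
}

fn main() {
    let keys: Vec<String> = possible_keys("next", "12.2.5").collect();
    assert_eq!(keys[0], "next@12.2.5");
    assert_eq!(keys[1], "next@npm:12.2.5");
}
```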
+
+#[derive(Debug, Clone, Copy)]
+enum LeadingNewline {
+    First,
+    Rest,
+}
+
+impl LeadingNewline {
+    fn new() -> Self {
+        Self::First
+    }
+
+    fn leading(&mut self) -> &'static str {
+        let res = match self {
+            LeadingNewline::First => "",
+            LeadingNewline::Rest => "\n",
+        };
+        *self = Self::Rest;
+        res
+    }
+}
+
+fn encode_map<'a, I: Iterator<Item = (&'a str, &'a str)>>(
+    entries: I,
+    f: &mut fmt::Formatter<'_>,
+) -> fmt::Result {
+    let mut wrapped_entries = entries
+        .map(|(k, v)| (maybe_wrap(k), maybe_wrap(v)))
+        .collect::<Vec<_>>();
+    wrapped_entries.sort_unstable_by(|(k1, _), (k2, _)| k1.cmp(k2));
+    // we sort via the wrapped keys,
+    // then we write each line with the value wrapped as well
+    for (key, value) in wrapped_entries {
+        f.write_fmt(format_args!("\n{INDENT}{INDENT}{key} {value}"))?;
+    }
+
+    Ok(())
+}
+
+fn maybe_wrap(s: &str) -> Cow<str> {
+    match should_wrap_key(s) {
+        // yarn uses JSON.stringify to escape strings
+        // we approximate this behavior using serde_json
+        true => serde_json::to_string(s)
+            .expect("failed at encoding string as json")
+            .into(),
+        false => s.into(),
+    }
+}
+
+// Determines if we need to wrap a key
+fn should_wrap_key(s: &str) -> bool {
+    // Wrap if it starts with a syml keyword
+    s.starts_with("true") ||
+    s.starts_with("false") ||
+    // Wrap if it doesn't start with a-zA-Z
+    s.chars().next().map_or(false, |c| !c.is_ascii_alphabetic()) ||
+    // Wrap if it contains any unwanted chars
+    s.chars().any(|c| matches!(
+        c,
+        ' ' | ':' | '\t' | '\r' | '\u{000B}' | '\u{000C}' | '\n' | '\\' | '"' | ',' | '[' | ']'
+    ))
+}
+
+#[cfg(test)]
+mod test {
+    use pretty_assertions::assert_eq;
+
+    use super::*;
+
+    #[test]
+    fn test_should_wrap() {
+        assert!(should_wrap_key("jsx-ast-utils@^2.4.1 || ^3.0.0"))
+    }
+
+    #[test]
+    fn test_basic_serialization() {
+        let entry = Entry {
+            version: "12.2.5".into(),
+            resolved: Some("https://registry.yarnpkg.com/next/-/next-12.2.5.tgz#14fb5975e8841fad09553b8ef41fe1393602b717".into()),
+            integrity: Some("sha512-tBdjqX5XC/oFs/6gxrZhjmiq90YWizUYU6qOWAfat7zJwrwapJ+BYgX2PmiacunXMaRpeVT4vz5MSPSLgNkrpA==".into()),
+            dependencies: Some(vec![
+                ("@next/env".into(), "12.2.5".into()),
+                ("caniuse-lite".into(), "^1.0.30001332".into()),
+                ("postcss".into(), "8.4.14".into()),
+            ].into_iter().collect()),
+            optional_dependencies: Some(vec![("@next/swc-win32-x64-msvc".into(), "12.2.5".into())].into_iter().collect()),
+            ..Default::default()
+        };
+        assert_eq!(
+            entry.to_string(),
+            r#"  version "12.2.5"
+  resolved "https://registry.yarnpkg.com/next/-/next-12.2.5.tgz#14fb5975e8841fad09553b8ef41fe1393602b717"
+  integrity sha512-tBdjqX5XC/oFs/6gxrZhjmiq90YWizUYU6qOWAfat7zJwrwapJ+BYgX2PmiacunXMaRpeVT4vz5MSPSLgNkrpA==
+  dependencies:
+    "@next/env" "12.2.5"
+    caniuse-lite "^1.0.30001332"
+    postcss "8.4.14"
+  optionalDependencies:
+    "@next/swc-win32-x64-msvc" "12.2.5""#
+        );
+    }
+}
diff --git a/crates/turborepo-paths/src/absolute_system_path.rs b/crates/turborepo-paths/src/absolute_system_path.rs
index 8d47d854a879e..f71479c86f916 100644
--- a/crates/turborepo-paths/src/absolute_system_path.rs
+++ b/crates/turborepo-paths/src/absolute_system_path.rs
@@ -80,7 +80,7 @@ impl AbsoluteSystemPath {
     pub fn new<P: AsRef<Path> + ?Sized>(value: &P) -> Result<&Self, PathError> {
         let path = value.as_ref();
         if path.is_relative() {
-            return Err(PathError::NotAbsolute(path.to_owned()).into());
+            return Err(PathError::NotAbsolute(path.to_owned()));
         }
         let path_str = path
             .to_str()
@@ -89,9 +89,7 @@ impl AbsoluteSystemPath {
         let system_path = Cow::from_slash(path_str);
         match system_path {
-            Cow::Owned(path) => {
-                Err(PathError::NotSystem(path.to_string_lossy().to_string()).into())
-            }
+            Cow::Owned(path) =>
Err(PathError::NotSystem(path.to_string_lossy().to_string())), Cow::Borrowed(path) => { let path = Path::new(path); // copied from stdlib path.rs: relies on the representation of @@ -112,9 +110,7 @@ impl AbsoluteSystemPath { } pub fn ancestors(&self) -> impl Iterator { - self.0 - .ancestors() - .map(|ancestor| Self::new_unchecked(ancestor)) + self.0.ancestors().map(Self::new_unchecked) } // intended for joining literals or obviously single-token strings @@ -165,7 +161,7 @@ impl AbsoluteSystemPath { pub fn symlink_to_dir>(&self, to: P) -> Result<(), PathError> { let system_path = to.as_ref(); let system_path = system_path.into_system()?; - symlink_dir(&system_path, &self.0)?; + symlink_dir(system_path, &self.0)?; Ok(()) } @@ -221,4 +217,12 @@ mod tests { Ok(()) } + + #[test] + fn test_resolve_empty() { + let root = AbsoluteSystemPathBuf::cwd().unwrap(); + let empty = AnchoredSystemPathBuf::from_raw("").unwrap(); + let result = root.resolve(&empty); + assert_eq!(result, root); + } } diff --git a/crates/turborepo-paths/src/absolute_system_path_buf.rs b/crates/turborepo-paths/src/absolute_system_path_buf.rs index c36825bbae3c2..23befcc9e6ebb 100644 --- a/crates/turborepo-paths/src/absolute_system_path_buf.rs +++ b/crates/turborepo-paths/src/absolute_system_path_buf.rs @@ -68,7 +68,7 @@ impl AbsoluteSystemPathBuf { pub fn new(unchecked_path: impl Into) -> Result { let unchecked_path = unchecked_path.into(); if !unchecked_path.is_absolute() { - return Err(PathError::NotAbsolute(unchecked_path).into()); + return Err(PathError::NotAbsolute(unchecked_path)); } let system_path = unchecked_path.into_system()?; @@ -287,7 +287,7 @@ mod tests { assert_eq!( AbsoluteSystemPathBuf::new("/some/dir") .unwrap() - .join_unix_path(&tail) + .join_unix_path(tail) .unwrap(), AbsoluteSystemPathBuf::new("/some/other").unwrap(), ); diff --git a/crates/turborepo-paths/src/anchored_system_path_buf.rs b/crates/turborepo-paths/src/anchored_system_path_buf.rs index 4508dc2706e33..ccd10be54cf93 100644 --- a/crates/turborepo-paths/src/anchored_system_path_buf.rs +++ b/crates/turborepo-paths/src/anchored_system_path_buf.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::{Component, Path, PathBuf}; use serde::{Deserialize, Serialize}; @@ -13,7 +13,7 @@ impl TryFrom<&Path> for AnchoredSystemPathBuf { fn try_from(path: &Path) -> Result { if path.is_absolute() { let bad_path = path.display().to_string(); - return Err(PathError::NotRelative(bad_path).into()); + return Err(PathError::NotRelative(bad_path)); } Ok(AnchoredSystemPathBuf(path.into_system()?)) @@ -43,6 +43,45 @@ impl AnchoredSystemPathBuf { Ok(AnchoredSystemPathBuf(stripped_path)) } + // Produces a path from start to end, which may include directory traversal + // tokens. Given that both parameters are absolute, we _should_ always be + // able to produce such a path. The exception is when crossing drive letters + // on Windows, where no such path is possible. Since a repository is + // expected to only reside on a single drive, this shouldn't be an issue. + pub fn relative_path_between(start: &AbsoluteSystemPath, end: &AbsoluteSystemPath) -> Self { + // Filter the implicit "RootDir" component that exists for unix paths. 
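The helper introduced above is easiest to follow as common-prefix-then-traversal: strip the shared leading components, emit one `..` per component left in `start`, then append what remains of `end`. A standalone restatement on plain `std::path` types (the real code operates on `AbsoluteSystemPath`/`AnchoredSystemPathBuf`):

```rust
use std::path::{Component, Path, PathBuf};

// Strip the shared prefix, add one `..` per leftover component of `start`,
// then append the leftover components of `end`.
fn relative_between(start: &Path, end: &Path) -> PathBuf {
    let a: Vec<Component> = start
        .components()
        .filter(|c| *c != Component::RootDir)
        .collect();
    let b: Vec<Component> = end
        .components()
        .filter(|c| *c != Component::RootDir)
        .collect();
    let prefix = a.iter().zip(b.iter()).take_while(|(x, y)| x == y).count();
    std::iter::repeat(Component::ParentDir)
        .take(a.len() - prefix)
        .chain(b.into_iter().skip(prefix))
        .collect()
}

fn main() {
    // Mirrors the test table in the hunk: /a/b/c vs /a/d -> ../../d
    assert_eq!(
        relative_between(Path::new("/a/b/c"), Path::new("/a/d")),
        PathBuf::from("../../d")
    );
    assert_eq!(
        relative_between(Path::new("/a/b"), Path::new("/a/b/c")),
        PathBuf::from("c")
    );
}
```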
+ // For windows paths, we may want an assertion that we aren't crossing drives + let these_components = start + .components() + .skip_while(|c| *c == Component::RootDir) + .collect::>(); + let other_components = end + .components() + .skip_while(|c| *c == Component::RootDir) + .collect::>(); + let prefix_len = these_components + .iter() + .zip(other_components.iter()) + .take_while(|(a, b)| a == b) + .count(); + #[cfg(windows)] + debug_assert!( + prefix_len >= 1, + "Cannot traverse drives between {} and {}", + start, + end + ); + + let traverse_count = these_components.len() - prefix_len; + // For every remaining non-matching segment in self, add a directory traversal + // Then, add every non-matching segment from other + let path = std::iter::repeat(Component::ParentDir) + .take(traverse_count) + .chain(other_components.into_iter().skip(prefix_len)) + .collect::(); + Self(path) + } + pub fn from_raw>(raw: P) -> Result { let system_path = raw.as_ref(); let system_path = system_path.into_system()?; @@ -83,3 +122,47 @@ impl From for PathBuf { path.0 } } + +#[cfg(test)] +mod tests { + use crate::{AbsoluteSystemPathBuf, AnchoredSystemPathBuf}; + + #[test] + fn test_relative_path_to() { + #[cfg(unix)] + let root_token = "/"; + #[cfg(windows)] + let root_token = "C:\\"; + + let root = AbsoluteSystemPathBuf::new( + [root_token, "a", "b", "c"].join(std::path::MAIN_SEPARATOR_STR), + ) + .unwrap(); + + // /a/b/c + // vs + // /a -> ../.. + // /a/b/d -> ../d + // /a/b/c/d -> d + // /e/f -> ../../../e/f + // / -> ../../.. + let test_cases: &[(&[&str], &[&str])] = &[ + (&["a"], &["..", ".."]), + (&["a", "b", "d"], &["..", "d"]), + (&["a", "b", "c", "d"], &["d"]), + (&["e", "f"], &["..", "..", "..", "e", "f"]), + (&[], &["..", "..", ".."]), + ]; + for (input, expected) in test_cases { + let mut parts = vec![root_token]; + parts.extend_from_slice(input); + let target = + AbsoluteSystemPathBuf::new(parts.join(std::path::MAIN_SEPARATOR_STR)).unwrap(); + let expected = + AnchoredSystemPathBuf::from_raw(expected.join(std::path::MAIN_SEPARATOR_STR)) + .unwrap(); + let result = AnchoredSystemPathBuf::relative_path_between(&root, &target); + assert_eq!(result, expected); + } + } +} diff --git a/crates/turborepo-scm/Cargo.toml b/crates/turborepo-scm/Cargo.toml index c073613781baf..89063432da86e 100644 --- a/crates/turborepo-scm/Cargo.toml +++ b/crates/turborepo-scm/Cargo.toml @@ -9,14 +9,16 @@ license = "MPL-2.0" [dependencies] bstr = "1.4.0" git2 = { version = "0.16.1", default-features = false } +globwalk = { path = "../turborepo-globwalk" } hex = "0.4.3" ignore = "0.4.20" +itertools.workspace = true nom = "7.1.3" path-slash = "0.2.1" sha1 = "0.10.5" thiserror = { workspace = true } turbopath = { workspace = true } -wax = "0.5.0" +wax = { workspace = true } which = { workspace = true } [dev-dependencies] diff --git a/crates/turborepo-scm/src/git.rs b/crates/turborepo-scm/src/git.rs index 3e0a921819d42..cba80bd5a818f 100644 --- a/crates/turborepo-scm/src/git.rs +++ b/crates/turborepo-scm/src/git.rs @@ -260,7 +260,7 @@ mod tests { let git_binary = which("git")?; let output = Command::new(git_binary) - .args(&[ + .args([ "clone", "--depth", "2", @@ -297,10 +297,10 @@ mod tests { let file_path = Path::new("foo.js"); fs::write(&file, "let z = 0;")?; - let first_commit_oid = commit_file(&repo, &file_path, None)?; + let first_commit_oid = commit_file(&repo, file_path, None)?; fs::remove_file(&file)?; - let _second_commit_oid = commit_delete(&repo, &file_path, first_commit_oid)?; + let _second_commit_oid = 
commit_delete(&repo, file_path, first_commit_oid)?; let first_commit_sha = first_commit_oid.to_string(); let git_root = repo_root.path().to_owned(); @@ -315,24 +315,24 @@ mod tests { fn test_merge_base() -> Result<(), Error> { let (repo_root, repo) = setup_repository()?; let first_file = repo_root.path().join("foo.js"); - fs::write(&first_file, "let z = 0;")?; + fs::write(first_file, "let z = 0;")?; // Create a base commit. This will *not* be the merge base let first_commit_oid = commit_file(&repo, Path::new("foo.js"), None)?; let second_file = repo_root.path().join("bar.js"); - fs::write(&second_file, "let y = 1;")?; + fs::write(second_file, "let y = 1;")?; // This commit will be the merge base let second_commit_oid = commit_file(&repo, Path::new("bar.js"), Some(first_commit_oid))?; let third_file = repo_root.path().join("baz.js"); - fs::write(&third_file, "let x = 2;")?; + fs::write(third_file, "let x = 2;")?; // Create a first commit off of merge base let third_commit_oid = commit_file(&repo, Path::new("baz.js"), Some(second_commit_oid))?; // Move head back to merge base repo.set_head_detached(second_commit_oid)?; let fourth_file = repo_root.path().join("qux.js"); - fs::write(&fourth_file, "let w = 3;")?; + fs::write(fourth_file, "let w = 3;")?; // Create a second commit off of merge base let fourth_commit_oid = commit_file(&repo, Path::new("qux.js"), Some(second_commit_oid))?; @@ -569,7 +569,7 @@ mod tests { assert_eq!(content, b"let z = 0;"); let new_file = repo_root.path().join("bar.js"); - fs::write(&new_file, "let y = 0;")?; + fs::write(new_file, "let y = 0;")?; let third_commit_oid = commit_file(&repo, Path::new("bar.js"), Some(second_commit_oid))?; let third_commit = repo.find_commit(third_commit_oid)?; repo.branch("release-1", &third_commit, false)?; diff --git a/crates/turborepo-scm/src/hash_object.rs b/crates/turborepo-scm/src/hash_object.rs index 14862fe598f95..9bbc8c16813a7 100644 --- a/crates/turborepo-scm/src/hash_object.rs +++ b/crates/turborepo-scm/src/hash_object.rs @@ -6,14 +6,14 @@ use std::{ }; use nom::{Finish, IResult}; -use turbopath::{AbsoluteSystemPathBuf, RelativeUnixPathBuf}; +use turbopath::{AbsoluteSystemPath, AnchoredSystemPathBuf, RelativeUnixPathBuf}; use crate::{package_deps::GitHashes, wait_for_success, Error}; pub(crate) fn hash_objects( - pkg_path: &AbsoluteSystemPathBuf, + git_root: &AbsoluteSystemPath, + pkg_path: &AbsoluteSystemPath, to_hash: Vec, - pkg_prefix: &RelativeUnixPathBuf, hashes: &mut GitHashes, ) -> Result<(), Error> { if to_hash.is_empty() { @@ -21,7 +21,9 @@ pub(crate) fn hash_objects( } let mut git = Command::new("git") .args(["hash-object", "--stdin-paths"]) - .current_dir(pkg_path) + // Note that the directory is irrelevant as long as it is within the git repo. + // --stdin-paths processes all paths as relative to the root of the _git_ repository. 
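Per the note above, `git hash-object --stdin-paths` reads newline-separated paths on stdin and prints one object hash per line, so pinning the child's working directory to the git root lets repo-root-relative paths resolve. A minimal sketch of driving it, with error handling collapsed to `unwrap` and assuming it runs inside a git repository containing a `README.md`:

```rust
use std::{
    io::{BufRead, BufReader, Write},
    process::{Command, Stdio},
};

fn main() {
    let repo_root = "."; // assumed: the current dir is the git root
    let mut child = Command::new("git")
        .args(["hash-object", "--stdin-paths"])
        .current_dir(repo_root)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .unwrap();

    // Feed repo-root-relative paths, one per line; dropping the handle at the
    // end of this statement closes stdin so git can finish.
    child
        .stdin
        .take()
        .unwrap()
        .write_all(b"README.md\n")
        .unwrap();

    // git answers with one object hash per input path.
    let stdout = BufReader::new(child.stdout.take().unwrap());
    for line in stdout.lines() {
        println!("hash: {}", line.unwrap());
    }
    child.wait().unwrap();
}
```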
+ .current_dir(git_root) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .stdin(Stdio::piped()) @@ -41,7 +43,7 @@ pub(crate) fn hash_objects( .stderr .take() .ok_or_else(|| Error::git_error("failed to get stderr for git hash-object"))?; - let parse_result = read_object_hashes(stdout, stdin, &to_hash, pkg_prefix, hashes); + let parse_result = read_object_hashes(stdout, stdin, &to_hash, git_root, pkg_path, hashes); wait_for_success(git, &mut stderr, "git hash-object", pkg_path, parse_result) } @@ -51,7 +53,8 @@ fn read_object_hashes( mut reader: R, writer: W, to_hash: &Vec, - pkg_prefix: &RelativeUnixPathBuf, + git_prefix: &AbsoluteSystemPath, + pkg_path: &AbsoluteSystemPath, hashes: &mut GitHashes, ) -> Result<(), Error> { thread::scope(move |scope| -> Result<(), Error> { @@ -71,7 +74,9 @@ fn read_object_hashes( reader.read_exact(&mut buffer)?; let hash = parse_hash_object(&buffer)?; let hash = String::from_utf8(hash.to_vec())?; - let path = filename.strip_prefix(pkg_prefix)?; + let full_file_path = git_prefix.join_unix_path(filename)?; + let path = AnchoredSystemPathBuf::relative_path_between(pkg_path, &full_file_path) + .to_unix()?; hashes.insert(path, hash); } match write_thread.join() { @@ -152,7 +157,7 @@ mod test { let expected_hashes = GitHashes::from_iter(file_hashes.into_iter()); let mut hashes = GitHashes::new(); let to_hash = expected_hashes.keys().map(|k| pkg_prefix.join(k)).collect(); - hash_objects(&pkg_path, to_hash, &pkg_prefix, &mut hashes).unwrap(); + hash_objects(&git_root, pkg_path, to_hash, &mut hashes).unwrap(); assert_eq!(hashes, expected_hashes); } @@ -172,7 +177,7 @@ mod test { .collect(); let mut hashes = GitHashes::new(); - let result = hash_objects(&pkg_path, to_hash, &pkg_prefix, &mut hashes); + let result = hash_objects(&git_root, &pkg_path, to_hash, &mut hashes); assert_eq!(result.is_err(), true); } } diff --git a/crates/turborepo-scm/src/lib.rs b/crates/turborepo-scm/src/lib.rs index 525ec33034b84..ab8332b286643 100644 --- a/crates/turborepo-scm/src/lib.rs +++ b/crates/turborepo-scm/src/lib.rs @@ -38,11 +38,13 @@ pub enum Error { #[error("package traversal error: {0}")] Ignore(#[from] ignore::Error, #[backtrace] backtrace::Backtrace), #[error("invalid glob: {0}")] - Glob(Box>, backtrace::Backtrace), + Glob(Box, backtrace::Backtrace), + #[error(transparent)] + Walk(#[from] globwalk::WalkError), } -impl From> for Error { - fn from(value: wax::BuildError<'static>) -> Self { +impl From for Error { + fn from(value: wax::BuildError) -> Self { Error::Glob(Box::new(value), Backtrace::capture()) } } diff --git a/crates/turborepo-scm/src/ls_tree.rs b/crates/turborepo-scm/src/ls_tree.rs index eb9e6866b3186..71f88b5ef11a6 100644 --- a/crates/turborepo-scm/src/ls_tree.rs +++ b/crates/turborepo-scm/src/ls_tree.rs @@ -81,7 +81,7 @@ mod tests { use crate::{ls_tree::read_ls_tree, package_deps::GitHashes}; fn to_hash_map(pairs: &[(&str, &str)]) -> GitHashes { - HashMap::from_iter(pairs.into_iter().map(|(path, hash)| { + HashMap::from_iter(pairs.iter().map(|(path, hash)| { ( RelativeUnixPathBuf::new(path.as_bytes()).unwrap(), hash.to_string(), diff --git a/crates/turborepo-scm/src/manual.rs b/crates/turborepo-scm/src/manual.rs index 4aaaf1e75d061..9ad1ac4aad47c 100644 --- a/crates/turborepo-scm/src/manual.rs +++ b/crates/turborepo-scm/src/manual.rs @@ -40,30 +40,26 @@ pub fn get_package_file_hashes_from_processing_gitignore>( .to_slash() .ok_or_else(|| PathError::invalid_utf8_error(exclusion.as_bytes()))? 
.into_owned(); - let g = Glob::new(glob.as_str()) - .map(|g| g.into_owned()) - .map_err(|e| e.into_owned())?; + let g = Glob::new(glob.as_str()).map(|g| g.into_owned())?; excludes.push(g); } else { let glob = Path::new(pattern) .to_slash() .ok_or_else(|| PathError::invalid_utf8_error(pattern.as_bytes()))? .into_owned(); - let g = Glob::new(glob.as_str()) - .map(|g| g.into_owned()) - .map_err(|e| e.into_owned())?; + let g = Glob::new(glob.as_str()).map(|g| g.into_owned())?; includes.push(g); } } let include_pattern = if includes.is_empty() { None } else { - Some(any::, _>(includes)?) + Some(any(includes)?) }; let exclude_pattern = if excludes.is_empty() { None } else { - Some(wax::any::, _>(excludes.into_iter())?) + Some(any(excludes.into_iter())?) }; let walker = walker_builder .follow_links(false) diff --git a/crates/turborepo-scm/src/package_deps.rs b/crates/turborepo-scm/src/package_deps.rs index 4fede63611e2a..782d9abb99a1b 100644 --- a/crates/turborepo-scm/src/package_deps.rs +++ b/crates/turborepo-scm/src/package_deps.rs @@ -1,14 +1,17 @@ use std::{collections::HashMap, process::Command}; use bstr::io::BufReadExt; -use turbopath::{AbsoluteSystemPathBuf, AnchoredSystemPathBuf, RelativeUnixPathBuf}; +use itertools::{Either, Itertools}; +use turbopath::{ + AbsoluteSystemPath, AbsoluteSystemPathBuf, AnchoredSystemPathBuf, RelativeUnixPathBuf, +}; use crate::{hash_object::hash_objects, ls_tree::git_ls_tree, status::append_git_status, Error}; pub type GitHashes = HashMap; pub fn get_package_file_hashes_from_git_index( - turbo_root: &AbsoluteSystemPathBuf, + turbo_root: &AbsoluteSystemPath, package_path: &AnchoredSystemPathBuf, ) -> Result { // TODO: memoize git root -> turbo root calculation once we aren't crossing ffi @@ -19,12 +22,69 @@ pub fn get_package_file_hashes_from_git_index( let mut hashes = git_ls_tree(&full_pkg_path)?; // Note: to_hash is *git repo relative* let to_hash = append_git_status(&full_pkg_path, &pkg_prefix, &mut hashes)?; - hash_objects(&full_pkg_path, to_hash, &pkg_prefix, &mut hashes)?; + hash_objects(&git_root, &full_pkg_path, to_hash, &mut hashes)?; + Ok(hashes) +} + +pub fn get_package_file_hashes_from_inputs>( + turbo_root: &AbsoluteSystemPath, + package_path: &AnchoredSystemPathBuf, + inputs: &[S], +) -> Result { + // TODO: memoize git root -> turbo root calculation once we aren't crossing ffi + let git_root = find_git_root(turbo_root)?; + let full_pkg_path = turbo_root.resolve(package_path); + let package_unix_path_buf = package_path.to_unix()?; + let package_unix_path = package_unix_path_buf.as_str()?; + + let mut inputs = inputs + .iter() + .map(|s| s.as_ref().to_string()) + .collect::>(); + // Add in package.json and turbo.json to input patterns. Both file paths are + // relative to pkgPath + // + // - package.json is an input because if the `scripts` in the package.json + // change (i.e. the tasks that turbo executes), we want a cache miss, since + // any existing cache could be invalid. + // - turbo.json because it's the definition of the tasks themselves. The root + // turbo.json is similarly included in the global hash. This file may not + // exist in the workspace, but that is ok, because it will get ignored + // downstream. + inputs.push("package.json".to_string()); + inputs.push("turbo.json".to_string()); + + // The input patterns are relative to the package. + // However, we need to change the globbing to be relative to the repo root. + // Prepend the package path to each of the input patterns. 
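The hunk below does that prefixing with `itertools::partition_map`, peeling off `!`-prefixed patterns as exclusions in the same pass. The same move in isolation, with toy patterns:

```rust
use itertools::{Either, Itertools};

fn main() {
    let package = "packages/my-pkg";
    let inputs = ["src/**", "!src/**/*.test.ts", "package.json"];

    // '!'-prefixed globs become exclusions; every pattern is re-rooted at the
    // repo root by prepending the package path.
    let (inclusions, exclusions): (Vec<String>, Vec<String>) =
        inputs.into_iter().partition_map(|raw| {
            if let Some(exclusion) = raw.strip_prefix('!') {
                Either::Right(format!("{package}/{exclusion}"))
            } else {
                Either::Left(format!("{package}/{raw}"))
            }
        });

    assert_eq!(
        inclusions,
        ["packages/my-pkg/src/**", "packages/my-pkg/package.json"]
    );
    assert_eq!(exclusions, ["packages/my-pkg/src/**/*.test.ts"]);
}
```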
+ let (inclusions, exclusions): (Vec, Vec) = + inputs.into_iter().partition_map(|raw_glob| { + if let Some(exclusion) = raw_glob.strip_prefix('!') { + Either::Right([package_unix_path, exclusion].join("/")) + } else { + Either::Left([package_unix_path, raw_glob.as_ref()].join("/")) + } + }); + let files = globwalk::globwalk( + turbo_root, + &inclusions, + &exclusions, + globwalk::WalkType::Files, + )?; + let to_hash = files + .iter() + .map(|entry| { + let path = git_root.anchor(entry)?.to_unix()?; + Ok(path) + }) + .collect::, Error>>()?; + let mut hashes = GitHashes::new(); + hash_objects(&git_root, &full_pkg_path, to_hash, &mut hashes)?; Ok(hashes) } pub(crate) fn find_git_root( - turbo_root: &AbsoluteSystemPathBuf, + turbo_root: &AbsoluteSystemPath, ) -> Result { let rev_parse = Command::new("git") .args(["rev-parse", "--show-cdup"]) @@ -70,7 +130,7 @@ mod tests { fn require_git_cmd(repo_root: &AbsoluteSystemPathBuf, args: &[&str]) { let mut cmd = Command::new("git"); cmd.args(args).current_dir(repo_root); - assert_eq!(cmd.output().unwrap().status.success(), true); + assert!(cmd.output().unwrap().status.success()); } fn setup_repository(repo_root: &AbsoluteSystemPathBuf) { @@ -161,7 +221,7 @@ mod tests { let package_path = AnchoredSystemPathBuf::from_raw("my-pkg")?; - let expected = to_hash_map(&[ + let all_expected = to_hash_map(&[ ("committed-file", "3a29e62ea9ba15c4a4009d1f605d391cdd262033"), ( "uncommitted-file", @@ -174,12 +234,65 @@ mod tests { ), ]); let hashes = get_package_file_hashes_from_git_index(&repo_root, &package_path)?; - assert_eq!(hashes, expected); + assert_eq!(hashes, all_expected); + + // add the new root file as an option + let mut all_expected = all_expected.clone(); + all_expected.insert( + RelativeUnixPathBuf::new("../new-root-file").unwrap(), + "8906ddcdd634706188bd8ef1c98ac07b9be3425e".to_string(), + ); + + let input_tests: &[(&[&str], &[&str])] = &[ + (&["uncommitted-file"], &["package.json", "uncommitted-file"]), + ( + &["**/*-file"], + &[ + "committed-file", + "uncommitted-file", + "package.json", + "dir/nested-file", + ], + ), + ( + &["../**/*-file"], + &[ + "committed-file", + "uncommitted-file", + "package.json", + "dir/nested-file", + "../new-root-file", + ], + ), + ( + &["**/{uncommitted,committed}-file"], + &["committed-file", "uncommitted-file", "package.json"], + ), + ( + &["../**/{new-root,uncommitted,committed}-file"], + &[ + "committed-file", + "uncommitted-file", + "package.json", + "../new-root-file", + ], + ), + ]; + for (inputs, expected_files) in input_tests { + let expected: GitHashes = HashMap::from_iter(expected_files.into_iter().map(|key| { + let key = RelativeUnixPathBuf::new(*key).unwrap(); + let value = all_expected.get(&key).unwrap().clone(); + (key, value) + })); + let hashes = + get_package_file_hashes_from_inputs(&repo_root, &package_path, &inputs).unwrap(); + assert_eq!(hashes, expected); + } Ok(()) } fn to_hash_map(pairs: &[(&str, &str)]) -> GitHashes { - HashMap::from_iter(pairs.into_iter().map(|(path, hash)| { + HashMap::from_iter(pairs.iter().map(|(path, hash)| { ( RelativeUnixPathBuf::new(path.as_bytes()).unwrap(), hash.to_string(), diff --git a/crates/turborepo-scm/src/status.rs b/crates/turborepo-scm/src/status.rs index fed4ab338c425..f27f62af190a5 100644 --- a/crates/turborepo-scm/src/status.rs +++ b/crates/turborepo-scm/src/status.rs @@ -129,7 +129,7 @@ mod tests { } fn to_hash_map(pairs: &[(&str, &str)]) -> GitHashes { - HashMap::from_iter(pairs.into_iter().map(|(path, hash)| { + 
HashMap::from_iter(pairs.iter().map(|(path, hash)| { ( RelativeUnixPathBuf::new(path.as_bytes()).unwrap(), hash.to_string(), diff --git a/crates/turborepo-wax/src/encode.rs b/crates/turborepo-wax/src/encode.rs index 5d0044f216686..c047487123795 100644 --- a/crates/turborepo-wax/src/encode.rs +++ b/crates/turborepo-wax/src/encode.rs @@ -254,7 +254,7 @@ fn encode<'t, A, T>( } else { pattern.push_str(nsepexpr!("&&{0}")); } - pattern.push_str("]"); + pattern.push(']'); // Compile the character class sub-expression. This may fail // if the subtraction of the separator pattern yields an // empty character class (meaning that the glob expression diff --git a/crates/turborepo-wax/src/lib.rs b/crates/turborepo-wax/src/lib.rs index 0f3c95ec374d4..9e35a7089dcbc 100644 --- a/crates/turborepo-wax/src/lib.rs +++ b/crates/turborepo-wax/src/lib.rs @@ -13,6 +13,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/olson-sean-k/wax/master/doc/wax.svg?sanitize=true" )] +#![allow(clippy::all)] #![deny( clippy::cast_lossless, clippy::checked_conversions, diff --git a/crates/turborepo-wax/src/walk.rs b/crates/turborepo-wax/src/walk.rs index 597618c7df1a3..af6ac8dccef03 100644 --- a/crates/turborepo-wax/src/walk.rs +++ b/crates/turborepo-wax/src/walk.rs @@ -143,11 +143,15 @@ macro_rules! walk { .strip_prefix(&$state.prefix) .expect("path is not in tree"); let depth = entry.depth().saturating_sub(1); + // Globs don't include the root token, but absolute paths do. + // Skip that token so that matching up components will work below. for candidate in path .components() + .filter(|c| !matches!(c, Component::RootDir)) .skip(depth) .filter_map(|component| match component { Component::Normal(component) => Some(CandidatePath::from(component)), + Component::Prefix(component) => Some(CandidatePath::from(component.as_os_str())), _ => None, }) .zip_longest($state.components.iter().skip(depth)) @@ -570,9 +574,22 @@ pub struct Walk<'g> { root: PathBuf, prefix: PathBuf, walk: walkdir::IntoIter, + // This is a hack to express an empty iterator + is_empty: bool, } impl<'g> Walk<'g> { + fn empty() -> Self { + Self { + pattern: Cow::Owned(Regex::new("").unwrap()), + components: vec![], + root: PathBuf::new(), + prefix: PathBuf::new(), + walk: walkdir::WalkDir::new(PathBuf::new()).into_iter(), + is_empty: true, + } + } + fn compile<'t, I>(tokens: I) -> Result, CompileError> where I: IntoIterator>, @@ -602,6 +619,7 @@ impl<'g> Walk<'g> { root, prefix, walk, + is_empty, } = self; Walk { pattern: Cow::Owned(pattern.into_owned()), @@ -609,6 +627,7 @@ impl<'g> Walk<'g> { root, prefix, walk, + is_empty, } } @@ -704,6 +723,9 @@ impl Iterator for Walk<'_> { type Item = WalkItem<'static>; fn next(&mut self) -> Option { + if self.is_empty { + return None; + } walk!(self => |entry| { return Some(entry.map(WalkEntry::into_owned)); }); @@ -907,6 +929,24 @@ pub fn walk<'g>( } }, ); + if matches!(link, LinkBehavior::ReadFile) { + if let Ok(tail) = root.strip_prefix(directory) { + let found = tail + .components() + .try_fold(directory.to_path_buf(), |accum, c| { + let candidate = accum.join(c); + if candidate.is_symlink() { + None + } else { + Some(candidate) + } + }) + .is_none(); + if found { + return Walk::empty(); + } + } + } let components = Walk::compile(glob.tree.as_ref().tokens()).expect("failed to compile glob sub-expressions"); Walk { @@ -921,6 +961,7 @@ pub fn walk<'g>( }) .max_depth(depth) .into_iter(), + is_empty: false, } } diff --git a/docs/components/LogoContext/items.tsx b/docs/components/LogoContext/items.tsx index 
6ce1fe60941d1..bb5a3304660b2 100644 --- a/docs/components/LogoContext/items.tsx +++ b/docs/components/LogoContext/items.tsx @@ -62,6 +62,14 @@ export const PLATFORM_MENU_ITEMS = ({ export const PRODUCT_MENU_ITEMS = ({ site, }: ContextList): Array => [ + { + name: "vercel", + "aria-label": "Open Vercel Home in New Tab", + children: "Vercel", + prefix: , + type: "external", + href: "https://vercel.com", + }, { name: "next-js", "aria-label": "Open Next.js Home in New Tab", diff --git a/docs/next.config.js b/docs/next.config.js index 4632672cfddc6..7003d7ae347b5 100644 --- a/docs/next.config.js +++ b/docs/next.config.js @@ -138,48 +138,48 @@ const nextConfig = withNextra({ }, { source: "/discord{/}?", - permanent: true, destination: "https://discord.gg/sSzyjxvbf5", + permanent: true, }, { source: "/docs/changelog", - permanent: true, destination: "https://github.com/vercel/turbo/releases", + permanent: true, }, { source: "/docs/guides/complimentary-tools", - permanent: true, destination: "/repo/docs/handbook", + permanent: true, }, { source: "/docs/guides/monorepo-tools", - permanent: true, destination: "/repo/docs/handbook", + permanent: true, }, { source: "/docs/glossary", - permanent: true, destination: "/repo/docs/handbook", + permanent: true, }, { source: "/docs/guides/continuous-integration", - permanent: true, destination: "/repo/docs/ci", + permanent: true, }, { source: "/repo/docs/handbook/prisma", - permanent: true, destination: "/repo/docs/handbook/tools/prisma", + permanent: true, }, { source: "/pack/docs/comparisons/turbopack-vs-vite", - permanent: true, destination: "/pack/docs/comparisons/vite", + permanent: true, }, { source: "/pack/docs/comparisons/turbopack-vs-webpack", - permanent: true, destination: "/pack/docs/comparisons/webpack", + permanent: true, }, { // Accidentally created, eventually removable. See below. @@ -211,8 +211,8 @@ const nextConfig = withNextra({ // They've _never_ resolved, so _eventually_ we should be able to remove the // redirects we added above to fix them. source: "/docs/features/:path*", - permanent: true, destination: "/repo/docs/core-concepts/:path*", + permanent: true, }, { // Accidentally created, eventually removable. See below. @@ -232,8 +232,8 @@ const nextConfig = withNextra({ // They've _never_ resolved, so _eventually_ we should be able to remove the // redirects we added above to fix them. 
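// Note: ":path*" matches any number of nested segments, so e.g. "/docs/core-concepts/caching" is forwarded to "/repo/docs/core-concepts/caching".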
source: "/docs/:path*", - permanent: true, destination: "/repo/docs/:path*", + permanent: true, }, ]; }, diff --git a/docs/pages/pack/docs/comparisons/webpack.mdx b/docs/pages/pack/docs/comparisons/webpack.mdx index 8a0600a36cf5b..053e9ccac93af 100644 --- a/docs/pages/pack/docs/comparisons/webpack.mdx +++ b/docs/pages/pack/docs/comparisons/webpack.mdx @@ -63,4 +63,4 @@ In a 1,000 module application, Turbopack can react to file changes **>> FULL TURBO (re) - $ git diff diff --git a/examples-tests/npm-with-npm/package.json b/examples-tests/npm-with-npm/package.json new file mode 100644 index 0000000000000..aa032f82ad3c0 --- /dev/null +++ b/examples-tests/npm-with-npm/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/npm-with-npm", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/npm-with-npm.t b/examples-tests/npm-with-npm/test.t similarity index 95% rename from turborepo-tests/examples/tests/npm-with-npm.t rename to examples-tests/npm-with-npm/test.t index d9c61b20bb2f3..21c9e7758fc57 100644 --- a/turborepo-tests/examples/tests/npm-with-npm.t +++ b/examples-tests/npm-with-npm/test.t @@ -1,6 +1,5 @@ - $ . ${TESTDIR}/setup.sh with-npm npm + $ . ${TESTDIR}/../setup.sh with-npm npm \d+\.\d+\.\d+ (re) - # run twice and make sure it works $ npm run build lint -- --output-logs=none @@ -15,9 +14,6 @@ Cached: 0 cached, 5 total Time:\s*[\.0-9ms]+ (re) - - - $ npm run build lint -- --output-logs=none \> build (re) @@ -31,7 +27,4 @@ Cached: 5 cached, 5 total Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) - - - $ git diff diff --git a/examples-tests/npm-with-yarn/package.json b/examples-tests/npm-with-yarn/package.json new file mode 100644 index 0000000000000..5d50a51b64b38 --- /dev/null +++ b/examples-tests/npm-with-yarn/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/npm-with-yarn", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/npm-with-yarn.t b/examples-tests/npm-with-yarn/test.t similarity index 94% rename from turborepo-tests/examples/tests/npm-with-yarn.t rename to examples-tests/npm-with-yarn/test.t index 1e0087a13c75b..acf33de9f082c 100644 --- a/turborepo-tests/examples/tests/npm-with-yarn.t +++ b/examples-tests/npm-with-yarn/test.t @@ -1,6 +1,5 @@ - $ . ${TESTDIR}/setup.sh with-yarn npm + $ . 
${TESTDIR}/../setup.sh with-yarn npm \d+\.\d+\.\d+ (re) - # run twice and make sure it works $ npm run build lint -- --output-logs=none @@ -15,9 +14,6 @@ Cached: 0 cached, 5 total Time:\s*[\.0-9ms]+ (re) - - - $ npm run build lint -- --output-logs=none \> build (re) @@ -31,7 +27,4 @@ Cached: 5 cached, 5 total Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) - - - $ git diff diff --git a/examples-tests/pnpm-basic/package.json b/examples-tests/pnpm-basic/package.json new file mode 100644 index 0000000000000..6001e0978e76f --- /dev/null +++ b/examples-tests/pnpm-basic/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/pnpm-basic", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/pnpm-basic.t b/examples-tests/pnpm-basic/test.t similarity index 88% rename from turborepo-tests/examples/tests/pnpm-basic.t rename to examples-tests/pnpm-basic/test.t index c9941ffbeeb38..c6f5cd3c46c8e 100644 --- a/turborepo-tests/examples/tests/pnpm-basic.t +++ b/examples-tests/pnpm-basic/test.t @@ -1,9 +1,9 @@ - $ . ${TESTDIR}/setup.sh basic pnpm + $ . ${TESTDIR}/../setup.sh basic pnpm 6.26.1 # run twice and make sure it works $ pnpm run build lint -- --output-logs=none - \> @ build (.*)/pnpm-basic.t (re) + \> @ build (.*)/test.t (re) \> turbo run build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: docs, eslint-config-custom, tsconfig, ui, web (esc) @@ -14,12 +14,9 @@ Cached: 0 cached, 5 total Time:\s*[\.0-9ms]+ (re) - - - $ pnpm run build lint -- --output-logs=none - \> @ build (.*)/pnpm-basic.t (re) + \> @ build (.*)/test.t (re) \> turbo run build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: docs, eslint-config-custom, tsconfig, ui, web (esc) @@ -30,7 +27,4 @@ Cached: 5 cached, 5 total Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) - - - $ git diff diff --git a/examples-tests/pnpm-gatsby/package.json b/examples-tests/pnpm-gatsby/package.json new file mode 100644 index 0000000000000..02402b7344805 --- /dev/null +++ b/examples-tests/pnpm-gatsby/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/pnpm-gatsby", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/pnpm-gatsby.t b/examples-tests/pnpm-gatsby/test.t similarity index 84% rename from turborepo-tests/examples/tests/pnpm-gatsby.t rename to examples-tests/pnpm-gatsby/test.t index 5d5b6903db8a3..7cc73d8370ac1 100644 --- a/turborepo-tests/examples/tests/pnpm-gatsby.t +++ b/examples-tests/pnpm-gatsby/test.t @@ -1,9 +1,9 @@ - $ . ${TESTDIR}/setup.sh with-gatsby pnpm + $ . 
${TESTDIR}/../setup.sh with-gatsby pnpm 6.26.1 # run twice and make sure it works $ pnpm run build lint -- --output-logs=none - \> with-gatsby@0.0.0 build (.*)/pnpm-gatsby.t (re) + \> with-gatsby@0.0.0 build (.*)/test.t (re) \> turbo build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: docs, eslint-config-custom, tsconfig, ui, web (esc) @@ -17,7 +17,7 @@ $ pnpm run build lint -- --output-logs=none - \> with-gatsby@0.0.0 build (.*)/pnpm-gatsby.t (re) + \> with-gatsby@0.0.0 build (.*)/test.t (re) \> turbo build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: docs, eslint-config-custom, tsconfig, ui, web (esc) diff --git a/examples-tests/pnpm-kitchen-sink/package.json b/examples-tests/pnpm-kitchen-sink/package.json new file mode 100644 index 0000000000000..56938119ce3e2 --- /dev/null +++ b/examples-tests/pnpm-kitchen-sink/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/pnpm-kitchen-sink", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/pnpm-kitchen-sink.t b/examples-tests/pnpm-kitchen-sink/test.t similarity index 88% rename from turborepo-tests/examples/tests/pnpm-kitchen-sink.t rename to examples-tests/pnpm-kitchen-sink/test.t index 2a9cbd67a34e6..894274378ed75 100644 --- a/turborepo-tests/examples/tests/pnpm-kitchen-sink.t +++ b/examples-tests/pnpm-kitchen-sink/test.t @@ -1,9 +1,9 @@ - $ . ${TESTDIR}/setup.sh kitchen-sink pnpm + $ . ${TESTDIR}/../setup.sh kitchen-sink pnpm 6.26.1 # run twice and make sure it works $ pnpm run build lint -- --output-logs=none - \> @ build (.*)/pnpm-kitchen-sink.t (re) + \> @ build (.*)/test.t (re) \> turbo build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: admin, api, blog, eslint-config-custom, eslint-config-custom-server, jest-presets, logger, storefront, tsconfig, ui (esc) @@ -14,12 +14,9 @@ Cached: 0 cached, 11 total Time:\s*[\.0-9ms]+ (re) - - - $ pnpm run build lint -- --output-logs=none - \> @ build (.*)/pnpm-kitchen-sink.t (re) + \> @ build (.*)/test.t (re) \> turbo build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: admin, api, blog, eslint-config-custom, eslint-config-custom-server, jest-presets, logger, storefront, tsconfig, ui (esc) @@ -30,7 +27,4 @@ Cached: 11 cached, 11 total Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) - - - $ git diff diff --git a/examples-tests/pnpm-with-svelte/package.json b/examples-tests/pnpm-with-svelte/package.json new file mode 100644 index 0000000000000..76c1152da5508 --- /dev/null +++ b/examples-tests/pnpm-with-svelte/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/pnpm-with-svelte", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/pnpm-with-svelte.t b/examples-tests/pnpm-with-svelte/test.t similarity index 86% rename from turborepo-tests/examples/tests/pnpm-with-svelte.t rename to examples-tests/pnpm-with-svelte/test.t index f4a8cf471129a..1fcf24aa1ba15 100644 --- a/turborepo-tests/examples/tests/pnpm-with-svelte.t +++ b/examples-tests/pnpm-with-svelte/test.t @@ -1,9 +1,9 @@ - $ . 
${TESTDIR}/setup.sh with-svelte pnpm + $ . ${TESTDIR}/../setup.sh with-svelte pnpm 6.26.1 # run twice and make sure it works $ pnpm run build lint -- --output-logs=none - \> @ build (.*)/pnpm-with-svelte.t (re) + \> @ build (.*)/test.t (re) \> turbo run build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: docs, eslint-config-custom, ui, web (esc) @@ -14,12 +14,9 @@ Cached: 0 cached, 5 total Time:\s*[\.0-9ms]+ (re) - - - $ pnpm run build lint -- --output-logs=none - \> @ build (.*)/pnpm-with-svelte.t (re) + \> @ build (.*)/test.t (re) \> turbo run build "lint" "--output-logs=none" (re) \xe2\x80\xa2 Packages in scope: docs, eslint-config-custom, ui, web (esc) @@ -30,7 +27,4 @@ Cached: 5 cached, 5 total Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) - - - $ git diff diff --git a/turborepo-tests/examples/tests/setup.sh b/examples-tests/setup.sh similarity index 64% rename from turborepo-tests/examples/tests/setup.sh rename to examples-tests/setup.sh index 890b7df10bd87..6f42b2b29df42 100644 --- a/turborepo-tests/examples/tests/setup.sh +++ b/examples-tests/setup.sh @@ -1,12 +1,15 @@ #!/bin/bash -set -e + +# This script is called from within a prysk test, so pwd is already in the prysk tmp directory. + +set -eo pipefail exampleName=$1 pkgManager=$2 # Copy the example dir over to the test dir that prysk puts you in SCRIPT_DIR=$(dirname "${BASH_SOURCE[0]}") -MONOREPO_ROOT_DIR="$SCRIPT_DIR/../../.." +MONOREPO_ROOT_DIR="$SCRIPT_DIR/.." EXAMPLE_DIR="$MONOREPO_ROOT_DIR/examples/$exampleName" TARGET_DIR="$(pwd)" @@ -37,18 +40,30 @@ corepack enable NPM_PACKAGE_MANAGER_VALUE="npm@8.1.2" PNPM_PACKAGE_MANAGER_VALUE="pnpm@6.26.1" YARN_PACKAGE_MANAGER_VALUE="yarn@1.22.17" + if [ "$pkgManager" == "npm" ]; then + # Note! We will set packageManager for npm, but it doesn't actually change the version + # We are effectively just removing any packageManager that's already set. + # https://nodejs.org/api/corepack.html#how-does-corepack-interact-with-npm + # > "While npm is a valid option in the "packageManager" property, the lack of shim will cause the global npm to be used." set_package_manager "$NPM_PACKAGE_MANAGER_VALUE" + npm --version - npm install > /dev/null + npm install > /dev/null 2>&1 elif [ "$pkgManager" == "pnpm" ]; then set_package_manager "$PNPM_PACKAGE_MANAGER_VALUE" pnpm --version - pnpm install > /dev/null + pnpm install > /dev/null 2>&1 elif [ "$pkgManager" == "yarn" ]; then set_package_manager "$YARN_PACKAGE_MANAGER_VALUE" yarn --version - yarn install > /dev/null + # Pass a --cache-folder here because yarn seems to have trouble + # running multiple yarn installs at the same time and we are running + # examples tests in parallel. https://github.com/yarnpkg/yarn/issues/1275 + yarn install --cache-folder="$PWD/.yarn-cache" > /dev/null 2>&1 + + # And ignore this new cache folder from the new git repo we're about to create.
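+ # (Otherwise the cache contents would end up committed in the fresh test repo and could make the final "git diff" check non-empty.)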
+ echo ".yarn-cache" >> .gitignore fi # Delete .git directory if it's there, we'll set up a new git repo diff --git a/examples-tests/yarn-non-monorepo/package.json b/examples-tests/yarn-non-monorepo/package.json new file mode 100644 index 0000000000000..cbc34ab4ee5ee --- /dev/null +++ b/examples-tests/yarn-non-monorepo/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/yarn-non-monorepo", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/yarn-non-monorepo.t b/examples-tests/yarn-non-monorepo/test.t similarity index 94% rename from turborepo-tests/examples/tests/yarn-non-monorepo.t rename to examples-tests/yarn-non-monorepo/test.t index 24fda1bf518e8..132c000788da9 100644 --- a/turborepo-tests/examples/tests/yarn-non-monorepo.t +++ b/examples-tests/yarn-non-monorepo/test.t @@ -1,4 +1,4 @@ - $ . ${TESTDIR}/setup.sh non-monorepo yarn + $ . ${TESTDIR}/../setup.sh non-monorepo yarn \d+\.\d+\.\d+ (re) # run twice and make sure it works @@ -25,4 +25,5 @@ Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) Done in [\.0-9]+m?s\. (re) + $ git diff diff --git a/examples-tests/yarn-with-npm/package.json b/examples-tests/yarn-with-npm/package.json new file mode 100644 index 0000000000000..79bb6a8b26953 --- /dev/null +++ b/examples-tests/yarn-with-npm/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-examples-tests/yarn-with-npm", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/yarn-with-npm.t b/examples-tests/yarn-with-npm/test.t similarity index 95% rename from turborepo-tests/examples/tests/yarn-with-npm.t rename to examples-tests/yarn-with-npm/test.t index 1306dae457283..40c0ae78bd79d 100644 --- a/turborepo-tests/examples/tests/yarn-with-npm.t +++ b/examples-tests/yarn-with-npm/test.t @@ -1,4 +1,4 @@ - $ . ${TESTDIR}/setup.sh with-npm yarn + $ . ${TESTDIR}/../setup.sh with-npm yarn \d+\.\d+\.\d+ (re) # run twice and make sure it works @@ -27,4 +27,5 @@ Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) Done in [\.0-9]+m?s\. (re) + $ git diff diff --git a/examples-tests/yarn-with-yarn/package.json b/examples-tests/yarn-with-yarn/package.json new file mode 100644 index 0000000000000..cf9bfb5e19bfd --- /dev/null +++ b/examples-tests/yarn-with-yarn/package.json @@ -0,0 +1,11 @@ +{ + "name": "@turborepo-tests-examples/yarn-with-yarn", + "scripts": { + "example-test": "../.cram_env/bin/prysk --shell=`which bash` test.t" + }, + "dependencies": { + "turborepo-tests-helpers": "workspace:*", + "@turborepo-examples-tests/helpers": "workspace:*", + "turborepo-examples": "workspace:*" + } +} diff --git a/turborepo-tests/examples/tests/yarn-with-yarn.t b/examples-tests/yarn-with-yarn/test.t similarity index 95% rename from turborepo-tests/examples/tests/yarn-with-yarn.t rename to examples-tests/yarn-with-yarn/test.t index a083911511575..c4117ab6c165b 100644 --- a/turborepo-tests/examples/tests/yarn-with-yarn.t +++ b/examples-tests/yarn-with-yarn/test.t @@ -1,4 +1,4 @@ - $ . ${TESTDIR}/setup.sh with-yarn yarn + $ . 
${TESTDIR}/../setup.sh with-yarn yarn \d+\.\d+\.\d+ (re) # run twice and make sure it works @@ -27,4 +27,5 @@ Time:\s*[\.0-9ms]+ >>> FULL TURBO (re) Done in [\.0-9]+m?s\. (re) + $ git diff diff --git a/examples/basic/meta.json b/examples/basic/meta.json index c0232590b2812..eff0c70a306d0 100644 --- a/examples/basic/meta.json +++ b/examples/basic/meta.json @@ -1,5 +1,5 @@ { - "name": "Basic", + "name": "Next.js", "description": "Minimal Turborepo example for learning the fundamentals.", "template": "https://vercel.com/templates/next.js/turborepo-next-basic", "featured": true, diff --git a/examples/kitchen-sink/.gitignore b/examples/kitchen-sink/.gitignore index 1554c1c53a127..e1a2be0251238 100644 --- a/examples/kitchen-sink/.gitignore +++ b/examples/kitchen-sink/.gitignore @@ -10,4 +10,3 @@ dist-ssr .cache server/dist public/dist -.turbo diff --git a/packages/create-turbo/jest.config.js b/packages/create-turbo/jest.config.js index b738f4b2bd92b..a273992d5cd70 100644 --- a/packages/create-turbo/jest.config.js +++ b/packages/create-turbo/jest.config.js @@ -7,5 +7,6 @@ module.exports = { transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], modulePathIgnorePatterns: ["/node_modules", "/dist"], collectCoverage: true, - verbose: true, + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/create-turbo/package.json b/packages/create-turbo/package.json index 8b1e58dd4cc90..58eb24eb97383 100644 --- a/packages/create-turbo/package.json +++ b/packages/create-turbo/package.json @@ -1,6 +1,6 @@ { "name": "create-turbo", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "Create a new Turborepo", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/devlow-bench/README.md b/packages/devlow-bench/README.md new file mode 100644 index 0000000000000..f4430af399221 --- /dev/null +++ b/packages/devlow-bench/README.md @@ -0,0 +1,163 @@ +# devlow-bench + +DEVeloper workfLOW BENCHmarking tool + +## Installation + +```bash +npm install devlow-bench +``` + +## Usage + +```bash +Usage: devlow-bench [options] +## Selecting scenarios + --scenario=, -s= Only run the scenario with the given name + --interactive, -i Select scenarios and variants interactively + --= Filter by any variant property defined in scenarios +## Output + --json=, -j= Write the results to the given path as JSON + --console Print the results to the console + --datadog[=] Upload the results to Datadog + (requires DATADOG_API_KEY environment variables) +## Help + --help, -h, -? Show this help +``` + +## Scenarios + +A scenario file is similar to a test case file. It can contain one or multiple scenarios by using the `describe()` method to define them. + +```js +import { describe } from "devlow-bench"; + +describe( + "my scenario", + { + /* property options */ + }, + async ( + { + /* property values */ + } + ) => { + // run the scenario + } +); +``` + +The `describe()` method takes three arguments: + +- `name`: The name of the scenario +- `props`: An object with possible property values for the scenario. +- `fn`: The function that runs the scenario. It is passed an object with the property values as the first argument. + +The `props` object can contain any number of properties. The key is the name of the property. The value must either be an array of possible values (number, string, boolean), or it can be `true` as shortcut for `[true, false]` resp. `false` for `[false, true]`. 
The scenario will run once for every possible combination of the property values, unless filtered down with the CLI options above. + +### Example + +```js +import { describe } from "devlow-bench"; + +describe( + "my scenario", + { + myProperty: [1, 2, 3], + myOtherProperty: true, + }, + async ({ myProperty, myOtherProperty }) => { + console.log(myProperty, myOtherProperty); + } +); + +// will print: +// 1 true +// 2 true +// 3 true +// 1 false +// 2 false +// 3 false +``` + +## Reporting measurements + +```js +import { measureTime, reportMeasurement } from "devlow-bench"; + +// Measure a time +measureTime("name of the timing", { + /* optional options */ +}); + +// Report some other measurement +reportMeasurement("name of the measurement", value, unit, { + /* optional options */ +}); +``` + +Options: + +- `relativeTo`: measure the time/value relative to some other measurement. +- `scenario`: override the reported scenario name (to make the measurement independent of the scenario name) +- `props`: override the reported scenario properties (to make the measurement independent of the scenario properties; the object is merged with the original props, and a `null` value removes a prop) + +## Browser operations + +The `devlow-bench` package provides a few helper functions to run operations in the browser. + +```js +import { newBrowserSession } from "devlow-bench/browser"; + +const session = await newBrowserSession({ + // options +}); +await session.hardNavigation("metric name", "https://example.com"); +await session.reload("metric name"); +await session.softNavigationByClick("metric name", ".selector-to-click"); +await session.close(); +``` + +## Shell operations + +The `devlow-bench` package provides a few helper functions to run operations in the shell. + +```js +import { command } from 'devlow-bench/shell'; + +const shell = await command("pnpm", ["run", "build"], { + env: { /* optional env vars */ }, + cwd: "/optional/path/to/directory", +}); + +// Wait for successful exit +await shell.ok(); + +// Wait for exit +const exitCode = await shell.end(); + +// Wait for specific output +const [match, world] = await shell.waitForOutput(/hello (world)/); + +// Report memory usage of the process tree as a metric +await shell.reportMemUsage("metric name", { /* optional options */ }); + +shell.stdout, shell.stderr + +// merged output +shell.output + +// Kill the process tree +await shell.kill(); +``` + +## File operations + +The `devlow-bench` package provides a few helper functions to run operations on the file system.
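+For example, a scenario can wait for a build artifact to show up on disk before measuring (the path below is illustrative):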
+ +```js +import { waitForFile } from "devlow-bench/file"; + +// wait for file to exist +await waitForFile("/path/to/file", /* timeout = */ 30000); +``` diff --git a/packages/devlow-bench/package.json b/packages/devlow-bench/package.json new file mode 100644 index 0000000000000..24730bf3cf6a4 --- /dev/null +++ b/packages/devlow-bench/package.json @@ -0,0 +1,44 @@ +{ + "name": "@vercel/devlow-bench", + "version": "0.1.3", + "description": "Benchmarking tool for the developer workflow", + "type": "module", + "main": "dist/index.js", + "bin": "dist/cli.js", + "scripts": { + "prerelease": "pnpm run build:ts", + "build:ts": "tsc" + }, + "files": [ + "dist" + ], + "keywords": [], + "author": "Tobias Koppers", + "license": "MPL-2.0", + "exports": { + ".": "./dist/index.js", + "./browser": "./dist/browser.js", + "./shell": "./dist/shell.js", + "./file": "./dist/file.js", + "./interfaces/compose": "./dist/interfaces/compose.js", + "./interfaces/console": "./dist/interfaces/console.js", + "./interfaces/interactive": "./dist/interfaces/interactive.js", + "./interfaces/json": "./dist/interfaces/json.js" + }, + "devDependencies": { + "@types/inquirer": "^9.0.3", + "@types/minimist": "^1.2.2", + "@types/node": "^20.3.0", + "@types/split2": "^4.2.0" + }, + "dependencies": { + "@datadog/datadog-api-client": "^1.13.0", + "chalk": "2.4.2", + "inquirer": "^9.2.7", + "minimist": "^1.2.8", + "pidusage-tree": "^2.0.5", + "playwright-chromium": "^1.35.0", + "split2": "^4.2.0", + "tree-kill": "^1.2.2" + } +} diff --git a/packages/devlow-bench/src/browser.ts b/packages/devlow-bench/src/browser.ts new file mode 100644 index 0000000000000..a9d0b3d85163c --- /dev/null +++ b/packages/devlow-bench/src/browser.ts @@ -0,0 +1,226 @@ +import type { + Browser, + BrowserContext, + Page, + Request, + Response, +} from "playwright-chromium"; +import { chromium } from "playwright-chromium"; +import { measureTime, reportMeasurement } from "./index.js"; +import { resolve } from "path"; + +interface BrowserSession { + close(): Promise; + hardNavigation(url: string, metricName?: string): Promise; + softNavigationByClick(metricName: string, selector: string): Promise; + reload(metricName: string): Promise; +} + +async function withRequestMetrics( + metricName: string, + page: Page, + fn: () => Promise +): Promise { + const activePromises: Promise[] = []; + const sizeByExtension = new Map(); + const requestsByExtension = new Map(); + const responseHandler = (response: Response) => { + activePromises.push( + (async () => { + const url = response.request().url(); + const status = response.status(); + const extension = + /^[^\?#]+\.([a-z0-9]+)(?:[\?#]|$)/i.exec(url)?.[1] ?? "none"; + const currentRequests = requestsByExtension.get(extension) ?? 0; + requestsByExtension.set(extension, currentRequests + 1); + if (status >= 200 && status < 300) { + let body; + try { + body = await response.body(); + } catch {} + if (body) { + const size = body.length; + const current = sizeByExtension.get(extension) ?? 
0; + sizeByExtension.set(extension, current + size); + } + } + })() + ); + }; + try { + page.on("response", responseHandler); + await fn(); + await Promise.all(activePromises); + let totalDownload = 0; + for (const [extension, size] of sizeByExtension.entries()) { + reportMeasurement( + `${metricName}/responseSizes/${extension}`, + size, + "bytes" + ); + totalDownload += size; + } + reportMeasurement(`${metricName}/responseSizes`, totalDownload, "bytes"); + let totalRequests = 0; + for (const [extension, count] of requestsByExtension.entries()) { + reportMeasurement( + `${metricName}/requests/${extension}`, + count, + "requests" + ); + totalRequests += count; + } + reportMeasurement(`${metricName}/requests`, totalRequests, "requests"); + } finally { + page.off("response", responseHandler); + } +} + +function networkIdle(page: Page) { + return new Promise((resolve) => { + const cleanup = () => { + page.off("request", requestHandler); + page.off("requestfailed", requestFinishedHandler); + page.off("requestfinished", requestFinishedHandler); + }; + let activeRequests = 0; + let timeout: NodeJS.Timeout | null = null; + const requestHandler = (request: Request) => { + activeRequests++; + if (timeout) { + clearTimeout(timeout); + timeout = null; + } + }; + const requestFinishedHandler = (request: Request) => { + activeRequests--; + if (activeRequests === 0) { + timeout = setTimeout(() => { + cleanup(); + resolve(); + }, 300); + } + }; + page.on("request", requestHandler); + page.on("requestfailed", requestFinishedHandler); + page.on("requestfinished", requestFinishedHandler); + }); +} + +class BrowserSessionImpl implements BrowserSession { + private browser: Browser; + private context: BrowserContext; + private page: Page | null; + constructor(browser: Browser, context: BrowserContext) { + this.browser = browser; + this.context = context; + this.page = null; + } + + async close() { + if (this.page) { + await this.page.close(); + } + await this.context.close(); + await this.browser.close(); + } + + async hardNavigation(metricName: string, url: string) { + const page = (this.page = this.page ?? 
(await this.context.newPage())); + await withRequestMetrics(metricName, page, async () => { + measureTime(`${metricName}/start`); + await page.goto(url, { + waitUntil: "commit", + }); + measureTime(`${metricName}/html`, { + relativeTo: `${metricName}/start`, + }); + await page.waitForLoadState("domcontentloaded"); + measureTime(`${metricName}/dom`, { + relativeTo: `${metricName}/start`, + }); + await page.waitForLoadState("load"); + measureTime(`${metricName}/load`, { + relativeTo: `${metricName}/start`, + }); + await page.waitForLoadState("networkidle"); + measureTime(`${metricName}`, { + relativeTo: `${metricName}/start`, + }); + }); + return page; + } + + async softNavigationByClick(metricName: string, selector: string) { + const page = this.page; + if (!page) { + throw new Error( + "softNavigationByClick() must be called after hardNavigation()" + ); + } + await withRequestMetrics(metricName, page, async () => { + measureTime(`${metricName}/start`); + const firstResponse = new Promise((resolve) => + page.once("response", () => resolve()) + ); + const idle = networkIdle(page); + await page.click(selector); + await firstResponse; + measureTime(`${metricName}/firstResponse`, { + relativeTo: `${metricName}/start`, + }); + await idle; + measureTime(`${metricName}`, { + relativeTo: `${metricName}/start`, + }); + }); + } + + async reload(metricName: string) { + const page = this.page; + if (!page) { + throw new Error("reload() must be called after hardNavigation()"); + } + await withRequestMetrics(metricName, page, async () => { + measureTime(`${metricName}/start`); + await page.reload({ + waitUntil: "commit", + }); + measureTime(`${metricName}/html`, { + relativeTo: `${metricName}/start`, + }); + await page.waitForLoadState("domcontentloaded"); + measureTime(`${metricName}/dom`, { + relativeTo: `${metricName}/start`, + }); + await page.waitForLoadState("load"); + measureTime(`${metricName}/load`, { + relativeTo: `${metricName}/start`, + }); + await page.waitForLoadState("networkidle"); + measureTime(`${metricName}`, { + relativeTo: `${metricName}/start`, + }); + }); + } +} + +export async function newBrowserSession(options: { + headless?: boolean; + devtools?: boolean; + baseURL?: string; +}): Promise { + const browser = await chromium.launch({ + headless: options.headless ?? process.env.HEADLESS !== "false", + timeout: 60000, + }); + const context = await browser.newContext({ + baseURL: options.baseURL ?? 
"http://localhost:3000", + viewport: { width: 1280, height: 720 }, + }); + context.setDefaultTimeout(120000); + context.setDefaultNavigationTimeout(120000); + return new BrowserSessionImpl(browser, context); +} + +// see next.js/test/lib/browsers/playwright.ts diff --git a/packages/devlow-bench/src/cli.ts b/packages/devlow-bench/src/cli.ts new file mode 100644 index 0000000000000..76f1a0afbbb3c --- /dev/null +++ b/packages/devlow-bench/src/cli.ts @@ -0,0 +1,114 @@ +import minimist from "minimist"; +import { setCurrentScenarios } from "./describe.js"; +import { join } from "path"; +import { Scenario, ScenarioVariant, runScenarios } from "./index.js"; +import compose from "./interfaces/compose.js"; +import { pathToFileURL } from "url"; + +(async () => { + const knownArgs = new Set([ + "scenario", + "s", + "json", + "j", + "console", + "datadog", + "interactive", + "i", + "help", + "h", + "?", + "_", + ]); + const args = minimist(process.argv.slice(2), { + alias: { + s: "scenario", + j: "json", + i: "interactive", + "?": "help", + h: "help", + }, + }); + + if (args.help || (Object.keys(args).length === 1 && args._.length === 0)) { + console.log("Usage: devlow-bench [options] "); + console.log("## Selecting scenarios"); + console.log( + " --scenario=, -s= Only run the scenario with the given name" + ); + console.log( + " --interactive, -i Select scenarios and variants interactively" + ); + console.log( + " --= Filter by any variant property defined in scenarios" + ); + console.log("## Output"); + console.log( + " --json=, -j= Write the results to the given path as JSON" + ); + console.log( + " --console Print the results to the console" + ); + console.log( + " --datadog[=] Upload the results to Datadog" + ); + console.log( + " (requires DATADOG_API_KEY environment variables)" + ); + console.log("## Help"); + console.log(" --help, -h, -? Show this help"); + } + + const scenarios: Scenario[] = []; + setCurrentScenarios(scenarios); + + for (const path of args._) { + await import(pathToFileURL(join(process.cwd(), path)).toString()); + } + + setCurrentScenarios(null); + + const cliIface = { + filterScenarios: async (scenarios: Scenario[]) => { + if (args.scenario) { + const filter = [].concat(args.scenario); + return scenarios.filter((s) => + filter.some((filter) => s.name.includes(filter)) + ); + } + return scenarios; + }, + filterScenarioVariants: async (variants: ScenarioVariant[]) => { + const propEntries = Object.entries(args).filter( + ([key]) => !knownArgs.has(key) + ); + if (propEntries.length === 0) return variants; + for (const [key, value] of propEntries) { + const values = (Array.isArray(value) ? value : [value]).map((v) => + v.toString() + ); + variants = variants.filter((variant) => { + const prop = variant.props[key]; + if (typeof prop === "undefined") return false; + const str = prop.toString(); + return values.some((v) => str.includes(v)); + }); + } + return variants; + }, + }; + let ifaces = [ + cliIface, + args.interactive && (await import("./interfaces/interactive.js")).default(), + args.json && (await import("./interfaces/json.js")).default(args.json), + args.datadog && + (await import("./interfaces/datadog.js")).default( + typeof args.datadog === "string" ? 
{ host: args.datadog } : undefined + ), + args.console !== false && + (await import("./interfaces/console.js")).default(), + ].filter((x) => x); + await runScenarios(scenarios, compose(...ifaces)); +})().catch((e) => { + console.error(e.stack); +}); diff --git a/packages/devlow-bench/src/describe.ts b/packages/devlow-bench/src/describe.ts new file mode 100644 index 0000000000000..3b88b731135c4 --- /dev/null +++ b/packages/devlow-bench/src/describe.ts @@ -0,0 +1,153 @@ +import type { + ConfigFor, + CurrentScenario, + Interface, + Scenario, +} from "./index.js"; +import compose from "./interfaces/compose.js"; +import { runScenarios } from "./runner.js"; + +let currentScenarios: Scenario[] | null = null; + +export function setCurrentScenarios(scenarios: Scenario[] | null): void { + currentScenarios = scenarios; +} + +export function describe
<P extends Record<string, string | number | boolean>>( + name: string, + config: ConfigFor<P>
, + fn: (props: P) => Promise +): void { + if (currentScenarios === null) { + const scenarios = (currentScenarios = []); + + Promise.resolve().then(async () => { + const ifaceNames = process.env.INTERFACE || "interactive,console"; + const ifaces = []; + for (const ifaceName of ifaceNames.split(",").map((s) => s.trim())) { + let iface: unknown; + try { + iface = await import(`./interfaces/${ifaceName}.js`); + } catch (e) { + iface = await import(ifaceName); + } + iface = (iface && (iface as any).default) || iface; + if (typeof iface === "function") { + iface = await iface(); + } + if (!iface) { + throw new Error(`Interface ${ifaceName} is not a valid interface`); + } + ifaces.push(iface as Interface); + } + runScenarios(scenarios, compose(...ifaces)); + }); + } + const normalizedConfig: Record = + Object.fromEntries( + Object.entries(config).map(([key, value]) => [ + key, + typeof value === "boolean" + ? [value, !value] + : (value as (string | number | boolean)[]), + ]) + ); + currentScenarios!.push({ + name, + config: normalizedConfig, + only: false, + fn: fn as ( + props: Record + ) => Promise, + }); +} + +describe.only = function describeOnly
<P extends Record<string, string | number | boolean>>( + name: string, + config: ConfigFor<P>
, + fn: (props: P) => Promise +): void { + describe(name, config, fn); + currentScenarios![currentScenarios!.length - 1].only = true; +}; + +let currentScenario: CurrentScenario | null = null; + +export function withCurrent( + current: CurrentScenario, + fn: () => Promise +): Promise { + const prev = currentScenario; + currentScenario = current; + return fn().finally(() => { + currentScenario = prev; + }); +} + +export const PREVIOUS = Symbol("previous measurement with that unit"); + +export function measureTime( + name: string, + options: { + relativeTo?: string | typeof PREVIOUS; + scenario?: string; + props?: Record; + } = {} +) { + const end = Date.now(); + reportMeasurement(name, end, "ms", { relativeTo: PREVIOUS, ...options }); +} + +export function reportMeasurement( + name: string, + value: number, + unit: string, + options: { + relativeTo?: string | typeof PREVIOUS; + scenario?: string; + props?: Record; + } = {} +) { + if (!currentScenario) { + throw new Error("reportMeasurement() must be called inside of describe()"); + } + let { relativeTo, scenario, props } = options; + if (relativeTo === PREVIOUS) { + relativeTo = "previous"; + for (const [prevName, prev] of currentScenario.measurements) { + if (prev.unit === unit) { + relativeTo = prevName; + } + } + } + currentScenario.measurements.set(name, { + value, + unit, + }); + let reportedValue = value; + if (relativeTo) { + const prev = currentScenario.measurements.get(relativeTo); + if (!prev) { + throw new Error(`No measurement named ${relativeTo} found`); + } + if (prev.unit !== "ms") { + throw new Error( + `Measurement ${relativeTo} is not a "${unit}" measurement` + ); + } + reportedValue -= prev.value; + } + currentScenario.iface.measurement( + scenario ?? currentScenario.scenario.scenario.name, + props + ? 
{ + ...currentScenario.scenario.props, + ...props, + } + : currentScenario.scenario.props, + name, + reportedValue, + unit, + relativeTo + ); +} diff --git a/packages/devlow-bench/src/file.ts b/packages/devlow-bench/src/file.ts new file mode 100644 index 0000000000000..2f50b75e84240 --- /dev/null +++ b/packages/devlow-bench/src/file.ts @@ -0,0 +1,59 @@ +import { watch } from "fs"; +import { access, constants } from "fs/promises"; +import { dirname } from "path"; + +export async function waitForFile( + path: string, + timeout: number +): Promise { + let currentAction = ""; + let timeoutRef; + const timeoutPromise = new Promise((resolve, reject) => { + timeoutRef = setTimeout(() => { + reject( + new Error(`Timed out waiting for file ${path} (${currentAction}))`) + ); + }, timeout || 60000); + }); + const elements = []; + let current = path; + while (true) { + elements.push(current); + const parent = dirname(current); + if (parent === current) { + break; + } + current = parent; + } + elements.reverse(); + try { + for (const path of elements) { + const checkAccess = () => + access(path, constants.F_OK) + .then(() => true) + .catch(() => false); + if (!(await checkAccess())) { + let resolveCheckAgain = () => {}; + const watcher = watch(dirname(path), () => { + resolveCheckAgain(); + }); + currentAction = `waiting for ${path}`; + let checkAgainPromise = new Promise((resolve) => { + resolveCheckAgain = resolve; + }); + try { + do { + await Promise.race([timeoutPromise, checkAgainPromise]); + checkAgainPromise = new Promise((resolve) => { + resolveCheckAgain = resolve; + }); + } while (!(await checkAccess())); + } finally { + watcher.close(); + } + } + } + } finally { + clearTimeout(timeoutRef); + } +} diff --git a/packages/devlow-bench/src/index.ts b/packages/devlow-bench/src/index.ts new file mode 100644 index 0000000000000..e87fd8a368bf5 --- /dev/null +++ b/packages/devlow-bench/src/index.ts @@ -0,0 +1,89 @@ +export type ConfigFor
<P extends Record<string, string | number | boolean>>
= { + [K in keyof P]: P[K] extends string + ? string[] + : P[K] extends number + ? number[] + : P[K] extends boolean + ? boolean[] | boolean + : never; +}; + +export interface Scenario { + name: string; + config: Record; + only: boolean; + fn: (props: Record) => Promise; +} + +export interface ScenarioVariant { + scenario: Scenario; + props: Record; +} + +export interface CurrentScenario { + scenario: ScenarioVariant; + iface: FullInterface; + + measurements: Map< + string, + { + value: number; + unit: string; + } + >; +} + +export type Interface = Partial; + +export interface FullInterface { + filterScenarios(scenarios: Scenario[]): Promise; + filterScenarioVariants( + scenarioVariants: ScenarioVariant[] + ): Promise; + + start( + scenario: string, + props: Record + ): Promise; + measurement( + scenario: string, + props: Record, + name: string, + value: number, + unit: string, + relativeTo?: string + ): Promise; + end( + scenario: string, + props: Record + ): Promise; + error( + scenario: string, + props: Record, + error: unknown + ): Promise; + + finish(): Promise; +} + +export function intoFullInterface(iface: Interface): FullInterface { + return { + filterScenarios: iface.filterScenarios ?? (async (scenarios) => scenarios), + filterScenarioVariants: + iface.filterScenarioVariants ?? + (async (scenarioVariants) => scenarioVariants), + start: iface.start ?? (async () => {}), + measurement: iface.measurement ?? (async () => {}), + end: iface.end ?? (async () => {}), + error: iface.error ?? (async () => {}), + finish: iface.finish ?? (async () => {}), + }; +} + +export { + describe, + measureTime, + reportMeasurement, + PREVIOUS, +} from "./describe.js"; +export { runScenarios } from "./runner.js"; diff --git a/packages/devlow-bench/src/interfaces/compose.ts b/packages/devlow-bench/src/interfaces/compose.ts new file mode 100644 index 0000000000000..dac157c7f34d1 --- /dev/null +++ b/packages/devlow-bench/src/interfaces/compose.ts @@ -0,0 +1,34 @@ +import { Interface } from "../index.js"; + +export default function compose(...ifaces: Interface[]): Interface { + const allKeys = new Set(); + for (const iface of ifaces) { + for (const key of Object.keys(iface)) { + allKeys.add(key as keyof Interface); + } + } + const composed: any = {}; + for (const key of allKeys) { + if (key.startsWith("filter")) { + composed[key] = async (items: any, ...args: any[]) => { + for (const iface of ifaces) { + const anyIface = iface as any; + if (anyIface[key]) { + items = await anyIface[key](items, ...args); + } + } + return items; + }; + } else { + composed[key] = async (...args: any[]) => { + for (const iface of ifaces) { + const anyIface = iface as any; + if (anyIface[key]) { + await anyIface[key](...args); + } + } + }; + } + } + return composed; +} diff --git a/packages/devlow-bench/src/interfaces/console.ts b/packages/devlow-bench/src/interfaces/console.ts new file mode 100644 index 0000000000000..6e845eaed1e5e --- /dev/null +++ b/packages/devlow-bench/src/interfaces/console.ts @@ -0,0 +1,36 @@ +import { Interface, Scenario, ScenarioVariant } from "../index.js"; +import inquirer from "inquirer"; +import chalk from "chalk"; +import { formatUnit } from "../units.js"; +import { formatVariant } from "../utils.js"; + +export default function createInterface(): Interface { + const iface: Interface = { + start: async (scenario, props) => { + console.log( + chalk.bold.underline(`Running ${formatVariant(scenario, props)}...`) + ); + }, + measurement: async (scenario, props, name, value, unit, relativeTo) => { + 
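// Print each measurement as soon as it is reported. +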
console.log( + chalk.bgCyan.bold.magenta( + `${formatVariant(scenario, props)}: ${name} = ${formatUnit( + value, + unit + )}${relativeTo ? ` (from ${relativeTo})` : ""}` + ) + ); + }, + error: async (scenario, props, error) => { + console.log( + chalk.bold.red( + `${formatVariant(scenario, props)}: ${ + (error && (error as any).stack) || error + }` + ) + ); + }, + }; + + return iface; +} diff --git a/packages/devlow-bench/src/interfaces/datadog.ts b/packages/devlow-bench/src/interfaces/datadog.ts new file mode 100644 index 0000000000000..e18a36982c2d0 --- /dev/null +++ b/packages/devlow-bench/src/interfaces/datadog.ts @@ -0,0 +1,89 @@ +import type { + DistributionPointsSeries, + MetricMetadata, +} from "@datadog/datadog-api-client/dist/packages/datadog-api-client-v1/index.js"; +import type { Interface } from "../index.js"; +import datadogApiClient from "@datadog/datadog-api-client"; +import os from "os"; + +function toIdentifier(str: string) { + return str.replace(/\//g, ".").replace(/ /g, "_"); +} + +const UNIT_MAPPING: Record = { + ms: "millisecond", + requests: "request", + bytes: "byte", +}; + +export default function createInterface({ + apiKey = process.env.DATADOG_API_KEY, + appKey = process.env.DATADOG_APP_KEY, + host = process.env.DATADOG_HOST || os.hostname(), +}: { apiKey?: string; appKey?: string; host?: string } = {}): Interface { + if (!apiKey) + throw new Error("Datadog API key is required (set DATADOG_API_KEY)"); + const commonTags = [ + `ci:${!!process.env.CI || "false"}`, + `os:${process.platform}`, + `os_release:${os.release()}`, + `cpus:${os.cpus().length}`, + `cpu_model:${os.cpus()[0].model}`, + `user:${os.userInfo().username}`, + `arch:${os.arch()}`, + `total_memory:${Math.round(os.totalmem() / 1024 / 1024 / 1024)}`, + `node_version:${process.version}`, + ]; + const configuration = datadogApiClient.client.createConfiguration({ + authMethods: { + apiKeyAuth: apiKey, + appKeyAuth: appKey, + }, + }); + const api = new datadogApiClient.v1.MetricsApi(configuration); + const dataPoints: DistributionPointsSeries[] = []; + const metricMetadata: Record = {}; + const iface: Interface = { + measurement: async (scenario, props, name, value, unit, relativeTo) => { + const ts = Math.round(Date.now() / 1000); + const metric = toIdentifier(`devlow_bench/${scenario}/${name}`); + if (UNIT_MAPPING[unit]) { + metricMetadata[metric] = { + unit: UNIT_MAPPING[unit], + }; + } + dataPoints.push({ + metric, + type: "distribution", + host, + tags: Object.entries(props) + .filter(([, value]) => value !== null) + .map( + ([key, value]) => + `${toIdentifier(key)}:${toIdentifier(value!.toString())}` + ) + .concat(commonTags), + points: [[ts, [value]]], + }); + }, + end: async (scenario, props) => { + await api.submitDistributionPoints({ + body: { + series: dataPoints, + }, + }); + dataPoints.length = 0; + }, + finish: async () => { + if (appKey) { + for (const [metric, metadata] of Object.entries(metricMetadata)) { + await api.updateMetricMetadata({ + metricName: metric, + body: metadata, + }); + } + } + }, + }; + return iface; +} diff --git a/packages/devlow-bench/src/interfaces/interactive.ts b/packages/devlow-bench/src/interfaces/interactive.ts new file mode 100644 index 0000000000000..dc77fa195f62f --- /dev/null +++ b/packages/devlow-bench/src/interfaces/interactive.ts @@ -0,0 +1,46 @@ +import { Interface, Scenario, ScenarioVariant } from "../index.js"; +import inquirer from "inquirer"; +import chalk from "chalk"; +import { formatUnit } from "../units.js"; +import { formatVariant } from 
"../utils.js"; + +export default function createInterface(): Interface { + const iface: Interface = { + filterScenarios: async (scenarios) => { + if (scenarios.length === 1) { + return scenarios; + } + let answer = await inquirer.prompt({ + type: "checkbox", + name: "scenarios", + default: scenarios.slice(), + message: "Choose scenarios to run", + choices: scenarios.map((scenario) => ({ + name: scenario.name, + value: scenario, + })), + }); + return answer.scenarios; + }, + filterScenarioVariants: async (variants) => { + if (variants.length === 1) { + return variants; + } + let answer = await inquirer.prompt({ + type: "checkbox", + name: "variants", + default: variants.slice(), + message: "Choose variants to run", + choices: variants.map((variant) => { + return { + name: formatVariant(variant.scenario.name, variant.props), + value: variant, + }; + }), + }); + return answer.variants; + }, + }; + + return iface; +} diff --git a/packages/devlow-bench/src/interfaces/json.ts b/packages/devlow-bench/src/interfaces/json.ts new file mode 100644 index 0000000000000..0e3dbee99ee6e --- /dev/null +++ b/packages/devlow-bench/src/interfaces/json.ts @@ -0,0 +1,83 @@ +import { Interface, Scenario, ScenarioVariant } from "../index.js"; +import inquirer from "inquirer"; +import chalk from "chalk"; +import { formatUnit } from "../units.js"; +import { formatVariant } from "../utils.js"; +import { writeFile } from "fs/promises"; + +function filterProp( + prop: Record +): Record { + const filteredProp: Record = {}; + for (const [key, value] of Object.entries(prop)) { + if (value !== null) { + filteredProp[key] = value; + } + } + return filteredProp; +} + +export default function createInterface( + file: string = (() => { + const file = process.env.JSON_OUTPUT_FILE; + if (!file) { + throw new Error("env var JSON_OUTPUT_FILE is not set"); + } + return file; + })() +): Interface { + const metrics = new Map< + string, + { + key: Record; + value: number; + unit: string; + count: number; + relativeTo?: string; + } + >(); + const iface: Interface = { + measurement: async (scenario, props, name, value, unit, relativeTo) => { + const keyObject = { + scenario: scenario, + ...filterProp(props), + name: name, + }; + const key = JSON.stringify(keyObject); + const current = metrics.get(key); + if (current) { + current.value += value; + current.count++; + } else { + metrics.set(key, { + key: keyObject, + value, + unit: unit, + count: 1, + relativeTo, + }); + } + }, + finish: async () => { + await writeFile( + file, + JSON.stringify( + [...metrics.values()].map( + ({ key, value, unit, count, relativeTo }) => { + return { + key, + value: value / count, + unit, + text: formatUnit(value / count, unit), + datapoints: count, + relativeTo, + }; + } + ) + ) + ); + }, + }; + + return iface; +} diff --git a/packages/devlow-bench/src/runner.ts b/packages/devlow-bench/src/runner.ts new file mode 100644 index 0000000000000..62d753d4ffe8f --- /dev/null +++ b/packages/devlow-bench/src/runner.ts @@ -0,0 +1,64 @@ +import { withCurrent } from "./describe.js"; +import { Interface, Scenario, intoFullInterface } from "./index.js"; + +export async function runScenarios( + scenarios: Scenario[], + iface: Interface +): Promise { + const fullIface = intoFullInterface(iface); + if (scenarios.some((scenario) => scenario.only)) { + scenarios = scenarios.filter((scenario) => scenario.only); + } + scenarios = await fullIface.filterScenarios(scenarios); + let variants = []; + for (const scenario of scenarios) { + let props = [{}]; + for (const [key, 
options] of Object.entries(scenario.config)) { + const newProps = []; + for (const prop of props) { + if (key === "scenario" || key === "name") + throw new Error("Cannot use 'scenario' or 'name' as a property name"); + for (const value of options) { + newProps.push({ + ...prop, + [key]: value, + }); + } + } + props = newProps; + } + variants.push( + ...props.map((props) => ({ + scenario, + props, + })) + ); + } + variants = await fullIface.filterScenarioVariants(variants); + + for (const variant of variants) { + try { + const measurements = new Map(); + await withCurrent( + { + iface: fullIface, + measurements, + scenario: variant, + }, + async () => { + await fullIface.start(variant.scenario.name, variant.props); + measurements.set("start", { + value: Date.now(), + unit: "ms", + }); + await variant.scenario.fn(variant.props); + await fullIface.end(variant.scenario.name, variant.props); + } + ); + } catch (e) { + await fullIface.error(variant.scenario.name, variant.props, e); + } + } + + await fullIface.finish(); +} diff --git a/packages/devlow-bench/src/shell.ts b/packages/devlow-bench/src/shell.ts new file mode 100644 index 0000000000000..2cc204c6be191 --- /dev/null +++ b/packages/devlow-bench/src/shell.ts @@ -0,0 +1,156 @@ +import { ChildProcess, spawn } from "child_process"; +import split2 from "split2"; +import treeKill from "tree-kill"; +import pidusage from "pidusage-tree"; +import { PREVIOUS, reportMeasurement } from "./describe.js"; + +export interface Command { + ok(): Promise<void>; + kill(): Promise<void>; + end(): Promise<number>; + waitForOutput(regex: RegExp): Promise<RegExpMatchArray>; + reportMemUsage( + metricName: string, + options: { + relativeTo?: string | typeof PREVIOUS; + scenario?: string; + props?: Record<string, string | number | boolean | null>; + } + ): Promise<void>; + stdout: string; + stderr: string; + output: string; +} + +const shellOutput = !!process.env.SHELL_OUTPUT; + +class CommandImpl { + stdout: string = ""; + stderr: string = ""; + output: string = ""; + exitPromise: Promise<number>; + waitingForOutput: (() => void)[] = []; + constructor(private process: ChildProcess) { + process.stdout?.pipe(split2()).on("data", (data) => { + const str = data.toString(); + this.stdout += str + "\n"; + this.output += str + "\n"; + if (shellOutput) { + console.log(`[STDOUT] ${str}`); + } + if (this.waitingForOutput.length !== 0) { + const waitingForOutput = this.waitingForOutput; + this.waitingForOutput = []; + for (const fn of waitingForOutput) { + fn(); + } + } + }); + process.stderr?.pipe(split2()).on("data", (data) => { + const str = data.toString(); + this.stderr += str + "\n"; + this.output += str + "\n"; + if (shellOutput) { + console.log(`[STDERR] ${str}`); + } + if (this.waitingForOutput.length !== 0) { + const waitingForOutput = this.waitingForOutput; + this.waitingForOutput = []; + for (const fn of waitingForOutput) { + fn(); + } + } + }); + this.exitPromise = new Promise((resolve, reject) => { + process.on("error", reject); + process.on("exit", resolve); + }); + } + + async ok() { + const exitCode = await this.exitPromise; + if (exitCode !== 0) { + throw new Error( + `Command exited with code ${exitCode}\n\nOutput:\n${this.output}` + ); + } + } + + async end() { + return await this.exitPromise; + } + + async kill() { + const pid = this.process.pid!; + await new Promise<void>((resolve, reject) => + treeKill(pid, (err) => { + if (err) reject(err); + else resolve(); + }) + ); + await this.exitPromise; + } + + async waitForOutput(regex: RegExp) { + let start = this.output.length; + while (true) { + const match = this.output.slice(start).match(regex); + if (match) { + return match; + } + const waitResult = await Promise.race([ + this.exitPromise, + new Promise<void>((resolve) => { + this.waitingForOutput.push(resolve); + }).then(() => "output"), + ]); + if (waitResult !== "output") { + throw new Error( + `Command exited with code ${waitResult}\n\nOutput:\n${this.output}` + ); + } + } + } + + async reportMemUsage( + metricName: string, + options: { + relativeTo?: string | typeof PREVIOUS; + scenario?: string; + props?: Record<string, string | number | boolean | null>; + } = {} + ) { + try { + const pid = this.process.pid!; + const report = await pidusage(pid); + const memUsage = Object.values(report) + .filter((x) => x) + .map((x) => (x as any).memory) + .reduce((a, b) => a + b, 0); + reportMeasurement(metricName, memUsage, "bytes", options); + } catch (e) { + // ignore + } + } +} + +export function command( + command: string, + args: string[], + options: { + env?: Record<string, string>; + cwd?: string; + } = {} +): Command { + const process = spawn(command, args, { + shell: true, + ...options, + stdio: ["ignore", "pipe", "pipe"], + }); + if (shellOutput) { + console.log( + `[SHELL] ${command} ${args.join(" ")} ${JSON.stringify(options)}` + ); + } + return new CommandImpl(process); +} diff --git a/packages/devlow-bench/src/table.ts b/packages/devlow-bench/src/table.ts new file mode 100644 index 0000000000000..2321958974699 --- /dev/null +++ b/packages/devlow-bench/src/table.ts @@ -0,0 +1,129 @@ +import { readFile } from "fs/promises"; +import minimist from "minimist"; + +(async () => { + const args = minimist(process.argv.slice(2), { + alias: { + r: "row", + c: "column", + "?": "help", + h: "help", + }, + }); + + const knownArgs = new Set(["row", "r", "column", "c", "help", "h", "?", "_"]); + if (args.help || (Object.keys(args).length === 1 && args._.length === 0)) { + console.log("Usage: devlow-table <data.json>"); + console.log("  --row=<key>      Key to show as row"); + console.log("  --column=<key>   Key to show as column"); + console.log("  --<key>=<value>  Filter values"); + console.log("  --help, -h, -?   Show this help"); + } + + let data = JSON.parse(await readFile(args._[0], "utf-8")) as any[]; + + const getValue = ( + data: any, + name: string | string[], + includeKey: boolean + ): string => { + if (name === "value") { + return data.text as string; + } + if (Array.isArray(name)) { + return name + .map((n) => getValue(data, n, true)) + .filter((x) => x) + .join(" "); + } + const value = data.key[name]; + if (value === undefined) return ""; + if (value === true) return includeKey ? name : "true"; + if (value === false) return includeKey ? "" : "false"; + if (value === null) return ""; + if (includeKey) return `${name}=${value}`; + return value + ""; + }; + + for (const [key, value] of Object.entries(args)) { + if (knownArgs.has(key)) continue; + const values = (Array.isArray(value) ?
value : [value]).map((v) => + v.toString() + ); + data = data.filter((item) => { + const itemValue = getValue(item, key, false); + if (itemValue === "") return false; + return values.some((v) => itemValue === v); + }); + } + + if (data.length === 0) { + console.log("No data"); + return; + } + + const row = args.row || "name"; + const column = args.column || "scenario"; + const getRow = (data: any) => getValue(data, row, false); + const getColumn = (data: any) => getValue(data, column, false); + + const allRows = new Set(data.map(getRow)); + const allColumns = new Set(data.map(getColumn)); + + const table = []; + const columnSizes = [...allColumns].map((c) => c.length); + for (const row of allRows) { + const rowData: string[] = []; + let i = 0; + for (const column of allColumns) { + let items = data + .filter((d: any) => getRow(d) === row && getColumn(d) === column) + .map((i) => i.text); + rowData.push(items.join(", ")); + columnSizes[i] = Math.max(columnSizes[i], rowData[i].length); + i++; + } + table.push(rowData); + } + + const pad = (str: string, size: number) => { + return " ".repeat(size - str.length) + str; + }; + + const firstColumnSize = Math.max(...[...allRows].map((r) => r.length)); + + // Header + { + let row = "| "; + let sepRow = "|:"; + row += " ".repeat(firstColumnSize); + sepRow += "-".repeat(firstColumnSize); + const allColumnsArray = [...allColumns]; + for (let i = 0; i < columnSizes.length; i++) { + row += " | "; + row += pad(allColumnsArray[i], columnSizes[i]); + sepRow += ":|-"; + sepRow += "-".repeat(columnSizes[i]); + } + row += " |"; + sepRow += ":|"; + console.log(row); + console.log(sepRow); + } + + // Data rows + let r = 0; + for (const rowName of allRows) { + let row = "| "; + row += pad(rowName, firstColumnSize); + for (let i = 0; i < columnSizes.length; i++) { + row += " | "; + row += pad(table[r][i], columnSizes[i]); + } + row += " |"; + console.log(row); + r++; + } +})().catch((e) => { + console.error(e.stack); }); diff --git a/packages/devlow-bench/src/types.d.ts b/packages/devlow-bench/src/types.d.ts new file mode 100644 index 0000000000000..ba169651fbe50 --- /dev/null +++ b/packages/devlow-bench/src/types.d.ts @@ -0,0 +1,2 @@ +declare module "chalk"; +declare module "pidusage-tree"; diff --git a/packages/devlow-bench/src/units.ts b/packages/devlow-bench/src/units.ts new file mode 100644 index 0000000000000..610a136c4147b --- /dev/null +++ b/packages/devlow-bench/src/units.ts @@ -0,0 +1,25 @@ +const UNITS: Record<string, Record<string, number>> = { + ms: { + " s": 1000, + }, + bytes: { + " GB": 1024 * 1024 * 1024, + " MB": 1024 * 1024, + " KB": 1024, + }, + requests: { + "K requests": 1000, + }, +}; + +export function formatUnit(value: number, unit: string) { + const conversion = UNITS[unit]; + if (conversion) { + for (const [name, factor] of Object.entries(conversion)) { + if (value >= factor) { + return `${(value / factor).toFixed(2)}${name}`; + } + } + } + return `${value.toFixed(2).replace(/\.00$/, "")} ${unit}`; +} diff --git a/packages/devlow-bench/src/utils.ts b/packages/devlow-bench/src/utils.ts new file mode 100644 index 0000000000000..6fe0e543b637b --- /dev/null +++ b/packages/devlow-bench/src/utils.ts @@ -0,0 +1,14 @@ +import { ScenarioVariant } from "./index.js"; + +export function formatVariant( + scenario: string, + props: Record<string, string | number | boolean | null> +): string { + const keys = Object.keys(props) + .filter((key) => props[key] !== false && props[key] !== null) + .map((key) => (props[key] === true ?
key : `${key}=${props[key]}`)); + if (keys.length === 0) { + return scenario; + } + return `${scenario} ${keys.join(" ")}`; +} diff --git a/packages/devlow-bench/tsconfig.json b/packages/devlow-bench/tsconfig.json new file mode 100644 index 0000000000000..925fa03cc9e98 --- /dev/null +++ b/packages/devlow-bench/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "strict": true, + "target": "ES2020", + "module": "NodeNext", + "types": ["node"], + "outDir": "dist", + "declaration": true, + "declarationDir": "dist", + }, + "include": [ + "src/**/*" + ], +} \ No newline at end of file diff --git a/packages/eslint-config-turbo/package.json b/packages/eslint-config-turbo/package.json index 5b78c8d869b0d..d136fcf8b0b67 100644 --- a/packages/eslint-config-turbo/package.json +++ b/packages/eslint-config-turbo/package.json @@ -1,6 +1,6 @@ { "name": "eslint-config-turbo", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "ESLint config for Turborepo", "repository": { "type": "git", diff --git a/packages/eslint-plugin-turbo/.vscode/launch.json b/packages/eslint-plugin-turbo/.vscode/launch.json new file mode 100644 index 0000000000000..e67b2ddd4e727 --- /dev/null +++ b/packages/eslint-plugin-turbo/.vscode/launch.json @@ -0,0 +1,13 @@ +{ + "version": "1.0.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "test", + "program": "${workspaceFolder}/node_modules/jest/bin/jest.js", + "args": ["${workspaceFolder}", "--run-in-band"], + "console": "integratedTerminal" + } + ] +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/configs/single/package.json b/packages/eslint-plugin-turbo/__fixtures__/configs/single/package.json new file mode 100644 index 0000000000000..0967ef424bce6 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/configs/single/package.json @@ -0,0 +1 @@ +{} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/.env b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/.env new file mode 100644 index 0000000000000..6d0793308d76e --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/.env @@ -0,0 +1 @@ +ROOT_DOT_ENV=root diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json index a3713efab8eec..90490c97cfa67 100644 --- a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json @@ -3,6 +3,7 @@ "extends": ["//"], "pipeline": { "build": { + "dotEnv": ["missing1.env", "missing2.env"], "env": ["ENV_3"] } } diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/.env b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/.env new file mode 100644 index 0000000000000..08aeba6aeb1ab --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/.env @@ -0,0 +1 @@ +WEB_DOT_ENV=web diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js index bfd3ab817a0de..50b0dfebf7790 100644 --- a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js @@ -2,5 +2,14 @@ export default function web() { if (!process.env.ENV_2) { return "bar"; } + if (process.env.NX_DOT_ENV === 
undefined) { + return "does not exist"; + } + if (process.env.ROOT_DOT_ENV === undefined) { + return "does not exist"; + } + if (process.env.WEB_DOT_ENV === undefined) { + return "does not exist"; + } return "foo"; } diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json index 0d1b80f43964c..2c5f4f2d3ec6a 100644 --- a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json @@ -3,7 +3,8 @@ "extends": ["//"], "pipeline": { "build": { - "env": ["ENV_2"] + "dotEnv": [".env"], + "env": ["ENV_2", "NEXT_PUBLIC_*", "!NEXT_PUBLIC_EXCLUDE*"] } } } diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json index cb4fb203398fa..525f731d7f4a0 100644 --- a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json @@ -1,6 +1,7 @@ { "$schema": "https://turbo.build/schema.json", "globalEnv": ["CI"], + "globalDotEnv": [".env", "missing.env"], "pipeline": { "build": { "env": ["ENV_1"] diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/.env b/packages/eslint-plugin-turbo/__fixtures__/workspace/.env new file mode 100644 index 0000000000000..c6e6c0a068933 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/.env @@ -0,0 +1,2 @@ +ROOT_DOT_ENV=1 +WEB_DOT_ENV=2 diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json b/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json index a1b2929589dad..703682c2de232 100644 --- a/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json @@ -1,5 +1,6 @@ { - "dependencies": { + "devDependencies": { + "eslint": "latest", "eslint-plugin-turbo": "../../" } } diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json index 8079eb24634da..6c6bcf357d80a 100644 --- a/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json @@ -1,6 +1,7 @@ { "$schema": "https://turbo.build/schema.json", "globalEnv": ["UNORDERED", "CI"], + "globalDotEnv": [".env", "missing.env"], "pipeline": { "build": { // A workspace's `build` task depends on that workspace's diff --git a/packages/eslint-plugin-turbo/__tests__/cwd.test.ts b/packages/eslint-plugin-turbo/__tests__/cwd.test.ts index 15a2695f60913..c7c5079d6c3d6 100644 --- a/packages/eslint-plugin-turbo/__tests__/cwd.test.ts +++ b/packages/eslint-plugin-turbo/__tests__/cwd.test.ts @@ -13,14 +13,55 @@ describe("eslint settings check", () => { const { root: cwd } = useFixture({ fixture: "workspace" }); execSync(`npm install`, { cwd }); - const configString = execSync(`eslint --print-config peer.js`, { + const configString = execSync(`npm exec eslint -- --print-config peer.js`, { cwd, encoding: "utf8", }); const configJson = JSON5.parse(configString); expect(configJson.settings).toEqual({ - turbo: { envVars: ["CI", "UNORDERED"] }, + turbo: { + cacheKey: { + global: { + legacyConfig: [], + env: ["CI", "UNORDERED"], + passThroughEnv: null, + dotEnv: { + filePaths: [".env", "missing.env"], + hashes: { + ".env": "9ad6c5fd4d5bbe7c00e1f2b358ac7ef2aa3521d0", + 
}, + }, + }, + globalTasks: { + build: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + test: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + lint: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + deploy: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + }, + workspaceTasks: {}, + }, + }, }); }); @@ -29,14 +70,58 @@ describe("eslint settings check", () => { execSync(`npm install`, { cwd: root }); const cwd = path.join(root, "child"); - const configString = execSync(`eslint --print-config child.js`, { - cwd, - encoding: "utf8", - }); + const configString = execSync( + `npm exec eslint -- --print-config child.js`, + { + cwd, + encoding: "utf8", + } + ); const configJson = JSON5.parse(configString); expect(configJson.settings).toEqual({ - turbo: { envVars: ["CI", "UNORDERED"] }, + turbo: { + cacheKey: { + global: { + legacyConfig: [], + env: ["CI", "UNORDERED"], + passThroughEnv: null, + dotEnv: { + filePaths: [".env", "missing.env"], + hashes: { + ".env": "9ad6c5fd4d5bbe7c00e1f2b358ac7ef2aa3521d0", + }, + }, + }, + globalTasks: { + build: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + test: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + lint: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + deploy: { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }, + }, + workspaceTasks: {}, + }, + }, }); }); }); @@ -55,7 +140,10 @@ describe("eslint cache is busted", () => { const cwd = path.join(root, "child"); try { - execSync(`eslint --format=json child.js`, { cwd, encoding: "utf8" }); + execSync(`npm exec eslint -- --format=json child.js`, { + cwd, + encoding: "utf8", + }); } catch (error: any) { const outputJson = JSON5.parse(error.stdout); expect(outputJson).toMatchObject([ @@ -78,7 +166,7 @@ describe("eslint cache is busted", () => { } // test that we invalidated the eslint cache - const output = execSync(`eslint --format=json child.js`, { + const output = execSync(`npm exec eslint -- --format=json child.js`, { cwd, encoding: "utf8", }); diff --git a/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts b/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts index 5c753dd5356e4..8304d12fd6250 100644 --- a/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts +++ b/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts @@ -21,6 +21,30 @@ ruleTester.run(RULES.noUndeclaredEnvVars, rule, { "../../__fixtures__/workspace-configs/apps/web/index.js" ), }, + { + code: ` + const { ROOT_DOT_ENV, WEB_DOT_ENV } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + ], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/web/index.js" + ), + }, + { + code: ` + const { NEXT_PUBLIC_HAHAHAHA } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + ], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/web/index.js" + ), + }, { code: ` const { ENV_1 } = process.env; @@ -248,6 +272,28 @@ ruleTester.run(RULES.noUndeclaredEnvVars, rule, { }, ], }, + { + code: ` + const { NEXT_PUBLIC_HAHAHAHA, NEXT_PUBLIC_EXCLUDE, NEXT_PUBLIC_EXCLUDED } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + 
], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/web/index.js" + ), + errors: [ + { + message: + "NEXT_PUBLIC_EXCLUDE is not listed as a dependency in the root turbo.json or workspace (apps/web) turbo.json", + }, + { + message: + "NEXT_PUBLIC_EXCLUDED is not listed as a dependency in the root turbo.json or workspace (apps/web) turbo.json", + }, + ], + }, { code: "let { X } = process.env;", options: [ diff --git a/packages/eslint-plugin-turbo/jest.config.js b/packages/eslint-plugin-turbo/jest.config.js index 102773e13b3de..286c1a0613822 --- a/packages/eslint-plugin-turbo/jest.config.js +++ b/packages/eslint-plugin-turbo/jest.config.js @@ -9,4 +9,6 @@ module.exports = { moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"], modulePathIgnorePatterns: ["/node_modules", "/dist"], preset: "ts-jest", + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/eslint-plugin-turbo/lib/configs/recommended.ts b/packages/eslint-plugin-turbo/lib/configs/recommended.ts index e24750395f908..9b34d367f286f 100644 --- a/packages/eslint-plugin-turbo/lib/configs/recommended.ts +++ b/packages/eslint-plugin-turbo/lib/configs/recommended.ts @@ -1,17 +1,12 @@ import { RULES } from "../constants"; -import getEnvVarDependencies from "../utils/getEnvVarDependencies"; +import { Project } from "../utils/calculate-inputs"; + +const project = new Project(process.cwd()); +const cacheKey = project.valid() ? project.key() : Math.random(); -// Add the environment variables into the ESLint incremental cache key. -const envVars = getEnvVarDependencies({ - cwd: process.cwd(), -}); const settings = { turbo: { - envVars: envVars - ? Object.values(envVars) - .flatMap((s) => Array.from(s)) - .sort() - : [], + cacheKey, }, }; diff --git a/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts b/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts index 372d21a9e1c75..1c967ab2a2146 100644 --- a/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts +++ b/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts @@ -2,7 +2,7 @@ import type { Rule } from "eslint"; import path from "path"; import { Node, MemberExpression } from "estree"; import { RULES } from "../constants"; -import getEnvVarDependencies from "../utils/getEnvVarDependencies"; +import { Project, getWorkspaceFromFilePath } from "../utils/calculate-inputs"; const meta: Rule.RuleMetaData = { type: "problem", @@ -61,6 +61,7 @@ function normalizeCwd( function create(context: Rule.RuleContext): Rule.RuleListener { const { options, getPhysicalFilename } = context; + const allowList: Array<string> = options?.[0]?.allowList || []; const regexAllowList: Array<RegExp> = []; allowList.forEach((allowed) => { @@ -76,64 +77,55 @@ function create(context: Rule.RuleContext): Rule.RuleListener { context.getCwd ? context.getCwd() : undefined, options ); - const filePath = getPhysicalFilename(); - const allTurboVars = getEnvVarDependencies({ - cwd, - }); - // if allTurboVars is null, something went wrong reading from the turbo config - // (this is different from finding a config with no env vars present, which would - // return an empty set) - so there is no point continuing if we have nothing to check against - if (!allTurboVars) { - // return of {} bails early from a rule check + const project = new Project(cwd); + if (!project.valid()) { return {}; } - const globalTurboVars = allTurboVars["//"]; - const hasWorkspaceConfigs = Object.keys(allTurboVars).length > 1; - - // find any workspace configs that match the current file path - // find workspace config (if any) that match the current file path - const workspaceKey = Object.keys(allTurboVars).find( - (workspacePath) => filePath !== "//" && filePath.startsWith(workspacePath) + const filePath = getPhysicalFilename(); + const hasWorkspaceConfigs = project.projectWorkspaces.some( + (workspaceConfig) => !!workspaceConfig.turboConfig + ); + const workspaceConfig = getWorkspaceFromFilePath( + project.projectWorkspaces, + filePath ); - - let workspaceTurboVars: Set<string> | null = null; - if (workspaceKey) { - workspaceTurboVars = allTurboVars[workspaceKey]; - } const checkKey = (node: Node, envKey?: string) => { - if ( - envKey && - !globalTurboVars.has(envKey) && - !regexAllowList.some((regex) => regex.test(envKey)) - ) { - // if we have a workspace config, check that too - if (workspaceTurboVars && workspaceTurboVars.has(envKey)) { - return {}; - } else { - let message = `{{ envKey }} is not listed as a dependency in ${ - hasWorkspaceConfigs ? "root turbo.json" : "turbo.json" - }`; - if (workspaceKey && workspaceTurboVars) { - if (cwd) { - // if we have a cwd, we can provide a relative path to the workspace config - message = `{{ envKey }} is not listed as a dependency in the root turbo.json or workspace (${path.relative( - cwd, - workspaceKey - )}) turbo.json`; - } else { - message = `{{ envKey }} is not listed as a dependency in the root turbo.json or workspace turbo.json`; - } - } + if (!envKey) { + return {}; + } - context.report({ - node, - message, - data: { envKey }, - }); + if (regexAllowList.some((regex) => regex.test(envKey))) { + return {}; + } + + let configured = project.test(workspaceConfig?.workspaceName, envKey); + + if (configured) { + return {}; + } else { + let message = `{{ envKey }} is not listed as a dependency in ${ + hasWorkspaceConfigs ?
"root turbo.json" : "turbo.json" + }`; + if (workspaceConfig && workspaceConfig.turboConfig) { + if (cwd) { + // if we have a cwd, we can provide a relative path to the workspace config + message = `{{ envKey }} is not listed as a dependency in the root turbo.json or workspace (${path.relative( + cwd, + workspaceConfig.workspacePath + )}) turbo.json`; + } else { + message = `{{ envKey }} is not listed as a dependency in the root turbo.json or workspace turbo.json`; + } } + + context.report({ + node, + message, + data: { envKey }, + }); } }; diff --git a/packages/eslint-plugin-turbo/lib/utils/calculate-inputs.ts b/packages/eslint-plugin-turbo/lib/utils/calculate-inputs.ts new file mode 100644 index 0000000000000..1eef2f20de4b0 --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/utils/calculate-inputs.ts @@ -0,0 +1,449 @@ +import crypto from "crypto"; +import fs from "fs"; +import path from "path"; +import { wildcardTests } from "../utils/wildcard-processing"; +import { dotEnv } from "../utils/dotenv-processing"; + +import { WorkspaceConfig, getWorkspaceConfigs } from "@turbo/utils"; +import { Pipeline } from "@turbo/types"; +import { RootSchema } from "@turbo/types/src/types/config"; + +type EnvironmentConfig = { + legacyConfig: string[]; + env: string[]; + passThroughEnv: string[] | null; + dotEnv: DotEnvConfig | null; +}; + +type EnvVar = string; +type EnvTest = (variable: EnvVar) => boolean; +type EnvironmentTest = { + legacyConfig: EnvTest; + env: EnvTest; + passThroughEnv: EnvTest; + dotEnv: EnvTest; +}; + +type DotEnvConfig = { + filePaths: string[]; + hashes: { + [path: string]: string | null; + }; +}; + +type ProjectKey = { + global: EnvironmentConfig; + globalTasks: { + [script: string]: EnvironmentConfig; + }; + workspaceTasks: { + [workspace: string]: { + [script: string]: EnvironmentConfig; + }; + }; +}; + +type ProjectTests = { + global: EnvironmentTest; + globalTasks: { + [script: string]: EnvironmentTest; + }; + workspaceTasks: { + [workspace: string]: { + [script: string]: EnvironmentTest; + }; + }; +}; + +// Process inputs for `EnvironmentConfig`s + +function processLegacyConfig(legacyConfig: string[] | undefined): string[] { + if (!legacyConfig) { + return []; + } + + const processed = legacyConfig + // filter for env vars + .filter((dep) => dep.startsWith("$")) + // remove leading $ + .map((variable) => variable.slice(1)); + + // After processing length is 0, 1, or more than 1. 
+ switch (processed.length) { + case 0: + return []; + case 1: + return processed; + default: + return [...new Set(processed)].sort(); + } +} + +function processEnv(env: string[] | undefined): string[] { + if (!env) { + return []; + } + + switch (env.length) { + case 0: + return []; + case 1: + return [env[0]]; + default: + return [...new Set(env)].sort(); + } +} + +function processPassThroughEnv( + passThroughEnv: string[] | null | undefined +): string[] | null { + if (!passThroughEnv) { + return null; + } + + switch (passThroughEnv.length) { + case 0: + return []; + case 1: + return [passThroughEnv[0]]; + default: + return [...new Set(passThroughEnv)].sort(); + } +} + +function processDotEnv( + workspacePath: string, + filePaths: string[] | null | undefined +): DotEnvConfig | null { + if (!filePaths) { + return null; + } + + const hashEntries: [string, string][] = []; + filePaths.reduce((accumulator, filePath) => { + const hash = crypto.createHash("sha1"); + try { + const fileContents = fs.readFileSync(path.join(workspacePath, filePath)); + hash.update(fileContents); + accumulator.push([filePath, hash.digest("hex")]); + } catch (_) {} + + return accumulator; + }, hashEntries); + + return { + filePaths, + hashes: Object.fromEntries(hashEntries), + }; +} + +// Generate `EnvironmentConfig`s + +function processGlobal( + workspacePath: string, + rootTurboJson: RootSchema +): EnvironmentConfig { + return { + legacyConfig: processLegacyConfig(rootTurboJson.globalDependencies), + env: processEnv(rootTurboJson.globalEnv), + passThroughEnv: processPassThroughEnv(rootTurboJson.globalPassThroughEnv), + dotEnv: processDotEnv(workspacePath, rootTurboJson.globalDotEnv), + }; +} + +function processTask(workspacePath: string, task: Pipeline): EnvironmentConfig { + return { + legacyConfig: processLegacyConfig(task.dependsOn), + env: processEnv(task.env), + passThroughEnv: processPassThroughEnv(task.passThroughEnv), + dotEnv: processDotEnv(workspacePath, task.dotEnv), + }; +} + +const TEST_FALSE = (_: string): boolean => false; +function generateEnvironmentTest( + config: EnvironmentConfig, + workspacePath: string | undefined +): EnvironmentTest { + const output: EnvironmentTest = { + legacyConfig: TEST_FALSE, + env: TEST_FALSE, + passThroughEnv: TEST_FALSE, + dotEnv: TEST_FALSE, + }; + + if (config.legacyConfig.length > 0) { + const dependsOnEnvSet = new Set(config.legacyConfig); + output.legacyConfig = (variable: EnvVar) => dependsOnEnvSet.has(variable); + } + + if (config.env.length > 0) { + const testables = wildcardTests(config.env); + output.env = (variable: EnvVar) => { + return ( + testables.inclusions.test(variable) && + !testables.exclusions.test(variable) + ); + }; + } + + // c. Check the passThroughEnv configuration. + if (config.passThroughEnv && config.passThroughEnv.length > 0) { + const testables = wildcardTests(config.passThroughEnv); + output.passThroughEnv = (variable: EnvVar) => { + return ( + testables.inclusions.test(variable) && + !testables.exclusions.test(variable) + ); + }; + } + + // d. Check to see if the variable is accounted for by dotEnv. 
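+  // e.g. (illustrative, not in the original source): with dotEnv = { filePaths: [".env"], hashes: { ".env": "..." } }, every variable name parsed from the workspace's .env file lands in dotEnvEnvSet, so a fixture line like ROOT_DOT_ENV=root makes output.dotEnv("ROOT_DOT_ENV") return true.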
+ if (config.dotEnv && config.dotEnv.filePaths.length > 0) { + const dotEnvEnvSet = dotEnv(workspacePath, config.dotEnv); + output.dotEnv = (variable: EnvVar) => dotEnvEnvSet.has(variable); + } + + return output; +} + +function environmentTestArray(envContext: EnvironmentTest) { + return [ + envContext.legacyConfig, + envContext.env, + envContext.passThroughEnv, + envContext.dotEnv, + ]; +} + +// Identify where to store `EnvironmentConfig`s + +function getTaskAddress(taskName: string): { + workspaceName: string | null; + scriptName: string; +} { + // Somehow empty. Error. + if (taskName.length === 0) { + throw new Error("Invalid task name found in turbo.json."); + } + + const firstIndexOf = taskName.indexOf("#"); + + // Something like "build" + if (firstIndexOf === -1) { + return { + workspaceName: null, + scriptName: taskName, + }; + } + + // Something like "what#are#you#doing" + if (firstIndexOf !== taskName.lastIndexOf("#")) { + throw new Error("Invalid task name found in turbo.json."); + } + + const [workspaceName, scriptName] = taskName.split("#"); + + return { + workspaceName, + scriptName, + }; +} + +export function getWorkspaceFromFilePath( + projectWorkspaces: WorkspaceConfig[], + filePath: string +): WorkspaceConfig | null { + let possibleWorkspaces = projectWorkspaces + .filter((projectWorkspace) => + filePath.startsWith(projectWorkspace.workspacePath) + ) + .sort((a, b) => { + if (a.workspacePath > b.workspacePath) { + return -1; + } else if (a === b) { + return 0; + } else { + return 1; + } + }); + + if (possibleWorkspaces.length > 0) { + return possibleWorkspaces[0]; + } + + return null; +} + +// Driver + +export class Project { + _key: ProjectKey; + _test: ProjectTests; + + cwd: string | undefined; + allConfigs: WorkspaceConfig[]; + projectRoot: WorkspaceConfig | undefined; + projectWorkspaces: WorkspaceConfig[]; + + constructor(cwd: string | undefined) { + this.cwd = cwd; + this.allConfigs = getWorkspaceConfigs(cwd); + this.projectRoot = this.allConfigs.find( + (workspaceConfig) => workspaceConfig.isWorkspaceRoot + ); + this.projectWorkspaces = this.allConfigs.filter( + (workspaceConfig) => !workspaceConfig.isWorkspaceRoot + ); + + this._key = this.generateKey(); + this._test = this.generateTestConfig(); + } + + valid(): boolean { + return this.allConfigs.length > 0; + } + + generateKey(): ProjectKey { + let global: EnvironmentConfig = { + legacyConfig: [], + env: [], + passThroughEnv: null, + dotEnv: null, + }; + let globalTasks: { + [script: string]: EnvironmentConfig; + } = {}; + let workspaceTasks: { + [workspace: string]: { + [script: string]: EnvironmentConfig; + }; + } = {}; + + if ( + this.projectRoot && + this.projectRoot.turboConfig && + !("extends" in this.projectRoot) + ) { + const rootTurboJson = this.projectRoot; + + global = processGlobal( + this.projectRoot.workspacePath, + this.projectRoot.turboConfig + ); + + Object.entries(this.projectRoot.turboConfig.pipeline).forEach( + ([taskName, taskDefinition]) => { + const { workspaceName, scriptName } = getTaskAddress(taskName); + if (workspaceName) { + workspaceTasks[workspaceName] = workspaceTasks[workspaceName] || {}; + workspaceTasks[workspaceName][scriptName] = processTask( + rootTurboJson.workspacePath, + taskDefinition + ); + } else { + globalTasks[scriptName] = processTask( + rootTurboJson.workspacePath, + taskDefinition + ); + } + } + ); + } + + this.projectWorkspaces.forEach((projectWorkspace) => { + if (!projectWorkspace.turboConfig) { + return; + } + + 
Object.entries(projectWorkspace.turboConfig.pipeline).forEach( + ([taskName, taskDefinition]) => { + const { workspaceName: erroneousWorkspaceName, scriptName } = + getTaskAddress(taskName); + if (erroneousWorkspaceName) { + throw new Error( + "May not specify workspace name in non-root turbo.json" + ); + } + + const workspaceName = projectWorkspace.workspaceName; + workspaceTasks[workspaceName] = workspaceTasks[workspaceName] || {}; + workspaceTasks[workspaceName][scriptName] = processTask( + projectWorkspace.workspacePath, + taskDefinition + ); + } + ); + }); + + return { + global, + globalTasks, + workspaceTasks, + }; + } + + getWorkspacePath(workspaceName: string): string | undefined { + return this.projectWorkspaces.find( + (workspaceConfig) => workspaceConfig.workspaceName === workspaceName + )?.workspacePath; + } + + generateTestConfig(): ProjectTests { + return { + global: generateEnvironmentTest( + this._key.global, + this.projectRoot?.workspacePath + ), + globalTasks: Object.fromEntries( + Object.entries(this._key.globalTasks).map(([script, config]) => { + return [ + script, + generateEnvironmentTest(config, this.projectRoot?.workspacePath), + ]; + }) + ), + workspaceTasks: Object.fromEntries( + Object.entries(this._key.workspaceTasks).map( + ([workspace, taskConfigs]) => { + const workspacePath = this.getWorkspacePath(workspace); + return [ + workspace, + Object.fromEntries( + Object.entries(taskConfigs).map(([script, config]) => { + return [ + script, + generateEnvironmentTest(config, workspacePath), + ]; + }) + ), + ]; + } + ) + ), + }; + } + + key() { + return this._key; + } + + test(workspaceName: string | undefined, envVar: string) { + const tests = [ + environmentTestArray(this._test.global), + ...Object.values(this._test.globalTasks).map((context) => + environmentTestArray(context) + ), + ]; + + if (workspaceName && this._test.workspaceTasks[workspaceName]) { + tests.push( + ...Object.values(this._test.workspaceTasks[workspaceName]).map( + (context) => environmentTestArray(context) + ) + ); + } + + return tests.flat().some((test) => test(envVar)); + } +} diff --git a/packages/eslint-plugin-turbo/lib/utils/dotenv-processing.ts b/packages/eslint-plugin-turbo/lib/utils/dotenv-processing.ts new file mode 100644 index 0000000000000..8c6bee8037ea1 --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/utils/dotenv-processing.ts @@ -0,0 +1,34 @@ +import { parse } from "dotenv"; +import fs from "fs"; +import path from "path"; + +type DotEnvConfig = { + filePaths: string[]; + hashes: { + [path: string]: string | null; + }; +}; + +export function dotEnv( + workspacePath: string | undefined, + config: DotEnvConfig +): Set<string> { + if (!workspacePath) { + return new Set(); + } + + let outputSet = new Set<string>(); + config.filePaths.forEach((filePath) => { + try { + const dotEnvFileContents = fs.readFileSync( + path.join(workspacePath, filePath), + "utf8" + ); + Object.keys(parse(dotEnvFileContents)).forEach((envVarName) => + outputSet.add(envVarName) + ); + } catch (e) {} + }); + + return outputSet; +} diff --git a/packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts b/packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts deleted file mode 100644 index a57e5eb682a40..0000000000000 --- a/packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { getTurboConfigs } from "@turbo/utils"; - -function findDependsOnEnvVars({ - dependencies, -}: { - dependencies?: Array<string>; -}) { - if (dependencies) { - return ( - dependencies - // filter for dep env vars - .filter((dep) => dep.startsWith("$")) - // remove leading $ - .map((envVar) => envVar.slice(1, envVar.length)) - ); - } - - return []; -} - -function getEnvVarDependencies({ - cwd, -}: { - cwd: string | undefined; -}): Record<string, Set<string>> | null { - const turboConfigs = getTurboConfigs(cwd); - - if (!turboConfigs.length) { - return null; - } - - const envVars: Record<string, Set<string>> = { - "//": new Set(), - }; - - turboConfigs.forEach((turboConfig) => { - const { config, workspacePath, isRootConfig } = turboConfig; - - const key = isRootConfig ? "//" : workspacePath; - if (!envVars[key]) { - envVars[key] = new Set(); - } - - // handle globals - if (!("extends" in config)) { - const { globalDependencies = [], globalEnv = [] } = config; - - const keys = [ - ...findDependsOnEnvVars({ - dependencies: globalDependencies, - }), - ...globalEnv, - ]; - keys.forEach((k) => envVars[key].add(k)); - } - - // handle pipelines - const { pipeline = {} } = config; - Object.values(pipeline).forEach(({ env, dependsOn }) => { - if (dependsOn) { - findDependsOnEnvVars({ dependencies: dependsOn }).forEach((k) => - envVars[key].add(k) - ); - } - - if (env) { - env.forEach((k) => envVars[key].add(k)); - } - }); - }); - - return envVars; -} - -export default getEnvVarDependencies; diff --git a/packages/eslint-plugin-turbo/lib/utils/wildcard-processing.ts b/packages/eslint-plugin-turbo/lib/utils/wildcard-processing.ts new file mode 100644 index 0000000000000..f9ffee5328fca --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/utils/wildcard-processing.ts @@ -0,0 +1,107 @@ +import { EnvWildcard } from "@turbo/types/src/types/config"; + +const reRegExpChar = /[\\^$.*+?()[\]{}|]/g; +const reHasRegExpChar = RegExp(reRegExpChar.source); +function escapeRegExp(string: string) { + return string && reHasRegExpChar.test(string) + ? string.replace(reRegExpChar, "\\$&") + : string || ""; +} + +const wildcard = "*"; +const wildcardEscape = "\\"; +const regexWildcardSegment = ".*"; + +function wildcardToRegexPattern(pattern: string): string { + let regexString: string[] = []; + + let previousIndex: number = 0; + let previousRune: null | string = null; + + for (let i = 0; i < pattern.length; i++) { + let char = pattern[i]; + if (char === wildcard) { + if (previousRune === wildcardEscape) { + // Found a literal * + + // Replace the trailing "\*" with just "*" before adding the segment. + regexString.push( + escapeRegExp(pattern.slice(previousIndex, i - 1) + "*") + ); + } else { + // Found a wildcard + + // Add in the static segment since the last wildcard. Can be zero length. + regexString.push(escapeRegExp(pattern.slice(previousIndex, i))); + + // Add a dynamic segment if it isn't adjacent to another dynamic segment. + if (regexString[regexString.length - 1] !== regexWildcardSegment) { + regexString.push(regexWildcardSegment); + } + } + + // Advance the pointer. + previousIndex = i + 1; + } + previousRune = char; + } + + // Add the last static segment. Can be zero length. + regexString.push(escapeRegExp(pattern.slice(previousIndex))); + + return regexString.join(""); +} + +interface Testable { + test(input: string): boolean; +} + +const NO_PATTERNS = { + test(_: string): boolean { + return false; + }, +}; + +export type WildcardTests = { + inclusions: Testable; + exclusions: Testable; +}; + +// wildcardTests returns a WildcardTests after processing the given wildcard patterns.
+export function wildcardTests(wildcardPatterns: EnvWildcard[]): WildcardTests { + let includePatterns: string[] = []; + let excludePatterns: string[] = []; + + wildcardPatterns.forEach((wildcardPattern) => { + let isExclude = wildcardPattern[0] === "!"; + let isLiteralLeadingExclamation = wildcardPattern.indexOf("\\!") === 0; + + if (isExclude) { + let excludePattern = wildcardToRegexPattern(wildcardPattern.slice(1)); + excludePatterns.push(excludePattern); + } else if (isLiteralLeadingExclamation) { + let includePattern = wildcardToRegexPattern(wildcardPattern.slice(1)); + includePatterns.push(includePattern); + } else { + let includePattern = wildcardToRegexPattern(wildcardPattern); + includePatterns.push(includePattern); + } + }); + + // Set some defaults. + let inclusions = NO_PATTERNS; + let exclusions = NO_PATTERNS; + + // Override if they're not empty. + if (includePatterns.length > 0) { + inclusions = new RegExp("^(" + includePatterns.join("|") + ")$"); + } + if (excludePatterns.length > 0) { + exclusions = new RegExp("^(" + excludePatterns.join("|") + ")$"); + } + + return { + inclusions, + exclusions, + }; +} diff --git a/packages/eslint-plugin-turbo/package.json b/packages/eslint-plugin-turbo/package.json index f892ae1f3cfff..888c782082eb4 100644 --- a/packages/eslint-plugin-turbo/package.json +++ b/packages/eslint-plugin-turbo/package.json @@ -1,6 +1,6 @@ { "name": "eslint-plugin-turbo", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "ESLint plugin for Turborepo", "keywords": [ "turbo", @@ -30,6 +30,9 @@ "lint": "eslint lib/**/*.ts", "lint:prettier": "prettier -c . --cache --ignore-path=../../.prettierignore" }, + "dependencies": { + "dotenv": "16.0.3" + }, "devDependencies": { "@types/eslint": "^8.4.5", "@types/estree": "^1.0.0", diff --git a/packages/turbo-codemod/jest.config.js b/packages/turbo-codemod/jest.config.js index 2c7542a8972ee..b7df7bc86b988 100644 --- a/packages/turbo-codemod/jest.config.js +++ b/packages/turbo-codemod/jest.config.js @@ -15,4 +15,6 @@ module.exports = { statements: 89, }, }, + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/turbo-codemod/package.json b/packages/turbo-codemod/package.json index 5eb8ef56a643d..898efe373c227 100644 --- a/packages/turbo-codemod/package.json +++ b/packages/turbo-codemod/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/codemod", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "Provides Codemod transformations to help upgrade your Turborepo codebase when a feature is deprecated.", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/turbo-gen/jest.config.js b/packages/turbo-gen/jest.config.js index b738f4b2bd92b..a273992d5cd70 100644 --- a/packages/turbo-gen/jest.config.js +++ b/packages/turbo-gen/jest.config.js @@ -7,5 +7,6 @@ module.exports = { transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], modulePathIgnorePatterns: ["/node_modules", "/dist"], collectCoverage: true, - verbose: true, + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/turbo-gen/package.json b/packages/turbo-gen/package.json index 76c55b80268c4..452152c2b99b5 100644 --- a/packages/turbo-gen/package.json +++ b/packages/turbo-gen/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/gen", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "Extend a Turborepo", "homepage": "https://turbo.build/repo", 
"license": "MPL-2.0", diff --git a/packages/turbo-gen/src/utils/error.ts b/packages/turbo-gen/src/utils/error.ts index 13dc78bab8020..7d310cecc4123 100644 --- a/packages/turbo-gen/src/utils/error.ts +++ b/packages/turbo-gen/src/utils/error.ts @@ -3,6 +3,7 @@ export type GenerateErrorType = | "plop_error_running_generator" | "plop_unable_to_load_config" | "plop_generator_not_found" + | "plop_no_config" | "config_directory_already_exists" // default | "unknown"; diff --git a/packages/turbo-gen/src/utils/plop.ts b/packages/turbo-gen/src/utils/plop.ts index 8317e6af1b516..3f48ecc50bdc6 100644 --- a/packages/turbo-gen/src/utils/plop.ts +++ b/packages/turbo-gen/src/utils/plop.ts @@ -26,11 +26,6 @@ export type Generator = PlopGenerator & { name: string; }; -// init ts-node for plop to support ts configs -register({ - transpileOnly: true, -}); - export function getPlop({ project, configPath, @@ -38,23 +33,43 @@ export function getPlop({ project: Project; configPath?: string; }): NodePlopAPI | undefined { + // init ts-node for plop to support ts configs + register({ + transpileOnly: true, + cwd: project.paths.root, + compilerOptions: { + module: "nodenext", + moduleResolution: "nodenext", + }, + }); + // fetch all the workspace generator configs const workspaceConfigs = getWorkspaceGeneratorConfigs({ project }); let plop: NodePlopAPI | undefined = undefined; if (configPath) { + if (!fs.existsSync(configPath)) { + throw new GeneratorError(`No config at "${configPath}"`, { + type: "plop_no_config", + }); + } + try { plop = nodePlop(configPath, { destBasePath: configPath, force: false, }); } catch (e) { - // skip + console.error(e); } } else { // look for a root config for (const configPath of SUPPORTED_ROOT_GENERATOR_CONFIGS) { const plopFile = path.join(project.paths.root, configPath); + if (!fs.existsSync(plopFile)) { + continue; + } + try { plop = nodePlop(plopFile, { destBasePath: project.paths.root, @@ -62,7 +77,7 @@ export function getPlop({ }); break; } catch (e) { - // skip + console.error(e); } } diff --git a/packages/turbo-ignore/jest.config.js b/packages/turbo-ignore/jest.config.js index 52ddbbc6c8663..e043ffb2da6a1 100644 --- a/packages/turbo-ignore/jest.config.js +++ b/packages/turbo-ignore/jest.config.js @@ -15,4 +15,6 @@ module.exports = { }, modulePathIgnorePatterns: ["/node_modules", "/dist"], transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/turbo-ignore/package.json b/packages/turbo-ignore/package.json index 9d09272f78dbe..17bfc1681e2c0 100644 --- a/packages/turbo-ignore/package.json +++ b/packages/turbo-ignore/package.json @@ -1,6 +1,6 @@ { "name": "turbo-ignore", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "", "homepage": "https://turbo.build/repo", "keywords": [], diff --git a/packages/turbo-types/package.json b/packages/turbo-types/package.json index 255d45a67f3d6..3298762b3df28 100644 --- a/packages/turbo-types/package.json +++ b/packages/turbo-types/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/types", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "Turborepo types", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/turbo-utils/jest.config.js b/packages/turbo-utils/jest.config.js index bbe7825d9e394..f109e6b44fa1f 100644 --- a/packages/turbo-utils/jest.config.js +++ b/packages/turbo-utils/jest.config.js @@ -4,4 +4,6 @@ module.exports = { 
testEnvironment: "node", modulePathIgnorePatterns: ["/node_modules", "/dist"], transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/turbo-utils/src/getTurboConfigs.ts b/packages/turbo-utils/src/getTurboConfigs.ts index cb1fd70588b95..854a0a2075134 100644 --- a/packages/turbo-utils/src/getTurboConfigs.ts +++ b/packages/turbo-utils/src/getTurboConfigs.ts @@ -7,13 +7,23 @@ import { Schema } from "@turbo/types"; import JSON5 from "json5"; const ROOT_GLOB = "turbo.json"; +const ROOT_WORKSPACE_GLOB = "package.json"; -export type TurboConfigs = Array<{ +export type WorkspaceConfig = { + workspaceName: string; + workspacePath: string; + isWorkspaceRoot: boolean; + turboConfig?: Schema; +}; + +export type TurboConfig = { config: Schema; turboConfigPath: string; workspacePath: string; isRootConfig: boolean; -}>; +}; + +export type TurboConfigs = Array; interface PackageJson { turbo?: Schema; @@ -24,7 +34,8 @@ interface Options { cache?: boolean; } -const configsCache: Record = {}; +const turboConfigsCache: Record = {}; +const workspaceConfigCache: Record = {}; // A quick and dirty workspace parser // TODO: after @turbo/workspace-convert is merged, we can leverage those utils here @@ -59,8 +70,8 @@ export function getTurboConfigs(cwd?: string, opts?: Options): TurboConfigs { const configs: TurboConfigs = []; const cacheEnabled = opts?.cache ?? true; - if (cacheEnabled && cwd && configsCache[cwd]) { - return configsCache[cwd]; + if (cacheEnabled && cwd && turboConfigsCache[cwd]) { + return turboConfigsCache[cwd]; } // parse workspaces @@ -109,7 +120,89 @@ export function getTurboConfigs(cwd?: string, opts?: Options): TurboConfigs { } if (cacheEnabled && cwd) { - configsCache[cwd] = configs; + turboConfigsCache[cwd] = configs; + } + + return configs; +} + +export function getWorkspaceConfigs( + cwd?: string, + opts?: Options +): WorkspaceConfig[] { + const turboRoot = getTurboRoot(cwd, opts); + const configs: WorkspaceConfig[] = []; + + const cacheEnabled = opts?.cache ?? 
true; + if (cacheEnabled && cwd && workspaceConfigCache[cwd]) { + return workspaceConfigCache[cwd]; + } + + // parse workspaces + if (turboRoot) { + const workspaceGlobs = getWorkspaceGlobs(turboRoot); + const workspaceConfigGlobs = workspaceGlobs.map( + (glob) => `${glob}/package.json` + ); + + const configPaths = sync([ROOT_WORKSPACE_GLOB, ...workspaceConfigGlobs], { + cwd: turboRoot, + onlyFiles: true, + followSymbolicLinks: false, + // avoid throwing when encountering permission errors or unreadable paths + suppressErrors: true, + }).map((configPath) => path.join(turboRoot, configPath)); + + configPaths.forEach((configPath) => { + try { + const rawPackageJson = fs.readFileSync(configPath, "utf8"); + const packageJsonContent = JSON.parse(rawPackageJson); + + const workspaceName = packageJsonContent.name; + const workspacePath = path.dirname(configPath); + const isWorkspaceRoot = workspacePath === turboRoot; + + // Try and get turbo.json + const turboJsonPath = path.join(workspacePath, "turbo.json"); + let rawTurboJson = null; + let turboConfig: Schema | undefined; + try { + rawTurboJson = fs.readFileSync(turboJsonPath, "utf8"); + turboConfig = JSON5.parse(rawTurboJson); + + if (turboConfig) { + // basic config validation + if (isWorkspaceRoot) { + // invalid - root config with extends + if ("extends" in turboConfig) { + return; + } + } else { + // invalid - workspace config with no extends + if (!("extends" in turboConfig)) { + return; + } + } + } + } catch (e) { + // It is fine for there to not be a turbo.json. + } + + configs.push({ + workspaceName, + workspacePath, + isWorkspaceRoot, + turboConfig, + }); + } catch (e) { + // if we can't read or parse the config, just ignore it with a warning + console.warn(e); + } + }); + } + + if (cacheEnabled && cwd) { + workspaceConfigCache[cwd] = configs; } return configs; diff --git a/packages/turbo-utils/src/index.ts b/packages/turbo-utils/src/index.ts index 0f401ac2b90e8..7ff098e575e01 100644 --- a/packages/turbo-utils/src/index.ts +++ b/packages/turbo-utils/src/index.ts @@ -1,6 +1,6 @@ // utils export { getTurboRoot } from "./getTurboRoot"; -export { getTurboConfigs } from "./getTurboConfigs"; +export { getTurboConfigs, getWorkspaceConfigs } from "./getTurboConfigs"; export { searchUp } from "./searchUp"; export { getAvailablePackageManagers } from "./managers"; export { isFolderEmpty } from "./isFolderEmpty"; @@ -22,4 +22,8 @@ export * as logger from "./logger"; // types export type { PackageManagerAvailable } from "./managers"; export type { RepoInfo } from "./examples"; -export type { TurboConfigs } from "./getTurboConfigs"; +export type { + TurboConfig, + TurboConfigs, + WorkspaceConfig, +} from "./getTurboConfigs"; diff --git a/packages/turbo-workspaces/jest.config.js b/packages/turbo-workspaces/jest.config.js index 2658950771455..b813f6c959ffa 100644 --- a/packages/turbo-workspaces/jest.config.js +++ b/packages/turbo-workspaces/jest.config.js @@ -7,7 +7,6 @@ module.exports = { transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], modulePathIgnorePatterns: ["/node_modules", "/dist"], collectCoverage: true, - verbose: true, coverageThreshold: { global: { branches: 83, @@ -16,4 +15,6 @@ module.exports = { statements: 93, }, }, + verbose: process.env.RUNNER_DEBUG === "1", + silent: process.env.RUNNER_DEBUG !== "1", }; diff --git a/packages/turbo-workspaces/package.json b/packages/turbo-workspaces/package.json index 27928a34d1e76..efbbf25ed1e09 100644 --- a/packages/turbo-workspaces/package.json +++ 
b/packages/turbo-workspaces/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/workspaces", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "Tools for working with package managers", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/turbo/package.json b/packages/turbo/package.json index 36a80a07e90b9..c52b0a0741f58 100644 --- a/packages/turbo/package.json +++ b/packages/turbo/package.json @@ -1,6 +1,6 @@ { "name": "turbo", - "version": "1.10.3-canary.0", + "version": "1.10.4-canary.2", "description": "Turborepo is a high-performance build system for JavaScript and TypeScript codebases.", "repository": "https://github.com/vercel/turbo", "bugs": "https://github.com/vercel/turbo/issues", @@ -19,11 +19,11 @@ "install.js" ], "optionalDependencies": { - "turbo-darwin-64": "1.10.3-canary.0", - "turbo-darwin-arm64": "1.10.3-canary.0", - "turbo-linux-64": "1.10.3-canary.0", - "turbo-linux-arm64": "1.10.3-canary.0", - "turbo-windows-64": "1.10.3-canary.0", - "turbo-windows-arm64": "1.10.3-canary.0" + "turbo-darwin-64": "1.10.4-canary.2", + "turbo-darwin-arm64": "1.10.4-canary.2", + "turbo-linux-64": "1.10.4-canary.2", + "turbo-linux-arm64": "1.10.4-canary.2", + "turbo-windows-64": "1.10.4-canary.2", + "turbo-windows-arm64": "1.10.4-canary.2" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 861abbcc951c1..958430bfbb982 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -220,6 +220,109 @@ importers: examples: specifiers: {} + examples-tests/helpers: + specifiers: {} + + examples-tests/npm-non-monorepo: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/npm-with-npm: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/npm-with-yarn: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/pnpm-basic: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/pnpm-gatsby: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/pnpm-kitchen-sink: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: 
link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/pnpm-with-svelte: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/yarn-non-monorepo: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/yarn-with-npm: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + + examples-tests/yarn-with-yarn: + specifiers: + '@turborepo-examples-tests/helpers': workspace:* + turborepo-examples: workspace:* + turborepo-tests-helpers: workspace:* + dependencies: + '@turborepo-examples-tests/helpers': link:../helpers + turborepo-examples: link:../../examples + turborepo-tests-helpers: link:../../turborepo-tests/helpers + packages/create-turbo: specifiers: '@turbo/test-utils': workspace:* @@ -269,6 +372,35 @@ importers: tsup: 6.7.0_typescript@4.7.4 typescript: 4.7.4 + packages/devlow-bench: + specifiers: + '@datadog/datadog-api-client': ^1.13.0 + '@types/inquirer': ^9.0.3 + '@types/minimist': ^1.2.2 + '@types/node': ^20.3.0 + '@types/split2': ^4.2.0 + chalk: 2.4.2 + inquirer: ^9.2.7 + minimist: ^1.2.8 + pidusage-tree: ^2.0.5 + playwright-chromium: ^1.35.0 + split2: ^4.2.0 + tree-kill: ^1.2.2 + dependencies: + '@datadog/datadog-api-client': 1.13.0 + chalk: 2.4.2 + inquirer: 9.2.7 + minimist: 1.2.8 + pidusage-tree: 2.0.5 + playwright-chromium: 1.35.0 + split2: 4.2.0 + tree-kill: 1.2.2 + devDependencies: + '@types/inquirer': 9.0.3 + '@types/minimist': 1.2.2 + '@types/node': 20.3.0 + '@types/split2': 4.2.0 + packages/eslint-config-turbo: specifiers: '@types/eslint': ^8.4.5 @@ -290,6 +422,7 @@ importers: '@types/estree': ^1.0.0 '@types/jest': ^27.4.0 '@types/node': ^16.11.12 + dotenv: 16.0.3 eslint: '>6.6.0' jest: ^27.4.3 json5: ^2.2.1 @@ -297,6 +430,7 @@ importers: tsup: ^6.2.0 typescript: ^4.7.4 dependencies: + dotenv: 16.0.3 eslint: 8.23.0 devDependencies: '@turbo/test-utils': link:../turbo-test-utils @@ -693,14 +827,6 @@ importers: uvu: 0.5.6 zstd-codec: 0.1.4 - turborepo-tests/examples: - specifiers: - turborepo-examples: workspace:* - turborepo-tests-helpers: workspace:* - dependencies: - turborepo-examples: link:../../examples - turborepo-tests-helpers: link:../helpers - turborepo-tests/helpers: specifiers: {} @@ -1118,6 +1244,24 @@ packages: dependencies: '@jridgewell/trace-mapping': 0.3.9 + /@datadog/datadog-api-client/1.13.0: + resolution: {integrity: sha512-Dnwq2zDeRonzk0/43J4dWOOnRuRODEAxuCEw8OIDxwZZubhEUKM4KjEfNG+6g9Hlpw4FWMl2nNvqifGnv4v3Iw==} + engines: {node: '>=12.0.0'} + dependencies: + '@types/buffer-from': 1.1.0 + '@types/node': 20.3.0 + '@types/pako': 1.0.4 + buffer-from: 1.1.2 + cross-fetch: 3.1.6 + es6-promise: 4.2.8 + form-data: 4.0.0 + loglevel: 1.8.1 + pako: 2.1.0 + url-parse: 1.5.10 + transitivePeerDependencies: + - encoding + dev: 
false + /@emotion/is-prop-valid/0.8.8: resolution: {integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==} requiresBuild: true @@ -2696,6 +2840,12 @@ packages: '@babel/types': 7.21.2 dev: true + /@types/buffer-from/1.1.0: + resolution: {integrity: sha512-BLFpLBcN+RPKUsFxqRkMiwqTOOdi+TrKr5OpLJ9qCnUdSxS6S80+QRX/mIhfR66u0Ykc4QTkReaejOM2ILh+9Q==} + dependencies: + '@types/node': 20.3.0 + dev: false + /@types/cacheable-request/6.0.3: resolution: {integrity: sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==} dependencies: @@ -2804,6 +2954,13 @@ packages: '@types/through': 0.0.30 dev: true + /@types/inquirer/9.0.3: + resolution: {integrity: sha512-CzNkWqQftcmk2jaCWdBTf9Sm7xSw4rkI1zpU/Udw3HX5//adEZUIm9STtoRP1qgWj0CWQtJ9UTvqmO2NNjhMJw==} + dependencies: + '@types/through': 0.0.30 + rxjs: 7.8.1 + dev: true + /@types/istanbul-lib-coverage/2.0.4: resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==} dev: true @@ -2878,6 +3035,10 @@ packages: /@types/minimatch/5.1.1: resolution: {integrity: sha512-v55NF6Dz0wrj14Rn8iEABTWrhYRmgkJYuokduunSiq++t3hZ9VZ6dvcDt+850Pm5sGJZk8RaHzkFCXPxVINZ+g==} + /@types/minimist/1.2.2: + resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} + dev: true + /@types/ms/0.7.31: resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} dev: false @@ -2900,6 +3061,13 @@ packages: /@types/node/18.13.0: resolution: {integrity: sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==} + /@types/node/20.3.0: + resolution: {integrity: sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ==} + + /@types/pako/1.0.4: + resolution: {integrity: sha512-Z+5bJSm28EXBSUJEgx29ioWeEEHUh6TiMkZHDhLwjc9wVFH+ressbkmX6waUZc5R3Gobn4Qu5llGxaoflZ+yhA==} + dev: false + /@types/prettier/2.7.2: resolution: {integrity: sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg==} dev: true @@ -2938,6 +3106,12 @@ packages: resolution: {integrity: sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A==} dev: true + /@types/split2/4.2.0: + resolution: {integrity: sha512-rwV0tC3XGQvQ8zdeYDZ+dLn4CJLKnYPBrSU8dRXvzMVLUPMsYTsy3/ZbE4OlejsT2D7MTGP8ePk05C98xl2seQ==} + dependencies: + '@types/node': 20.3.0 + dev: true + /@types/stack-utils/2.0.1: resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} dev: true @@ -2952,7 +3126,7 @@ packages: /@types/through/0.0.30: resolution: {integrity: sha512-FvnCJljyxhPM3gkRgWmxmDZyAQSiBQQWLI0A0VFL0K7W1oRUrPJSqNO0NvTnLkBcotdlp3lKvaT0JrnyRDkzOg==} dependencies: - '@types/node': 18.13.0 + '@types/node': 20.3.0 /@types/tinycolor2/1.4.3: resolution: {integrity: sha512-Kf1w9NE5HEgGxCRyIcRXR/ZYtDv0V8FVPtYHwLxl0O+maGX0erE77pQlD0gpP+/KByMZ87mOA79SjifhSB3PjQ==} @@ -3859,7 +4033,6 @@ packages: /buffer-from/1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - dev: true /buffer/5.7.1: resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} @@ -4051,7 +4224,6 @@ packages: /chalk/5.2.0: resolution: {integrity: 
sha512-ree3Gqw/nazQAPuJJEy+avdl7QfZMcUvmHIKgEZkGL+xOBzRvup5Hxo6LHuMceSxOabuJLJm5Yp/92R9eMmMvA==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - dev: true /change-case/3.1.0: resolution: {integrity: sha512-2AZp7uJZbYEzRPsFoa+ijKdvp9zsrnnt6+yFokfwEpeJm0xuJDVoxiRCAaTzyJND8GJkofo2IcKWaUZ/OECVzw==} @@ -4209,6 +4381,11 @@ packages: resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} engines: {node: '>= 10'} + /cli-width/4.0.0: + resolution: {integrity: sha512-ZksGS2xpa/bYkNzN3BAw1wEjsLV/ZKOf/CCrJ/QOBsxx6fOARIkwTutxp1XIOIohi6HKmOFjMoK/XaqDVUpEEw==} + engines: {node: '>= 12'} + dev: false + /client-only/0.0.1: resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} @@ -4413,6 +4590,14 @@ packages: cross-spawn: 7.0.3 dev: true + /cross-fetch/3.1.6: + resolution: {integrity: sha512-riRvo06crlE8HiqOwIpQhxwdOk4fOeR7FVM/wXoxchFEqMNUjvbs3bfo4OTgMEMHzppd4DxFBDbyySj8Cv781g==} + dependencies: + node-fetch: 2.6.11 + transitivePeerDependencies: + - encoding + dev: false + /cross-spawn-async/2.2.5: resolution: {integrity: sha512-snteb3aVrxYYOX9e8BabYFK9WhCDhTlw1YQktfTthBogxri4/2r9U2nQc0ffY73ZAxezDc+U8gvHAeU1wy1ubQ==} deprecated: cross-spawn no longer requires a build toolchain, use it instead @@ -4758,6 +4943,11 @@ packages: tslib: 2.4.1 dev: true + /dotenv/16.0.3: + resolution: {integrity: sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==} + engines: {node: '>=12'} + dev: false + /eastasianwidth/0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} dev: true @@ -4862,6 +5052,10 @@ packages: is-symbol: 1.0.4 dev: true + /es6-promise/4.2.8: + resolution: {integrity: sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==} + dev: false + /esbuild-android-64/0.14.49: resolution: {integrity: sha512-vYsdOTD+yi+kquhBiFWl3tyxnj2qZJsl4tAqwhT90ktUdnyTizgle7TjNx6Ar1bN7wcwWqZ9QInfdk2WVagSww==} engines: {node: '>=12'} @@ -6441,6 +6635,14 @@ packages: dependencies: escape-string-regexp: 1.0.5 + /figures/5.0.0: + resolution: {integrity: sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==} + engines: {node: '>=14'} + dependencies: + escape-string-regexp: 5.0.0 + is-unicode-supported: 1.3.0 + dev: false + /file-entry-cache/6.0.1: resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} engines: {node: ^10.12.0 || >=12.0.0} @@ -7333,6 +7535,27 @@ packages: through: 2.3.8 wrap-ansi: 7.0.0 + /inquirer/9.2.7: + resolution: {integrity: sha512-Bf52lnfvNxGPJPltiNO2tLBp3zC339KNlGMqOkW+dsvNikBhcVDK5kqU2lVX2FTPzuXUFX5WJDlsw//w3ZwoTw==} + engines: {node: '>=14.18.0'} + dependencies: + ansi-escapes: 4.3.2 + chalk: 5.2.0 + cli-cursor: 3.1.0 + cli-width: 4.0.0 + external-editor: 3.1.0 + figures: 5.0.0 + lodash: 4.17.21 + mute-stream: 1.0.0 + ora: 5.4.1 + run-async: 3.0.0 + rxjs: 7.8.1 + string-width: 4.2.3 + strip-ansi: 6.0.1 + through: 2.3.8 + wrap-ansi: 6.2.0 + dev: false + /internal-slot/1.0.3: resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} engines: {node: '>= 0.4'} @@ -7704,7 +7927,6 @@ packages: /is-unicode-supported/1.3.0: resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} engines: {node: 
'>=12'} - dev: true /is-upper-case/1.1.2: resolution: {integrity: sha512-GQYSJMgfeAmVwh9ixyk888l7OIhNAGKtY6QA+IrWlu9MDTCaXmeozOZ2S9Knj7bQwBO/H6J2kb+pbyTUiMNbsw==} @@ -8459,7 +8681,7 @@ packages: resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} hasBin: true dependencies: - minimist: 1.2.7 + minimist: 1.2.8 dev: true /json5/2.2.1: @@ -8644,7 +8866,7 @@ packages: log-update: 4.0.0 p-map: 4.0.0 rfdc: 1.3.0 - rxjs: 7.6.0 + rxjs: 7.8.1 through: 2.3.8 wrap-ansi: 7.0.0 dev: true @@ -8738,6 +8960,11 @@ packages: wrap-ansi: 6.2.0 dev: true + /loglevel/1.8.1: + resolution: {integrity: sha512-tCRIJM51SHjAayKwC+QAg8hT8vg6z7GSgLJKGvzuPb1Wc+hLzqtuVLxp6/HzSPOozuK+8ErAhy7U/sVzw8Dgfg==} + engines: {node: '>= 0.6.0'} + dev: false + /longest-streak/3.0.1: resolution: {integrity: sha512-cHlYSUpL2s7Fb3394mYxwTYj8niTaNHUCLr0qdiCXQfSjfuA7CKofpX2uSwEfFDQ0EB7JcnMnm+GjbqqoinYYg==} dev: false @@ -9495,6 +9722,9 @@ packages: /minimist/1.2.7: resolution: {integrity: sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==} + /minimist/1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + /minipass/3.3.4: resolution: {integrity: sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==} engines: {node: '>=8'} @@ -9613,6 +9843,11 @@ packages: /mute-stream/0.0.8: resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + /mute-stream/1.0.0: + resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dev: false + /mz/2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} dependencies: @@ -9902,6 +10137,18 @@ packages: resolution: {integrity: sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==} dev: false + /node-fetch/2.6.11: + resolution: {integrity: sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: false + /node-fetch/2.6.9: resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==} engines: {node: 4.x || >=6.0.0} @@ -10316,6 +10563,10 @@ packages: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} + /pako/2.1.0: + resolution: {integrity: sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==} + dev: false + /param-case/2.1.1: resolution: {integrity: sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w==} dependencies: @@ -10508,7 +10759,6 @@ packages: resolution: {integrity: sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==} engines: {node: '>=0.10'} hasBin: true - dev: true /pidtree/0.6.0: resolution: {integrity: sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==} @@ -10516,6 +10766,20 @@ packages: hasBin: true dev: true + /pidusage-tree/2.0.5: + resolution: {integrity: 
sha512-4J9SkX1IorF9srgzbTrXpfO2xA4JHESDn5AGGDtCHXvVAGNvP4KzZpWwXhLDKlB+dC5rcERIKS5Z7JktjzCcCA==} + dependencies: + pidtree: 0.3.1 + pidusage: 2.0.21 + dev: false + + /pidusage/2.0.21: + resolution: {integrity: sha512-cv3xAQos+pugVX+BfXpHsbyz/dLzX+lr44zNMsYiGxUw+kV5sgQCIcLd1z+0vq+KyC7dJ+/ts2PsfgWfSC3WXA==} + engines: {node: '>=8'} + dependencies: + safe-buffer: 5.2.1 + dev: false + /pify/2.3.0: resolution: {integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==} engines: {node: '>=0.10.0'} @@ -10538,6 +10802,21 @@ packages: find-up: 4.1.0 dev: true + /playwright-chromium/1.35.0: + resolution: {integrity: sha512-94xeZO0dv/PRZ/LH+vb6KFlOs8+Vt8Zw3IN+BfmL11xsbIDKRBtM2aS6x36fWXuFOITFVvSFjXiK4MJlW5q9qw==} + engines: {node: '>=16'} + hasBin: true + requiresBuild: true + dependencies: + playwright-core: 1.35.0 + dev: false + + /playwright-core/1.35.0: + resolution: {integrity: sha512-muMXyPmIx/2DPrCHOD1H1ePT01o7OdKxKj2ebmCAYvqhUy+Y1bpal7B0rdoxros7YrXI294JT/DWw2LqyiqTPA==} + engines: {node: '>=16'} + hasBin: true + dev: false + /plop/3.1.1: resolution: {integrity: sha512-NuctKmuNUACXBQn25bBr5oj/75nHxdKGwjA/+b7cVoj1sp+gTVqcc8eAr4QcNJgMPsZWRJBN2kMkgmsqbqV9gg==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -10783,7 +11062,6 @@ packages: /querystringify/2.2.0: resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} - dev: true /queue-microtask/1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} @@ -11062,7 +11340,6 @@ packages: /requires-port/1.0.0: resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} - dev: true /resolve-alpn/1.2.1: resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} @@ -11198,6 +11475,11 @@ packages: resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} engines: {node: '>=0.12.0'} + /run-async/3.0.0: + resolution: {integrity: sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==} + engines: {node: '>=0.12.0'} + dev: false + /run-parallel/1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} dependencies: @@ -11218,6 +11500,12 @@ packages: resolution: {integrity: sha512-DDa7d8TFNUalGC9VqXvQ1euWNN7sc63TrUCuM9J998+ViviahMIjKSOU7rfcgFOF+FCD71BhDRv4hrFz+ImDLQ==} dependencies: tslib: 2.4.1 + dev: false + + /rxjs/7.8.1: + resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==} + dependencies: + tslib: 2.4.1 /sade/1.8.1: resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} @@ -11658,6 +11946,11 @@ packages: readable-stream: 3.6.0 dev: false + /split2/4.2.0: + resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} + engines: {node: '>= 10.x'} + dev: false + /sprintf-js/1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} @@ -12263,7 +12556,6 @@ packages: /tree-kill/1.2.2: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true - dev: 
true /trim-lines/3.0.1: resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} @@ -12541,7 +12833,7 @@ dependencies: '@types/json5': 0.0.29 json5: 1.0.2 - minimist: 1.2.7 + minimist: 1.2.8 strip-bom: 3.0.0 dev: true @@ -13032,7 +13324,6 @@ dependencies: querystringify: 2.2.0 requires-port: 1.0.0 - dev: true /use/3.1.1: resolution: {integrity: sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==} engines: {node: '>=0.10.0'} @@ -13312,7 +13603,6 @@ ansi-styles: 4.3.0 string-width: 4.2.3 strip-ansi: 6.0.1 - dev: true /wrap-ansi/7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 1a0aa0941d4da..0d28384e803d9 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -5,6 +5,7 @@ packages: - "packages/*" - "benchmark" - "turborepo-tests/*" + - "examples-tests/*" # Intentionally exclude the `npm` `turbo` package from the workspaces. - "!packages/turbo" - "packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose" diff --git a/socket.yaml b/socket.yaml index 43d92bfa4887e..7e8e8d3a623c9 100644 --- a/socket.yaml +++ b/socket.yaml @@ -4,3 +4,4 @@ version: 2 projectIgnorePaths: - turborepo-tests - packages/turbo-codemod/__tests__/ + - examples/ diff --git a/turbo.json b/turbo.json index 38ba76ef23ac6..bde279e2a02d9 100644 --- a/turbo.json +++ b/turbo.json @@ -180,9 +180,25 @@ "//#build:ts": { "outputs": ["packages/**/dist"] }, + "build:ts": { + "outputs": ["dist/**/*"] + }, "build": { "outputs": ["dist/**/*", ".next/**/*"], "dependsOn": ["^build"] + }, + // This is a synthetic task that lets us pull in other workspaces as dependencies, + // so changes in internal workspaces that we depend on will trigger this task. + "topo": { + "dependsOn": ["^topo"] + }, + + // example-test is in the root turbo.json so we don't have to redefine it in each workspace. + // Since example tests also have a unique set of dependencies, we don't model them with the + // test task, or we'd have to override the dependencies in each workspace. 
+ "example-test": { + "dependsOn": ["topo", "@turborepo-examples-tests/helpers#setup"], + "inputs": ["*.t", "../setup.sh"] } }, "experimentalSpaces": { diff --git a/turborepo-tests/examples/package.json b/turborepo-tests/examples/package.json deleted file mode 100644 index c8a245da4b756..0000000000000 --- a/turborepo-tests/examples/package.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "turborepo-tests-examples", - "scripts": { - "test": "./run-example.sh" - }, - "dependencies": { - "turborepo-tests-helpers": "workspace:*", - "turborepo-examples": "workspace:*" - } -} diff --git a/turborepo-tests/examples/run-example.sh b/turborepo-tests/examples/run-example.sh deleted file mode 100755 index 9bbcd99cf2c90..0000000000000 --- a/turborepo-tests/examples/run-example.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -set -e - -python3 -m venv .cram_env -.cram_env/bin/pip install prysk - -export folder=$1 -export pkgManager=$2 - -TEST_FILE="tests/$2-$1.t" - -if [ -f "$TEST_FILE" ]; then - echo "Running $TEST_FILE" - .cram_env/bin/prysk --shell="$(which bash)" "$TEST_FILE" -else - echo "Could not find $TEST_FILE" - exit 1 -fi diff --git a/turborepo-tests/examples/turbo.json b/turborepo-tests/examples/turbo.json deleted file mode 100644 index 3a4ed3341b43f..0000000000000 --- a/turborepo-tests/examples/turbo.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "extends": ["//"], - "pipeline": { - // This is a synthetic tasks that lets us pull in other workspaces as dependencies - // So changes in internal workspaces that we depend on, will trigger this task. - "topo": { - "dependsOn": ["^topo"] - }, - "test": { - "dependsOn": ["topo"], - "outputs": [] - } - } -} diff --git a/version.txt b/version.txt index 47a1f625c8f11..b0abe0bded9c9 100644 --- a/version.txt +++ b/version.txt @@ -1,2 +1,2 @@ -1.10.3-canary.0 +1.10.4-canary.2 canary