diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ab0ac01faf..caf20fcf8f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,6 +20,67 @@ env: OCAMLRUNPARAM: b jobs: + build-rewatch: + strategy: + fail-fast: false + matrix: + os: [ + macos-13, # x64 + macos-14, # ARM + ubuntu-latest, # x64 + buildjet-2vcpu-ubuntu-2204-arm, # ARM + windows-latest, + ] + include: + - os: macos-13 + rust-target: x86_64-apple-darwin + - os: macos-14 + rust-target: aarch64-apple-darwin + - os: ubuntu-latest + rust-target: x86_64-unknown-linux-musl + - os: buildjet-2vcpu-ubuntu-2204-arm + rust-target: aarch64-unknown-linux-musl + - os: windows-latest + rust-target: x86_64-pc-windows-gnu + + runs-on: ${{matrix.os}} + + env: + RUST_BACKTRACE: "1" + SCCACHE_GHA_ENABLED: "true" + RUSTC_WRAPPER: "sccache" + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install musl gcc + if: runner.os == 'Linux' + run: sudo apt-get install -y musl-tools + + - name: Set up sccache + uses: mozilla-actions/sccache-action@v0.0.4 + with: + version: "v0.8.0" + + - name: Install rust toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: stable + targets: ${{matrix.rust-target}} + + - name: Build rewatch + run: cargo build --manifest-path rewatch/Cargo.toml --target ${{matrix.rust-target}} --release + + - name: Get artifact dir name + run: node .github/workflows/get_artifact_dir_name.js + + - name: "Upload artifact: rewatch binary" + uses: actions/upload-artifact@v4 + with: + name: rewatch-${{env.artifact_dir_name}} + path: rewatch/target/${{matrix.rust-target}}/release/rewatch${{ runner.os == 'Windows' && '.exe' || '' }} + # Build statically linked Linux binaries in an Alpine-based Docker container # See https://ocamlpro.com/blog/2021_09_02_generating_static_and_portable_executables_with_ocaml # for more info. 
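A note on the `Get artifact dir name` step in the job above: `scripts/bin_path.js` exports a platform-specific `dirName`, and the helper script (shown further down in this diff) appends it to the file that `$GITHUB_ENV` points at, which is how later steps can read it as `env.artifact_dir_name`. A minimal sketch of that mechanism — the platform-to-name mapping below is an assumption inferred from the directory names in `packages/artifacts.txt`, not the actual contents of `bin_path.js`:

```js
const fs = require("fs");
const os = require("os");

// Hypothetical stand-in for scripts/bin_path.js's dirName: map the current
// platform/arch to one of the directory names used in packages/artifacts.txt
// (darwin, darwinarm64, linux, linuxarm64, win32).
function platformDirName() {
  if (process.platform === "win32") return "win32";
  return process.arch === "arm64"
    ? `${process.platform}arm64`
    : process.platform;
}

// Appending `key=value` lines to the file named by $GITHUB_ENV is GitHub
// Actions' documented way to export a variable to all subsequent steps.
fs.appendFileSync(
  process.env.GITHUB_ENV,
  `artifact_dir_name=${platformDirName()}${os.EOL}`
);
```

This keeps the matrix jobs and the later download/upload steps agreeing on one canonical directory name per OS/arch pair, instead of recomputing it in YAML.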
@@ -59,6 +120,7 @@ jobs: # Therefore, only upload the binaries from the static build upload-linux-arm64-binaries: needs: + - build-rewatch - static-binaries-linux runs-on: buildjet-2vcpu-ubuntu-2204-arm @@ -73,31 +135,33 @@ jobs: with: name: static-binaries-linux-${{ runner.arch }} - - name: Make static linux binaries executable - if: runner.os == 'Linux' - run: | - chmod +x ninja/ninja - chmod +x _build/install/default/bin/* - - name: Use Node.js uses: actions/setup-node@v4 with: node-version: 18 + - name: Get artifact dir name + run: node .github/workflows/get_artifact_dir_name.js + + - name: Download rewatch binary + uses: actions/download-artifact@v4 + with: + name: rewatch-${{ env.artifact_dir_name }} + path: rewatch + - name: Copy exes to platform bin dirs run: node ./scripts/copyExes.js - - name: Prepare artifact upload - run: node .github/workflows/get_artifact_info.js - - name: "Upload artifacts: binaries" uses: actions/upload-artifact@v4 with: - name: ${{ env.artifact_name }} - path: ${{ env.artifact_path }} + name: binaries-${{ env.artifact_dir_name }} + path: ${{ env.artifact_dir_name }} build: - needs: static-binaries-linux + needs: + - build-rewatch + - static-binaries-linux strategy: fail-fast: false @@ -133,10 +197,20 @@ jobs: with: name: static-binaries-linux-${{ runner.arch }} + - name: Get artifact dir name + run: node .github/workflows/get_artifact_dir_name.js + + - name: Download rewatch binary + uses: actions/download-artifact@v4 + with: + name: rewatch-${{ env.artifact_dir_name }} + path: rewatch + - name: Make static linux binaries executable if: runner.os == 'Linux' run: | chmod +x ninja/ninja + chmod +x rewatch/rewatch chmod +x _build/install/default/bin/* - name: Use OCaml ${{matrix.ocaml_compiler}} @@ -252,14 +326,11 @@ jobs: KEYCDN_PASSWORD: ${{ secrets.KEYCDN_PASSWORD }} run: bash playground/upload_bundle.sh - - name: Prepare artifact upload - run: node .github/workflows/get_artifact_info.js - - name: "Upload artifacts: binaries" uses: actions/upload-artifact@v4 with: - name: ${{ env.artifact_name }} - path: ${{ env.artifact_path }} + name: binaries-${{ env.artifact_dir_name }} + path: ${{ env.artifact_dir_name }} - name: "Upload artifacts: lib/ocaml" if: runner.os == 'Linux' diff --git a/.github/workflows/get_artifact_dir_name.js b/.github/workflows/get_artifact_dir_name.js new file mode 100644 index 0000000000..2245c9c65c --- /dev/null +++ b/.github/workflows/get_artifact_dir_name.js @@ -0,0 +1,10 @@ +const fs = require("fs"); +const os = require("os"); + +const artifactDirName = require("../../scripts/bin_path").dirName; + +// Pass artifactDirName to subsequent GitHub actions +fs.appendFileSync( + process.env.GITHUB_ENV, + `artifact_dir_name=${artifactDirName}${os.EOL}` +); diff --git a/.github/workflows/get_artifact_info.js b/.github/workflows/get_artifact_info.js deleted file mode 100644 index 777ad31194..0000000000 --- a/.github/workflows/get_artifact_info.js +++ /dev/null @@ -1,13 +0,0 @@ -const fs = require("fs"); -const os = require("os"); - -const artifactPath = require("../../scripts/bin_path").dirName; -const artifactName = "binaries-" + artifactPath; - -console.log("Artifact path:", artifactPath); - -// Pass artifactPath and artifactName to subsequent GitHub actions -fs.appendFileSync( - process.env.GITHUB_ENV, - `artifact_path=${artifactPath}${os.EOL}artifact_name=${artifactName}${os.EOL}` -); diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 99852850d3..c9f37de8be 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,6 +16,7 @@ Happy 
hacking! - [NodeJS v18](https://nodejs.org/) - C compiler toolchain (usually installed with `xcode` on Mac) +- Rust toolchain (required to build rewatch; follow the instructions at https://www.rust-lang.org/tools/install) - `opam` (OCaml Package Manager) - VSCode (+ [OCaml Platform Extension](https://marketplace.visualstudio.com/items?itemName=ocamllabs.ocaml-platform)) diff --git a/Makefile b/Makefile index 7fb14bda69..e47aa4e9a9 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ SHELL = /bin/bash DUNE_BIN_DIR = ./_build/install/default/bin -build: ninja +build: ninja rewatch dune build ./scripts/copyExes.js @@ -15,6 +15,10 @@ bench: dce: reanalyze.exe -dce-cmt _build/default/jscomp +rewatch: + cargo build --manifest-path rewatch/Cargo.toml + cp rewatch/target/debug/rewatch rewatch + ninja/ninja: ./scripts/buildNinjaBinary.js @@ -78,10 +82,11 @@ clean-gentype: clean: dune clean - ./scripts/ninja.js clean + cargo clean --manifest-path rewatch/Cargo.toml && rm -f rewatch/rewatch + ./scripts/ninja.js clean && rm -f ninja/ninja clean-all: clean clean-gentype .DEFAULT_GOAL := build -.PHONY: build watch ninja bench dce test test-syntax test-syntax-roundtrip test-gentype test-all lib playground playground-cmijs playground-release artifacts format checkformat clean-gentype clean clean-all +.PHONY: build watch rewatch ninja bench dce test test-syntax test-syntax-roundtrip test-gentype test-all lib playground playground-cmijs playground-release artifacts format checkformat clean-gentype clean clean-all diff --git a/package.json b/package.json index f971840239..5dc40c55b5 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,8 @@ "bin": { "bsc": "bsc", "bstracing": "lib/bstracing", - "rescript": "rescript" + "rescript": "rescript", + "rewatch": "scripts/rewatch" }, "scripts": { "test": "node scripts/ciTest.js -all", diff --git a/packages/artifacts.txt b/packages/artifacts.txt index 88bf15a9b7..7552d667f9 100644 --- a/packages/artifacts.txt +++ b/packages/artifacts.txt @@ -9,10 +9,12 @@ darwin/bsb_helper.exe darwin/bsc.exe darwin/ninja.exe darwin/rescript.exe +darwin/rewatch.exe darwinarm64/bsb_helper.exe darwinarm64/bsc.exe darwinarm64/ninja.exe darwinarm64/rescript.exe +darwinarm64/rewatch.exe docs/docson/build-schema.json lib/bstracing lib/es6/arg.js @@ -1056,10 +1058,12 @@ linux/bsb_helper.exe linux/bsc.exe linux/ninja.exe linux/rescript.exe +linux/rewatch.exe linuxarm64/bsb_helper.exe linuxarm64/bsc.exe linuxarm64/ninja.exe linuxarm64/rescript.exe +linuxarm64/rewatch.exe ninja.COPYING package.json rescript @@ -1070,7 +1074,9 @@ scripts/rescript_convert.js scripts/rescript_dump.js scripts/rescript_format.js scripts/rescript_postinstall.js +scripts/rewatch win32/bsb_helper.exe win32/bsc.exe win32/ninja.exe -win32/rescript.exe \ No newline at end of file +win32/rescript.exe +win32/rewatch.exe \ No newline at end of file diff --git a/rewatch/.gitignore b/rewatch/.gitignore new file mode 100644 index 0000000000..6655aab8aa --- /dev/null +++ b/rewatch/.gitignore @@ -0,0 +1,2 @@ +target/ +rewatch diff --git a/rewatch/Cargo.lock b/rewatch/Cargo.lock new file mode 100644 index 0000000000..c07c072241 --- /dev/null +++ b/rewatch/Cargo.lock @@ -0,0 +1,1417 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "getrandom", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstream" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" + +[[package]] +name = "anstyle-parse" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" + +[[package]] +name = "blake3" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + +[[package]] 
+name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cc" +version = "1.0.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2678b2e3449475e95b0aa6f9b506a28e61b3dc8996592b983695e8ebb58a8b41" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "clap" +version = "3.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +dependencies = [ + "bitflags 1.3.2", + "clap_lex 0.2.4", + "indexmap", + "textwrap", +] + +[[package]] +name = "clap" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +dependencies = [ + "anstream", + "anstyle", + "clap_lex 0.7.0", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "clap_lex" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.52.0", +] + +[[package]] +name = "constant_time_eq" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast", + "ciborium", + "clap 3.2.25", + "criterion-plot", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "ctrlc" +version = "3.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" +dependencies = [ + "nix", + "windows-sys 0.52.0", +] + +[[package]] +name = "either" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "filetime" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "windows-sys 0.52.0", +] + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "half" +version = "2.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "indicatif" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", +] + +[[package]] +name = "inotify" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "is-terminal" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kqueue" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "memchr" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "nix" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +dependencies = [ + "bitflags 2.5.0", + "cfg-if", + "cfg_aliases", + "libc", +] + +[[package]] +name = "notify" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "729f63e1ca555a43fe3efa4f3efdf4801c479da85b432242a7b726f353c88486" +dependencies = [ + "bitflags 1.3.2", + "crossbeam-channel", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "mio", + "serde", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "notify-debouncer-mini" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e23e9fa24f094b143c1eb61f90ac6457de87be6987bc70746e0179f7dbc9007b" +dependencies = [ + "crossbeam-channel", + "notify", +] + +[[package]] +name = "ntapi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" +dependencies = [ + "winapi", +] + +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "os_str_bytes" +version = "6.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + +[[package]] +name = "proc-macro2" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" + +[[package]] +name = "rewatch" +version = "1.0.6" +dependencies = [ + "ahash", + "blake3", + "clap 4.5.4", + "console", + 
"convert_case", + "criterion", + "ctrlc", + "env_logger", + "futures", + "futures-timer", + "indicatif", + "log", + "notify", + "notify-debouncer-mini", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "sysinfo", +] + +[[package]] +name = "ryu" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "serde" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.115" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "2.0.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sysinfo" +version = "0.29.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd727fc423c2060f6c92d9534cef765c65a6ed3f428a03d7def74a8c4348e666" +dependencies = [ + "cfg-if", + "core-foundation-sys", + "libc", + "ntapi", + "once_cell", + "rayon", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + +[[package]] +name = 
"unicode-width" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + +[[package]] +name = 
"windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/rewatch/Cargo.toml b/rewatch/Cargo.toml new file mode 100644 index 0000000000..bcbdf1a87a --- /dev/null +++ b/rewatch/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "rewatch" +version = "1.0.6" +edition = "2021" + +[dependencies] +notify = { version = "5.1.0", features = ["serde"] } +serde = { version = "1.0.152", features = ["derive"] } +serde_derive = "1.0.152" +serde_json = { version = "1.0.93" } +ahash = "0.8.3" +convert_case = "0.6.0" +rayon = "1.6.1" +log = { version = "0.4.17", features = [ + "max_level_debug", + "release_max_level_warn", +] } +env_logger = "0.10.0" +indicatif = "0.17.3" +console = "0.15.5" +blake3 = "1.3.3" +notify-debouncer-mini = { version = "0.2.0" } +regex = "1.7.1" +futures = "0.3.25" +futures-timer = "3.0.2" +clap = { version = "4.3.17", features = ["derive"] } +sysinfo = "0.29.10" +ctrlc = "3.4.4" + + +[profile.release] +codegen-units = 1 +lto = true + +[dev-dependencies] +criterion = "0.4" + +[[bench]] +name = "base_bench" +harness = false diff --git a/rewatch/benches/base_bench.rs b/rewatch/benches/base_bench.rs new file mode 100644 index 0000000000..8b781a0390 --- /dev/null +++ b/rewatch/benches/base_bench.rs @@ -0,0 +1,54 @@ +use criterion::{criterion_group, criterion_main, Criterion}; +use rewatch::build; +use rewatch::build::clean; +use rewatch::build::packages; +use rewatch::helpers; + +use std::fs::File; +use std::io::prelude::*; + +fn criterion_benchmark(c: &mut Criterion) { + c.bench_function("build-package-tree", |b| { + // Folder for the testrepo + let folder = "walnut_monorepo"; + let project_root = helpers::get_abs_path(folder); + + b.iter(|| { + packages::make(&None, &project_root, &None); + }) + }); + + c.bench_function("clean-build-change-build", |b| { + // Folder for the testrepo + let folder = "testrepo"; + let filename = "testrepo/packages/dep02/src/Dep02.res"; + // Clean the build + clean::clean(folder); + // Read the file we'll be mutating + let mut file = File::options() + .read(true) + .write(true) + .append(true) + .open(filename) + .unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + + b.iter(|| { + // Create initial build + let _ = build::build(&None, folder, false); + // Update the file + let _ = writeln!(file, r#"let log2 = () => ["a", "b"]->forEach(Js.log);log2()"#); + // Create another build + let _ = build::build(&None, folder, false); + + // Reset state + File::create(filename).unwrap(); + file.write_all(contents.as_bytes()).unwrap(); + let _ = build::build(&None, folder, false); + }) + }); +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/rewatch/rust-toolchain.toml b/rewatch/rust-toolchain.toml new file mode 100644 index 0000000000..292fe499e3 --- /dev/null +++ b/rewatch/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "stable" diff --git a/rewatch/rustfmt.toml b/rewatch/rustfmt.toml new file mode 100644 index 0000000000..423bfda59f --- /dev/null +++ b/rewatch/rustfmt.toml @@ -0,0 +1,2 @@ + +max_width = 110 \ No newline at end of file diff --git a/rewatch/src/bsconfig.rs b/rewatch/src/bsconfig.rs new file mode 100644 index 0000000000..3abcf40621 --- /dev/null +++ b/rewatch/src/bsconfig.rs @@ -0,0 +1,392 @@ +use crate::build::packages; +use convert_case::{Case, Casing}; +use serde::Deserialize; +use std::fs; +use std::path::{Path, PathBuf}; + +#[derive(Deserialize, Debug, Clone)] 
+#[serde(untagged)] +pub enum OneOrMore { + Multiple(Vec), + Single(T), +} + +#[derive(Deserialize, Debug, Clone, PartialEq, Hash)] +#[serde(untagged)] +pub enum Subdirs { + Qualified(Vec), + Recurse(bool), +} +impl Eq for Subdirs {} + +#[derive(Deserialize, Debug, Clone, PartialEq, Hash)] +pub struct PackageSource { + pub dir: String, + pub subdirs: Option, + #[serde(rename = "type")] + pub type_: Option, +} + +/// `to_qualified_without_children` takes a tree like structure of dependencies, coming in from +/// `bsconfig`, and turns it into a flat list. The main thing we extract here are the source +/// folders, and optional subdirs, where potentially, the subdirs recurse or not. +pub fn to_qualified_without_children(s: &Source, sub_path: Option) -> PackageSource { + match s { + Source::Shorthand(dir) => PackageSource { + dir: sub_path + .map(|p| p.join(Path::new(dir))) + .unwrap_or(Path::new(dir).to_path_buf()) + .to_string_lossy() + .to_string(), + subdirs: None, + type_: None, + }, + Source::Qualified(PackageSource { + dir, + type_, + subdirs: Some(Subdirs::Recurse(should_recurse)), + }) => PackageSource { + dir: sub_path + .map(|p| p.join(Path::new(dir))) + .unwrap_or(Path::new(dir).to_path_buf()) + .to_string_lossy() + .to_string(), + subdirs: Some(Subdirs::Recurse(*should_recurse)), + type_: type_.to_owned(), + }, + Source::Qualified(PackageSource { dir, type_, .. }) => PackageSource { + dir: sub_path + .map(|p| p.join(Path::new(dir))) + .unwrap_or(Path::new(dir).to_path_buf()) + .to_string_lossy() + .to_string(), + subdirs: None, + type_: type_.to_owned(), + }, + } +} + +impl Eq for PackageSource {} + +#[derive(Deserialize, Debug, Clone, PartialEq, Hash)] +#[serde(untagged)] +pub enum Source { + Shorthand(String), + Qualified(PackageSource), +} +impl Eq for Source {} + +#[derive(Deserialize, Debug, Clone)] +pub struct PackageSpec { + pub module: String, + #[serde(rename = "in-source")] + pub in_source: bool, + pub suffix: Option, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(untagged)] +pub enum Error { + Catchall(bool), + Qualified(String), +} + +#[derive(Deserialize, Debug, Clone)] +pub struct Warnings { + pub number: Option, + pub error: Option, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct Reason { + #[serde(rename = "react-jsx")] + pub react_jsx: i32, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(untagged)] +pub enum NamespaceConfig { + Bool(bool), + String(String), +} + +#[derive(Deserialize, Debug, Clone)] +pub enum JsxMode { + #[serde(rename = "classic")] + Classic, + #[serde(rename = "automatic")] + Automatic, +} + +#[derive(Deserialize, Debug, Clone)] +pub enum JsxModule { + #[serde(rename = "react")] + React, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct JsxSpecs { + pub version: Option, + pub module: Option, + pub mode: Option, + #[serde(rename = "v3-dependencies")] + pub v3_dependencies: Option>, +} + +/// # bsconfig.json representation +/// This is tricky, there is a lot of ambiguity. This is probably incomplete. 
+#[derive(Deserialize, Debug, Clone)] +pub struct Config { + pub name: String, + pub sources: OneOrMore, + #[serde(rename = "package-specs")] + pub package_specs: Option>, + pub warnings: Option, + pub suffix: Option, + #[serde(rename = "pinned-dependencies")] + pub pinned_dependencies: Option>, + #[serde(rename = "bs-dependencies")] + pub bs_dependencies: Option>, + #[serde(rename = "bs-dev-dependencies")] + pub bs_dev_dependencies: Option>, + #[serde(rename = "ppx-flags")] + pub ppx_flags: Option>>, + #[serde(rename = "bsc-flags")] + pub bsc_flags: Option>>, + pub reason: Option, + pub namespace: Option, + pub jsx: Option, + pub uncurried: Option, + // this is a new feature of rewatch, and it's not part of the bsconfig.json spec + #[serde(rename = "namespace-entry")] + pub namespace_entry: Option, + // this is a new feature of rewatch, and it's not part of the bsconfig.json spec + #[serde(rename = "allowed-dependents")] + pub allowed_dependents: Option>, +} + +/// This flattens string flags +pub fn flatten_flags(flags: &Option>>) -> Vec { + match flags { + None => vec![], + Some(xs) => xs + .iter() + .flat_map(|x| match x { + OneOrMore::Single(y) => vec![y.to_owned()], + OneOrMore::Multiple(ys) => ys.to_owned(), + }) + .collect::>() + .iter() + .flat_map(|str| str.split(' ')) + .map(|str| str.to_string()) + .collect::>(), + } +} + +/// Since ppx-flags could be one or more, and could be nested potentiall, this function takes the +/// flags and flattens them outright. +pub fn flatten_ppx_flags( + node_modules_dir: &String, + flags: &Option>>, + package_name: &String, +) -> Vec { + match flags { + None => vec![], + Some(xs) => xs + .iter() + .flat_map(|x| match x { + OneOrMore::Single(y) => { + let first_character = y.chars().next(); + match first_character { + Some('.') => { + vec![ + "-ppx".to_string(), + node_modules_dir.to_owned() + "/" + package_name + "/" + y, + ] + } + _ => vec!["-ppx".to_string(), node_modules_dir.to_owned() + "/" + y], + } + } + OneOrMore::Multiple(ys) if ys.is_empty() => vec![], + OneOrMore::Multiple(ys) => { + let first_character = ys[0].chars().next(); + let ppx = match first_character { + Some('.') => node_modules_dir.to_owned() + "/" + package_name + "/" + &ys[0], + _ => node_modules_dir.to_owned() + "/" + &ys[0], + }; + vec![ + "-ppx".to_string(), + vec![ppx] + .into_iter() + .chain(ys[1..].to_owned()) + .collect::>() + .join(" "), + ] + } + }) + .collect::>(), + } +} + +/// Try to convert a bsconfig from a certain path to a bsconfig struct +pub fn read(path: String) -> Config { + fs::read_to_string(path.clone()) + .map_err(|e| format!("Could not read bsconfig. {path} - {e}")) + .and_then(|x| { + serde_json::from_str::(&x).map_err(|e| format!("Could not parse bsconfig. 
+
+/// Since ppx-flags could be one or more, and could potentially be nested, this function takes the
+/// flags and flattens them outright.
+pub fn flatten_ppx_flags(
+    node_modules_dir: &String,
+    flags: &Option<Vec<OneOrMore<String>>>,
+    package_name: &String,
+) -> Vec<String> {
+    match flags {
+        None => vec![],
+        Some(xs) => xs
+            .iter()
+            .flat_map(|x| match x {
+                OneOrMore::Single(y) => {
+                    let first_character = y.chars().next();
+                    match first_character {
+                        Some('.') => {
+                            vec![
+                                "-ppx".to_string(),
+                                node_modules_dir.to_owned() + "/" + package_name + "/" + y,
+                            ]
+                        }
+                        _ => vec!["-ppx".to_string(), node_modules_dir.to_owned() + "/" + y],
+                    }
+                }
+                OneOrMore::Multiple(ys) if ys.is_empty() => vec![],
+                OneOrMore::Multiple(ys) => {
+                    let first_character = ys[0].chars().next();
+                    let ppx = match first_character {
+                        Some('.') => node_modules_dir.to_owned() + "/" + package_name + "/" + &ys[0],
+                        _ => node_modules_dir.to_owned() + "/" + &ys[0],
+                    };
+                    vec![
+                        "-ppx".to_string(),
+                        vec![ppx]
+                            .into_iter()
+                            .chain(ys[1..].to_owned())
+                            .collect::<Vec<String>>()
+                            .join(" "),
+                    ]
+                }
+            })
+            .collect::<Vec<String>>(),
+    }
+}
+
+/// Try to convert a bsconfig from a certain path to a bsconfig struct
+pub fn read(path: String) -> Config {
+    fs::read_to_string(path.clone())
+        .map_err(|e| format!("Could not read bsconfig. {path} - {e}"))
+        .and_then(|x| {
+            serde_json::from_str::<Config>(&x).map_err(|e| format!("Could not parse bsconfig. {path} - {e}"))
+        })
+        .expect("Errors reading bsconfig")
+}
+
+fn check_if_rescript11_or_higher(version: &str) -> bool {
+    version.split('.').next().unwrap().parse::<usize>().unwrap() >= 11
+}
+
+fn namespace_from_package_name(package_name: &str) -> String {
+    package_name
+        .to_owned()
+        .replace('@', "")
+        .replace('/', "_")
+        .to_case(Case::Pascal)
+}
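Illustrative only: `@` is stripped, `/` becomes `_`, and `convert_case` Pascal-cases the result — this is what `get_namespace` below falls back to when the config just says `true`:

```rust
fn main() {
    assert_eq!(namespace_from_package_name("@teamwalnut/app"), "TeamwalnutApp");
    assert_eq!(namespace_from_package_name("my-package"), "MyPackage");
}
```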
+
+impl Config {
+    pub fn get_namespace(&self) -> packages::Namespace {
+        let namespace_from_package = namespace_from_package_name(&self.name);
+        match (self.namespace.as_ref(), self.namespace_entry.as_ref()) {
+            (Some(NamespaceConfig::Bool(false)), _) => packages::Namespace::NoNamespace,
+            (None, _) => packages::Namespace::NoNamespace,
+            (Some(NamespaceConfig::Bool(true)), None) => {
+                packages::Namespace::Namespace(namespace_from_package)
+            }
+            (Some(NamespaceConfig::Bool(true)), Some(entry)) => packages::Namespace::NamespaceWithEntry {
+                namespace: namespace_from_package,
+                entry: entry.to_string(),
+            },
+            (Some(NamespaceConfig::String(str)), None) => match str.as_str() {
+                "true" => packages::Namespace::Namespace(namespace_from_package),
+                namespace if namespace.is_case(Case::UpperFlat) => {
+                    packages::Namespace::Namespace(namespace.to_string())
+                }
+                namespace => packages::Namespace::Namespace(namespace.to_string().to_case(Case::Pascal)),
+            },
+            (Some(self::NamespaceConfig::String(str)), Some(entry)) => match str.as_str() {
+                "true" => packages::Namespace::NamespaceWithEntry {
+                    namespace: namespace_from_package,
+                    entry: entry.to_string(),
+                },
+                namespace if namespace.is_case(Case::UpperFlat) => packages::Namespace::NamespaceWithEntry {
+                    namespace: namespace.to_string(),
+                    entry: entry.to_string(),
+                },
+                namespace => packages::Namespace::NamespaceWithEntry {
+                    namespace: namespace.to_string().to_case(Case::Pascal),
+                    entry: entry.to_string(),
+                },
+            },
+        }
+    }
+
+    pub fn get_jsx_args(&self) -> Vec<String> {
+        match (self.reason.to_owned(), self.jsx.to_owned()) {
+            (_, Some(jsx)) => match jsx.version {
+                Some(version) if version == 3 || version == 4 => {
+                    vec!["-bs-jsx".to_string(), version.to_string()]
+                }
+                Some(_version) => panic!("Unsupported JSX version"),
+                None => vec![],
+            },
+            (Some(reason), None) => {
+                vec!["-bs-jsx".to_string(), format!("{}", reason.react_jsx)]
+            }
+            _ => vec![],
+        }
+    }
+
+    pub fn get_jsx_mode_args(&self) -> Vec<String> {
+        match self.jsx.to_owned() {
+            Some(jsx) => match jsx.mode {
+                Some(JsxMode::Classic) => {
+                    vec!["-bs-jsx-mode".to_string(), "classic".to_string()]
+                }
+                Some(JsxMode::Automatic) => {
+                    vec!["-bs-jsx-mode".to_string(), "automatic".to_string()]
+                }
+                None => vec![],
+            },
+            _ => vec![],
+        }
+    }
+
+    pub fn get_jsx_module_args(&self) -> Vec<String> {
+        match self.jsx.to_owned() {
+            Some(jsx) => match jsx.module {
+                Some(JsxModule::React) => {
+                    vec!["-bs-jsx-module".to_string(), "react".to_string()]
+                }
+                None => vec![],
+            },
+            _ => vec![],
+        }
+    }
+
+    pub fn get_uncurried_args(&self, version: &str) -> Vec<String> {
+        if check_if_rescript11_or_higher(version) {
+            match self.uncurried.to_owned() {
+                // v11 is always uncurried except if explicitly set to false in the root rescript.json
+                Some(false) => vec![],
+                _ => vec!["-uncurried".to_string()],
+            }
+        } else {
+            vec![]
+        }
+    }
+
+    pub fn get_module(&self) -> String {
+        match &self.package_specs {
+            Some(OneOrMore::Single(PackageSpec { module, .. })) => module.to_string(),
+            Some(OneOrMore::Multiple(vec)) => match vec.first() {
+                Some(PackageSpec { module, .. }) => module.to_string(),
+                None => "commonjs".to_string(),
+            },
+            _ => "commonjs".to_string(),
+        }
+    }
+
+    pub fn get_suffix(&self) -> String {
+        match &self.package_specs {
+            Some(OneOrMore::Single(PackageSpec { suffix, .. })) => suffix.to_owned(),
+            Some(OneOrMore::Multiple(vec)) => match vec.first() {
+                Some(PackageSpec { suffix, .. }) => suffix.to_owned(),
+                None => None,
+            },
+            _ => None,
+        }
+        .or(self.suffix.to_owned())
+        .unwrap_or(".js".to_string())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_getters() {
+        let json = r#"
+        {
+            "name": "my-monorepo",
+            "sources": [ { "dir": "src/", "subdirs": true } ],
+            "package-specs": [ { "module": "es6", "in-source": true } ],
+            "suffix": ".mjs",
+            "pinned-dependencies": [ "@teamwalnut/app" ],
+            "bs-dependencies": [ "@teamwalnut/app" ]
+        }
+        "#;
+
+        let config = serde_json::from_str::<Config>(json).unwrap();
+        assert_eq!(config.get_suffix(), ".mjs");
+        assert_eq!(config.get_module(), "es6");
+    }
+}
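A usage sketch (illustrative): `get_module` takes the first `package-specs` entry, while `get_suffix` falls back from the package spec to the top-level `suffix` and finally to `.js`:

```rust
fn main() {
    let config = bsconfig::read("bsconfig.json".to_string());
    // With the test config above: module is "es6" (first package spec),
    // suffix is ".mjs" (top-level fallback, since the spec sets none).
    println!("{} / {}", config.get_module(), config.get_suffix());
}
```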
diff --git a/rewatch/src/build.rs b/rewatch/src/build.rs
new file mode 100644
index 0000000000..082bc6cf91
--- /dev/null
+++ b/rewatch/src/build.rs
@@ -0,0 +1,442 @@
+pub mod build_types;
+pub mod clean;
+pub mod compile;
+pub mod deps;
+pub mod logs;
+pub mod namespaces;
+pub mod packages;
+pub mod parse;
+pub mod read_compile_state;
+
+use crate::build::compile::{mark_modules_with_deleted_deps_dirty, mark_modules_with_expired_deps_dirty};
+use crate::helpers::emojis::*;
+use crate::helpers::{self, get_workspace_root};
+use crate::sourcedirs;
+use ahash::AHashSet;
+use build_types::*;
+use console::style;
+use indicatif::{ProgressBar, ProgressStyle};
+use serde::Serialize;
+use std::fmt;
+use std::io::{stdout, Write};
+use std::path::PathBuf;
+use std::time::{Duration, Instant};
+
+use self::compile::compiler_args;
+use self::parse::parser_args;
+
+fn is_dirty(module: &Module) -> bool {
+    match module.source_type {
+        SourceType::SourceFile(SourceFile {
+            implementation: Implementation {
+                parse_dirty: true, ..
+            },
+            ..
+        }) => true,
+        SourceType::SourceFile(SourceFile {
+            interface: Some(Interface {
+                parse_dirty: true, ..
+            }),
+            ..
+        }) => true,
+        SourceType::SourceFile(_) => false,
+        SourceType::MlMap(MlMap {
+            parse_dirty: dirty, ..
+        }) => dirty,
+    }
+}
+
+#[derive(Serialize, Debug, Clone)]
+pub struct CompilerArgs {
+    pub compiler_args: Vec<String>,
+    pub parser_args: Vec<String>,
+}
+
+pub fn get_compiler_args(path: &str, rescript_version: Option<String>) -> String {
+    let filename = &helpers::get_abs_path(path);
+    let package_root = helpers::get_abs_path(
+        &helpers::get_nearest_bsconfig(&std::path::PathBuf::from(path)).expect("Couldn't find package root"),
+    );
+    let workspace_root = get_workspace_root(&package_root).map(|p| helpers::get_abs_path(&p));
+    let root_rescript_config =
+        packages::read_bsconfig(&workspace_root.to_owned().unwrap_or(package_root.to_owned()));
+    let rescript_config = packages::read_bsconfig(&package_root);
+    let rescript_version = if let Some(rescript_version) = rescript_version {
+        rescript_version
+    } else {
+        let bsc_path = helpers::get_bsc(&package_root, workspace_root.to_owned());
+        helpers::get_rescript_version(&bsc_path)
+    };
+    // make PathBuf from package root and get the relative path for filename
+    let relative_filename = PathBuf::from(&filename)
+        .strip_prefix(PathBuf::from(&package_root).parent().unwrap())
+        .unwrap()
+        .to_string_lossy()
+        .to_string();
+
+    let file_path = PathBuf::from(&package_root).join(filename);
+    let contents = helpers::read_file(&file_path).expect("Error reading file");
+
+    let (ast_path, parser_args) = parser_args(
+        &rescript_config,
+        &root_rescript_config,
+        &relative_filename,
+        &rescript_version,
+        &workspace_root,
+        workspace_root.as_ref().unwrap_or(&package_root),
+        &contents,
+    );
+    let is_interface = filename.ends_with('i');
+    let has_interface = if is_interface {
+        true
+    } else {
+        let mut interface_filename = filename.to_string();
+        interface_filename.push('i');
+        PathBuf::from(&interface_filename).exists()
+    };
+    let compiler_args = compiler_args(
+        &rescript_config,
+        &root_rescript_config,
+        &ast_path,
+        &rescript_version,
+        &relative_filename,
+        is_interface,
+        has_interface,
+        &package_root,
+        &workspace_root,
+        &None,
+    );
+    serde_json::to_string_pretty(&CompilerArgs {
+        compiler_args,
+        parser_args,
+    })
+    .unwrap()
+}
+
+#[derive(Debug, Clone)]
+pub enum InitializeBuildError {
+    PackageDependencyValidation,
+}
+
+impl fmt::Display for InitializeBuildError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Self::PackageDependencyValidation => write!(
+                f,
+                "{} {}Could not Validate Package Dependencies",
+                LINE_CLEAR, CROSS,
+            ),
+        }
+    }
+}
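Because `CompilerArgs` derives `Serialize`, the function returns pretty-printed JSON that editor tooling can consume. A sketch (illustrative; assumes the crate is named `rewatch` as in this PR):

```rust
fn main() {
    // Prints {"compiler_args": [...], "parser_args": [...]} for one source file.
    let json = rewatch::build::get_compiler_args("src/App.res", None);
    println!("{json}");
}
```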
+
+pub fn initialize_build(
+    default_timing: Option<Duration>,
+    filter: &Option<regex::Regex>,
+    path: &str,
+) -> Result<BuildState, InitializeBuildError> {
+    let project_root = helpers::get_abs_path(path);
+    let workspace_root = helpers::get_workspace_root(&project_root);
+    let bsc_path = helpers::get_bsc(&project_root, workspace_root.to_owned());
+    let root_config_name = packages::get_package_name(&project_root);
+    let rescript_version = helpers::get_rescript_version(&bsc_path);
+
+    print!("{}{}Building package tree...", style("[1/7]").bold().dim(), TREE);
+    let _ = stdout().flush();
+    let timing_package_tree = Instant::now();
+    let packages = packages::make(filter, &project_root, &workspace_root);
+    let timing_package_tree_elapsed = timing_package_tree.elapsed();
+
+    println!(
+        "{}{} {}Built package tree in {:.2}s",
+        LINE_CLEAR,
+        style("[1/7]").bold().dim(),
+        TREE,
+        default_timing
+            .unwrap_or(timing_package_tree_elapsed)
+            .as_secs_f64()
+    );
+
+    if !packages::validate_packages_dependencies(&packages) {
+        return Err(InitializeBuildError::PackageDependencyValidation);
+    }
+
+    let timing_source_files = Instant::now();
+
+    print!(
+        "{} {}Finding source files...",
+        style("[2/7]").bold().dim(),
+        LOOKING_GLASS
+    );
+    let _ = stdout().flush();
+    let mut build_state = BuildState::new(
+        project_root,
+        root_config_name,
+        packages,
+        workspace_root,
+        rescript_version,
+        bsc_path,
+    );
+    packages::parse_packages(&mut build_state);
+    let timing_source_files_elapsed = timing_source_files.elapsed();
+    println!(
+        "{}{} {}Found source files in {:.2}s",
+        LINE_CLEAR,
+        style("[2/7]").bold().dim(),
+        LOOKING_GLASS,
+        default_timing
+            .unwrap_or(timing_source_files_elapsed)
+            .as_secs_f64()
+    );
+
+    print!(
+        "{} {}Reading compile state...",
+        style("[3/7]").bold().dim(),
+        COMPILE_STATE
+    );
+    let _ = stdout().flush();
+    let timing_compile_state = Instant::now();
+    let compile_assets_state = read_compile_state::read(&mut build_state);
+    let timing_compile_state_elapsed = timing_compile_state.elapsed();
+    println!(
+        "{}{} {}Read compile state {:.2}s",
+        LINE_CLEAR,
+        style("[3/7]").bold().dim(),
+        COMPILE_STATE,
+        default_timing
+            .unwrap_or(timing_compile_state_elapsed)
+            .as_secs_f64()
+    );
+
+    print!(
+        "{} {}Cleaning up previous build...",
+        style("[4/7]").bold().dim(),
+        SWEEP
+    );
+    let timing_cleanup = Instant::now();
+    let (diff_cleanup, total_cleanup) = clean::cleanup_previous_build(&mut build_state, compile_assets_state);
+    let timing_cleanup_elapsed = timing_cleanup.elapsed();
+    println!(
+        "{}{} {}Cleaned {}/{} {:.2}s",
+        LINE_CLEAR,
+        style("[4/7]").bold().dim(),
+        SWEEP,
+        diff_cleanup,
+        total_cleanup,
+        default_timing.unwrap_or(timing_cleanup_elapsed).as_secs_f64()
+    );
+    Ok(build_state)
+}
+
+fn format_step(current: usize, total: usize) -> console::StyledObject<String> {
+    style(format!("[{}/{}]", current, total)).bold().dim()
+}
+
+#[derive(Debug, Clone)]
+pub enum IncrementalBuildError {
+    SourceFileParseError,
+    CompileError,
+}
+
+impl fmt::Display for IncrementalBuildError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Self::SourceFileParseError => write!(f, "{} {}Could not parse Source Files", LINE_CLEAR, CROSS,),
+            Self::CompileError => write!(f, "{} {}Failed to Compile. See Errors Above", LINE_CLEAR, CROSS,),
+        }
+    }
+}
+
+pub fn incremental_build(
+    build_state: &mut BuildState,
+    default_timing: Option<Duration>,
+    initial_build: bool,
+    only_incremental: bool,
+    create_sourcedirs: bool,
+) -> Result<(), IncrementalBuildError> {
+    logs::initialize(&build_state.packages);
+    let num_dirty_modules = build_state.modules.values().filter(|m| is_dirty(m)).count() as u64;
+    let pb = ProgressBar::new(num_dirty_modules);
+    let mut current_step = if only_incremental { 1 } else { 5 };
+    let total_steps = if only_incremental { 3 } else { 7 };
+    pb.set_style(
+        ProgressStyle::with_template(&format!(
+            "{} {}Parsing... {{spinner}} {{pos}}/{{len}} {{msg}}",
+            format_step(current_step, total_steps),
+            CODE
+        ))
+        .unwrap(),
+    );
+
+    let timing_ast = Instant::now();
+    let result_asts = parse::generate_asts(build_state, || pb.inc(1));
+    let timing_ast_elapsed = timing_ast.elapsed();
+
+    match result_asts {
+        Ok(err) => {
+            println!(
+                "{}{} {}Parsed {} source files in {:.2}s",
+                LINE_CLEAR,
+                format_step(current_step, total_steps),
+                CODE,
+                num_dirty_modules,
+                default_timing.unwrap_or(timing_ast_elapsed).as_secs_f64()
+            );
+            print!("{}", &err);
+        }
+        Err(err) => {
+            logs::finalize(&build_state.packages);
+            println!(
+                "{}{} {}Error parsing source files in {:.2}s",
+                LINE_CLEAR,
+                format_step(current_step, total_steps),
+                CROSS,
+                default_timing.unwrap_or(timing_ast_elapsed).as_secs_f64()
+            );
+            print!("{}", &err);
+            return Err(IncrementalBuildError::SourceFileParseError);
+        }
+    }
+    let timing_deps = Instant::now();
+    deps::get_deps(build_state, &build_state.deleted_modules.to_owned());
+    let timing_deps_elapsed = timing_deps.elapsed();
+    current_step += 1;
+
+    println!(
+        "{}{} {}Collected deps in {:.2}s",
+        LINE_CLEAR,
+        format_step(current_step, total_steps),
+        DEPS,
+        default_timing.unwrap_or(timing_deps_elapsed).as_secs_f64()
+    );
+
+    // track the compile dirty state, we reset it when the compile fails
+    let mut tracked_dirty_modules = AHashSet::new();
+    for (module_name, module) in build_state.modules.iter() {
+        if module.compile_dirty {
+            tracked_dirty_modules.insert(module_name.to_owned());
+        }
+    }
+    if initial_build {
+        // repair broken state
+        mark_modules_with_expired_deps_dirty(build_state);
+    }
+    mark_modules_with_deleted_deps_dirty(build_state);
+    current_step += 1;
+
+    // print all the compile_dirty modules
+    // for (module_name, module) in build_state.modules.iter() {
+    //     if module.compile_dirty {
+    //         println!("compile dirty: {}", module_name);
+    //     }
+    // }
+
+    let start_compiling = Instant::now();
+    let pb = ProgressBar::new(build_state.modules.len().try_into().unwrap());
+    pb.set_style(
+        ProgressStyle::with_template(&format!(
+            "{} {}Compiling... {{spinner}} {{pos}}/{{len}} {{msg}}",
+            format_step(current_step, total_steps),
+            SWORDS
+        ))
+        .unwrap(),
+    );
+    let (compile_errors, compile_warnings, num_compiled_modules) =
+        compile::compile(build_state, || pb.inc(1), |size| pb.set_length(size));
+    let compile_duration = start_compiling.elapsed();
+
+    logs::finalize(&build_state.packages);
+    if create_sourcedirs {
+        sourcedirs::print(&build_state);
+    }
+    pb.finish();
+    if !compile_errors.is_empty() {
+        if helpers::contains_ascii_characters(&compile_warnings) {
+            println!("{}", &compile_warnings);
+        }
+        println!(
+            "{}{} {}Compiled {} modules in {:.2}s",
+            LINE_CLEAR,
+            format_step(current_step, total_steps),
+            CROSS,
+            num_compiled_modules,
+            default_timing.unwrap_or(compile_duration).as_secs_f64()
+        );
+        print!("{}", &compile_errors);
+        // mark the original files as dirty again, because we didn't complete a full build
+        for (module_name, module) in build_state.modules.iter_mut() {
+            if tracked_dirty_modules.contains(module_name) {
+                module.compile_dirty = true;
+            }
+        }
+        Err(IncrementalBuildError::CompileError)
+    } else {
+        println!(
+            "{}{} {}Compiled {} modules in {:.2}s",
+            LINE_CLEAR,
+            format_step(current_step, total_steps),
+            SWORDS,
+            num_compiled_modules,
+            default_timing.unwrap_or(compile_duration).as_secs_f64()
+        );
+        if helpers::contains_ascii_characters(&compile_warnings) {
+            print!("{}", &compile_warnings);
+        }
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum BuildError {
+    InitializeBuild(InitializeBuildError),
+    IncrementalBuild(IncrementalBuildError),
+}
+
+impl fmt::Display for BuildError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Self::InitializeBuild(e) => {
+                write!(f, "{} {}Error Initializing Build: {}", LINE_CLEAR, CROSS, e)
+            }
+            Self::IncrementalBuild(e) => write!(
+                f,
+                "{} {}Error Running Incremental Build: {}",
+                LINE_CLEAR, CROSS, e
+            ),
+        }
+    }
+}
+
+pub fn build(
+    filter: &Option<regex::Regex>,
+    path: &str,
+    no_timing: bool,
+    create_sourcedirs: bool,
+) -> Result<BuildState, BuildError> {
+    let default_timing: Option<std::time::Duration> = if no_timing {
+        Some(std::time::Duration::new(0.0 as u64, 0.0 as u32))
+    } else {
+        None
+    };
+    let timing_total = Instant::now();
+    let mut build_state =
+        initialize_build(default_timing, filter, path).map_err(BuildError::InitializeBuild)?;
+
+    match incremental_build(&mut build_state, default_timing, true, false, create_sourcedirs) {
+        Ok(_) => {
+            let timing_total_elapsed = timing_total.elapsed();
+            println!(
+                "\n{}{}Finished Compilation in {:.2}s",
+                LINE_CLEAR,
+                SPARKLES,
+                default_timing.unwrap_or(timing_total_elapsed).as_secs_f64()
+            );
+            clean::cleanup_after_build(&build_state);
+            Ok(build_state)
+        }
+        Err(e) => {
+            clean::cleanup_after_build(&build_state);
+            Err(BuildError::IncrementalBuild(e))
+        }
+    }
+}
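A usage sketch of the public entry point (illustrative; this mirrors what a CLI front end would do, assuming the crate is named `rewatch`):

```rust
fn main() {
    // Build the project rooted at the current directory, with timings,
    // no module filter, and no sourcedirs output; exit non-zero on failure.
    match rewatch::build::build(&None, ".", false, false) {
        Ok(_state) => (),
        Err(e) => {
            eprintln!("{e}");
            std::process::exit(1);
        }
    }
}
```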
diff --git a/rewatch/src/build/build_types.rs b/rewatch/src/build/build_types.rs
new file mode 100644
index 0000000000..4dc6cfad80
--- /dev/null
+++ b/rewatch/src/build/build_types.rs
@@ -0,0 +1,144 @@
+use crate::build::packages::{Namespace, Package};
+use ahash::{AHashMap, AHashSet};
+use std::time::SystemTime;
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum ParseState {
+    Pending,
+    ParseError,
+    Warning,
+    Success,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum CompileState {
+    Pending,
+    Error,
+    Warning,
+    Success,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct Interface {
+    pub path: String,
+    pub parse_state: ParseState,
+    pub compile_state: CompileState,
+    pub last_modified: SystemTime,
+    pub parse_dirty: bool,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct Implementation {
+    pub path: String,
+    pub parse_state: ParseState,
+    pub compile_state: CompileState,
+    pub last_modified: SystemTime,
+    pub parse_dirty: bool,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct SourceFile {
+    pub implementation: Implementation,
+    pub interface: Option<Interface>,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub struct MlMap {
+    pub parse_dirty: bool,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum SourceType {
+    SourceFile(SourceFile),
+    MlMap(MlMap),
+}
+
+#[derive(Debug, Clone)]
+pub struct Module {
+    pub source_type: SourceType,
+    pub deps: AHashSet<String>,
+    pub dependents: AHashSet<String>,
+    pub package_name: String,
+    pub compile_dirty: bool,
+    pub last_compiled_cmi: Option<SystemTime>,
+    pub last_compiled_cmt: Option<SystemTime>,
+}
+
+impl Module {
+    pub fn is_mlmap(&self) -> bool {
+        matches!(self.source_type, SourceType::MlMap(_))
+    }
+
+    pub fn get_interface(&self) -> &Option<Interface> {
+        match &self.source_type {
+            SourceType::SourceFile(source_file) => &source_file.interface,
+            _ => &None,
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct BuildState {
+    pub modules: AHashMap<String, Module>,
+    pub packages: AHashMap<String, Package>,
+    pub module_names: AHashSet<String>,
+    pub project_root: String,
+    pub root_config_name: String,
+    pub deleted_modules: AHashSet<String>,
+    pub rescript_version: String,
+    pub bsc_path: String,
+    pub workspace_root: Option<String>,
+    pub deps_initialized: bool,
+}
+
+impl BuildState {
+    pub fn get_package(&self, package_name: &str) -> Option<&Package> {
+        self.packages.get(package_name)
+    }
+
+    pub fn get_module(&self, module_name: &str) -> Option<&Module> {
+        self.modules.get(module_name)
+    }
+
+    pub fn new(
+        project_root: String,
+        root_config_name: String,
+        packages: AHashMap<String, Package>,
+        workspace_root: Option<String>,
+        rescript_version: String,
+        bsc_path: String,
+    ) -> Self {
+        Self {
+            module_names: AHashSet::new(),
+            modules: AHashMap::new(),
+            packages,
+            project_root,
+            root_config_name,
+            deleted_modules: AHashSet::new(),
+            workspace_root,
+            rescript_version,
+            bsc_path,
+            deps_initialized: false,
+        }
+    }
+
+    pub fn insert_module(&mut self, module_name: &str, module: Module) {
+        self.modules.insert(module_name.to_owned(), module);
+        self.module_names.insert(module_name.to_owned());
+    }
+}
+
+pub struct AstModule {
+    pub module_name: String,
+    pub package_name: String,
+    pub namespace: Namespace,
+    pub last_modified: SystemTime,
+    pub ast_file_path: String,
+    pub is_root: bool,
+    pub suffix: String,
+}
+
+pub struct CompileAssetsState {
+    pub ast_modules: AHashMap<String, AstModule>,
+    pub cmi_modules: AHashMap<String, SystemTime>,
+    pub cmt_modules: AHashMap<String, SystemTime>,
+    pub ast_rescript_file_locations: AHashSet<String>,
+    pub rescript_file_locations: AHashSet<String>,
+}
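A small sketch (illustrative) of the invariant `insert_module` maintains: the `modules` map and the `module_names` index stay in sync, so later passes can iterate names without borrowing the whole map:

```rust
use rewatch::build::build_types::{BuildState, Module};

fn register(state: &mut BuildState, module: Module) {
    state.insert_module("MyModule", module);
    assert!(state.module_names.contains("MyModule"));
    assert!(state.get_module("MyModule").is_some());
}
```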
diff --git a/rewatch/src/build/clean.rs b/rewatch/src/build/clean.rs
new file mode 100644
index 0000000000..e4bde12a00
--- /dev/null
+++ b/rewatch/src/build/clean.rs
@@ -0,0 +1,387 @@
+use super::build_types::*;
+use super::packages;
+use crate::helpers;
+use crate::helpers::emojis::*;
+use ahash::AHashSet;
+use console::style;
+use rayon::prelude::*;
+use std::io::Write;
+use std::time::Instant;
+
+fn remove_ast(package: &packages::Package, source_file: &str) {
+    let _ = std::fs::remove_file(helpers::get_compiler_asset(
+        package,
+        &packages::Namespace::NoNamespace,
+        source_file,
+        "ast",
+    ));
+}
+
+fn remove_iast(package: &packages::Package, source_file: &str) {
+    let _ = std::fs::remove_file(helpers::get_compiler_asset(
+        package,
+        &packages::Namespace::NoNamespace,
+        source_file,
+        "iast",
+    ));
+}
+
+fn remove_mjs_file(source_file: &str, suffix: &String) {
+    let _ = std::fs::remove_file(helpers::change_extension(
+        source_file,
+        // suffix.to_string includes the ., so we need to remove it
+        &suffix.to_string()[1..],
+    ));
+}
+
+fn remove_compile_asset(package: &packages::Package, source_file: &str, extension: &str) {
+    let _ = std::fs::remove_file(helpers::get_compiler_asset(
+        package,
+        &package.namespace,
+        source_file,
+        extension,
+    ));
+    let _ = std::fs::remove_file(helpers::get_bs_compiler_asset(
+        package,
+        &package.namespace,
+        source_file,
+        extension,
+    ));
+}
+
+pub fn remove_compile_assets(package: &packages::Package, source_file: &str) {
+    // optimization
+    // only issue cmti if there is an interface file
+    for extension in &["cmj", "cmi", "cmt", "cmti"] {
+        remove_compile_asset(package, source_file, extension);
+    }
+}
+
+pub fn clean_mjs_files(build_state: &BuildState) {
+    // get all rescript file locations
+    let rescript_file_locations = build_state
+        .modules
+        .values()
+        .filter_map(|module| match &module.source_type {
+            SourceType::SourceFile(source_file) => {
+                let package = build_state.packages.get(&module.package_name).unwrap();
+                let root_package = build_state
+                    .packages
+                    .get(&build_state.root_config_name)
+                    .expect("Could not find root package");
+                Some((
+                    std::path::PathBuf::from(package.path.to_string())
+                        .join(&source_file.implementation.path)
+                        .to_string_lossy()
+                        .to_string(),
+                    root_package.bsconfig.get_suffix(),
+                ))
+            }
+            _ => None,
+        })
+        .collect::<Vec<(String, String)>>();
+
+    rescript_file_locations
+        .par_iter()
+        .for_each(|(rescript_file_location, suffix)| remove_mjs_file(rescript_file_location, suffix));
+}
+
+// TODO: change to scan_previous_build => CompileAssetsState
+// and then do cleanup on that state (for instance remove all .mjs files that are not in the state)
+
+pub fn cleanup_previous_build(
+    build_state: &mut BuildState,
+    compile_assets_state: CompileAssetsState,
+) -> (usize, usize) {
+    // delete the .mjs files which appear in our previous compile assets
+    // but do not exist anymore
+    // delete the compiler assets for which modules we can't find a rescript file
+    // location of rescript file is in the AST
+    // delete the .mjs file for which we DO have a compiler asset, but don't have a
+    // rescript file anymore (path is found in the .ast file)
+    let diff = compile_assets_state
+        .ast_rescript_file_locations
+        .difference(&compile_assets_state.rescript_file_locations)
+        .collect::<Vec<&String>>();
+
+    let diff_len = diff.len();
+
+    let deleted_interfaces = diff
+        .par_iter()
+        .map(|res_file_location| {
+            let AstModule {
+                module_name,
+                package_name,
+                ast_file_path,
+                suffix,
+                ..
+            } = compile_assets_state
+                .ast_modules
+                .get(&res_file_location.to_string())
+                .expect("Could not find module name for ast file");
+
+            let package = build_state
+                .packages
+                .get(package_name)
+                .expect("Could not find package");
+            remove_compile_assets(package, res_file_location);
+            remove_mjs_file(res_file_location, &suffix);
+            remove_iast(package, res_file_location);
+            remove_ast(package, res_file_location);
+            match helpers::get_extension(ast_file_path).as_str() {
+                "iast" => Some(module_name.to_owned()),
+                "ast" => None,
+                _ => None,
+            }
+        })
+        .collect::<Vec<Option<String>>>()
+        .iter()
+        .filter_map(|module_name| module_name.to_owned())
+        .collect::<AHashSet<String>>();
+
+    compile_assets_state
+        .ast_rescript_file_locations
+        .intersection(&compile_assets_state.rescript_file_locations)
+        .for_each(|res_file_location| {
+            let AstModule {
+                module_name,
+                last_modified: ast_last_modified,
+                ast_file_path,
+                ..
+            } = compile_assets_state
+                .ast_modules
+                .get(res_file_location)
+                .expect("Could not find module name for ast file");
+            let module = build_state
+                .modules
+                .get_mut(module_name)
+                .expect("Could not find module for ast file");
+
+            let compile_dirty = compile_assets_state.cmi_modules.get(module_name);
+            if let Some(compile_dirty) = compile_dirty {
+                let last_modified = Some(ast_last_modified);
+
+                if let Some(last_modified) = last_modified {
+                    if compile_dirty > last_modified && !deleted_interfaces.contains(module_name) {
+                        module.compile_dirty = false;
+                    }
+                }
+            }
+
+            match &mut module.source_type {
+                SourceType::MlMap(_) => unreachable!("MlMap is not matched with a ReScript file"),
+                SourceType::SourceFile(source_file) => {
+                    if helpers::is_interface_ast_file(ast_file_path) {
+                        let interface = source_file
+                            .interface
+                            .as_mut()
+                            .expect("Could not find interface for module");
+
+                        let source_last_modified = interface.last_modified;
+                        if ast_last_modified > &source_last_modified {
+                            interface.parse_dirty = false;
+                        }
+                    } else {
+                        let implementation = &mut source_file.implementation;
+                        let source_last_modified = implementation.last_modified;
+                        if ast_last_modified > &source_last_modified
+                            && !deleted_interfaces.contains(module_name)
+                        {
+                            implementation.parse_dirty = false;
+                        }
+                    }
+                }
+            }
+        });
+
+    compile_assets_state
+        .cmi_modules
+        .iter()
+        .for_each(|(module_name, last_modified)| {
+            if let Some(module) = build_state.modules.get_mut(module_name) {
+                module.last_compiled_cmi = Some(*last_modified);
+            }
+        });
+
+    compile_assets_state
+        .cmt_modules
+        .iter()
+        .for_each(|(module_name, last_modified)| {
+            if let Some(module) = build_state.modules.get_mut(module_name) {
+                module.last_compiled_cmt = Some(*last_modified);
+            }
+        });
+
+    let ast_module_names = compile_assets_state
+        .ast_modules
+        .values()
+        .filter_map(
+            |AstModule {
+                 module_name,
+                 ast_file_path,
+                 ..
+             }| {
+                match helpers::get_extension(ast_file_path).as_str() {
+                    "iast" => None,
+                    "ast" => Some(module_name),
+                    _ => None,
+                }
+            },
+        )
+        .collect::<AHashSet<&String>>();
+
+    let all_module_names = build_state.modules.keys().collect::<AHashSet<&String>>();
+
+    let deleted_module_names = ast_module_names
+        .difference(&all_module_names)
+        .map(|module_name| {
+            // if the module is a namespace, we need to mark the whole namespace as dirty when a module has been deleted
+            if let Some(namespace) = helpers::get_namespace_from_module_name(module_name) {
+                return namespace;
+            }
+            module_name.to_string()
+        })
+        .collect::<AHashSet<String>>();
+
+    build_state.deleted_modules = deleted_module_names;
+
+    (diff_len, compile_assets_state.ast_rescript_file_locations.len())
+}
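The stale-artifact detection above is plain set algebra over file locations; a reduced sketch (illustrative paths):

```rust
use ahash::AHashSet;

fn main() {
    let previous: AHashSet<String> =
        ["src/A.res", "src/B.res"].iter().map(|s| s.to_string()).collect();
    let current: AHashSet<String> =
        ["src/A.res"].iter().map(|s| s.to_string()).collect();

    // Assets recorded in the last build whose source no longer exists:
    let stale: Vec<&String> = previous.difference(&current).collect();
    assert_eq!(stale, vec![&"src/B.res".to_string()]);
}
```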
+
+fn has_parse_warnings(module: &Module) -> bool {
+    matches!(
+        &module.source_type,
+        SourceType::SourceFile(SourceFile {
+            implementation: Implementation {
+                parse_state: ParseState::Warning,
+                ..
+            },
+            ..
+        }) | SourceType::SourceFile(SourceFile {
+            interface: Some(Interface {
+                parse_state: ParseState::Warning,
+                ..
+            }),
+            ..
+        })
+    )
+}
+
+fn has_compile_warnings(module: &Module) -> bool {
+    matches!(
+        &module.source_type,
+        SourceType::SourceFile(SourceFile {
+            implementation: Implementation {
+                compile_state: CompileState::Warning,
+                ..
+            },
+            ..
+        }) | SourceType::SourceFile(SourceFile {
+            interface: Some(Interface {
+                compile_state: CompileState::Warning,
+                ..
+            }),
+            ..
+        })
+    )
+}
+
+pub fn cleanup_after_build(build_state: &BuildState) {
+    build_state.modules.par_iter().for_each(|(_module_name, module)| {
+        let package = build_state.get_package(&module.package_name).unwrap();
+        if has_parse_warnings(module) {
+            if let SourceType::SourceFile(source_file) = &module.source_type {
+                remove_iast(package, &source_file.implementation.path);
+                remove_ast(package, &source_file.implementation.path);
+            }
+        }
+        if has_compile_warnings(module) {
+            // only retain the AST file if the compilation doesn't have warnings; we remove the AST rather
+            // than the CMI/CMT/CMJ files because if we delete those, the editor tooling doesn't
+            // work anymore. If we remove the intermediate AST file, the editor tooling will
+            // work, and we have an indication that we need to recompile the file.
+            //
+            // Recompiling this takes a bit more time, because we have to parse again, but
+            // if we have warnings it's usually not a lot of files so the additional
+            // latency shouldn't be too bad
+            match &module.source_type {
+                SourceType::SourceFile(source_file) => {
+                    // we only clean the ast here; this will cause the file to be recompiled
+                    // (and thus keep showing the warning), but it will keep the cmi file, so that we don't
+                    // unnecessarily mark all the dependents as dirty when there is no change in the interface
+                    remove_ast(package, &source_file.implementation.path);
+                    remove_iast(package, &source_file.implementation.path);
+                }
+                SourceType::MlMap(_) => (),
+            }
+        }
+    });
+}
+
+pub fn clean(path: &str) {
+    let project_root = helpers::get_abs_path(path);
+    let workspace_root = helpers::get_workspace_root(&project_root);
+    let packages = packages::make(&None, &project_root, &workspace_root);
+    let root_config_name = packages::get_package_name(&project_root);
+    let bsc_path = helpers::get_bsc(&project_root, workspace_root.to_owned());
+    let rescript_version = helpers::get_rescript_version(&bsc_path);
+
+    let timing_clean_compiler_assets = Instant::now();
+    print!(
+        "{} {} Cleaning compiler assets...",
+        style("[1/2]").bold().dim(),
+        SWEEP
+    );
+    std::io::stdout().flush().unwrap();
+    packages.iter().for_each(|(_, package)| {
+        print!(
+            "{}{} {} Cleaning {}...",
+            LINE_CLEAR,
+            style("[1/2]").bold().dim(),
+            SWEEP,
+            package.name
+        );
+        std::io::stdout().flush().unwrap();
+
+        let path_str = package.get_build_path();
+        let path = std::path::Path::new(&path_str);
+        let _ = std::fs::remove_dir_all(path);
+
+        let path_str = package.get_bs_build_path();
+        let path = std::path::Path::new(&path_str);
+        let _ = std::fs::remove_dir_all(path);
+    });
+    let timing_clean_compiler_assets_elapsed = timing_clean_compiler_assets.elapsed();
+
+    println!(
+        "{}{} {}Cleaned compiler assets in {:.2}s",
+        LINE_CLEAR,
+        style("[1/2]").bold().dim(),
+        SWEEP,
+        timing_clean_compiler_assets_elapsed.as_secs_f64()
+    );
+    std::io::stdout().flush().unwrap();
+
+    let timing_clean_mjs = Instant::now();
+    print!("{} {} Cleaning mjs files...", style("[2/2]").bold().dim(), SWEEP);
+    std::io::stdout().flush().unwrap();
+    let mut build_state = BuildState::new(
+        project_root.to_owned(),
+        root_config_name,
+        packages,
+        workspace_root,
+        rescript_version,
+        bsc_path,
+    );
+    packages::parse_packages(&mut build_state);
+    clean_mjs_files(&build_state);
+    let timing_clean_mjs_elapsed = timing_clean_mjs.elapsed();
+    println!(
+        "{}{} {}Cleaned mjs files in {:.2}s",
+        LINE_CLEAR,
+        style("[2/2]").bold().dim(),
+        SWEEP,
+        timing_clean_mjs_elapsed.as_secs_f64()
+    );
+    std::io::stdout().flush().unwrap();
+}
diff --git a/rewatch/src/build/compile.rs b/rewatch/src/build/compile.rs
new file mode 100644
index 0000000000..cdf16953e5
--- /dev/null
+++ b/rewatch/src/build/compile.rs
@@ -0,0 +1,741 @@
+#![allow(clippy::too_many_arguments)]
+
+mod dependency_cycle;
+
+use super::build_types::*;
+use super::logs;
+use super::packages;
+use crate::bsconfig;
+use crate::helpers;
+use ahash::{AHashMap, AHashSet};
+use console::style;
+use log::debug;
+use log::{info, log_enabled, Level::Info};
+use rayon::prelude::*;
+use std::path::Path;
+use std::process::Command;
+use std::time::SystemTime;
+
+pub fn compile(
+    build_state: &mut BuildState,
+    inc: impl Fn() + std::marker::Sync,
+    set_length: impl Fn(u64),
+) -> (String, String, usize) {
+    let mut compiled_modules = AHashSet::<String>::new();
+    let dirty_modules = build_state
+        .modules
+        .iter()
+        .filter_map(|(module_name, module)| {
+            if module.compile_dirty {
+                Some(module_name.to_owned())
+            } else {
+                None
+            }
+        })
+        .collect::<AHashSet<String>>();
+
+    // dirty_modules.iter().for_each(|m| println!("dirty module: {}", m));
+    // println!("{} dirty modules", dirty_modules.len());
+    let mut sorted_dirty_modules = dirty_modules.iter().collect::<Vec<&String>>();
+    sorted_dirty_modules.sort();
+    // dirty_modules.iter().for_each(|m| println!("dirty module: {}", m));
+    // sorted_dirty_modules
+    //     .iter()
+    //     .for_each(|m| println!("dirty module: {}", m));
+
+    // for sure clean modules -- after checking the hash of the cmi
+    let mut clean_modules = AHashSet::<String>::new();
+
+    // TODO: calculate the real dirty modules from the original dirty modules in each iteration,
+    // taking into account the modules that we know are clean, so they don't propagate through the
+    // deps graph
+    // create a hashset of all clean modules from the file-hashes
+    let mut loop_count = 0;
+    let mut files_total_count = compiled_modules.len();
+    let mut files_current_loop_count;
+    let mut compile_errors = "".to_string();
+    let mut compile_warnings = "".to_string();
+    let mut num_compiled_modules = 0;
+    let mut sorted_modules = build_state.module_names.iter().collect::<Vec<&String>>();
+    sorted_modules.sort();
+
+    // this is the whole "compile universe": all modules that might be dirty
+    // we get this by expanding the dependents from the dirty modules
+
+    let mut compile_universe = dirty_modules.clone();
+    let mut current_step_modules = compile_universe.clone();
+    loop {
+        let mut dependents: AHashSet<String> = AHashSet::new();
+        for dirty_module in current_step_modules.iter() {
+            dependents.extend(build_state.get_module(dirty_module).unwrap().dependents.clone());
+        }
+
+        current_step_modules = dependents
+            .difference(&compile_universe)
+            .map(|s| s.to_string())
+            .collect::<AHashSet<String>>();
+
+        compile_universe.extend(current_step_modules.to_owned());
+        if current_step_modules.is_empty() {
+            break;
+        }
+    }
+
+    let compile_universe_count = compile_universe.len();
+    set_length(compile_universe_count as u64);
+
+    // start off with all modules that have no deps in this compile universe
+    let mut in_progress_modules = compile_universe
+        .iter()
+        .filter(|module_name| {
+            let module = build_state.get_module(module_name).unwrap();
+            module.deps.intersection(&compile_universe).count() == 0
+        })
+        .map(|module_name| module_name.to_string())
+        .collect::<AHashSet<String>>();
+
+    loop {
+        files_current_loop_count = 0;
+        loop_count += 1;
+
+        info!(
+            "Compiled: {} out of {}. Compile loop: {}",
+            files_total_count,
+            compile_universe.len(),
+            loop_count,
+        );
+
+        let current_in_progres_modules = in_progress_modules.clone();
+
+        current_in_progres_modules
+            .par_iter()
+            .map(|module_name| {
+                let module = build_state.get_module(module_name).unwrap();
+                let package = build_state
+                    .get_package(&module.package_name)
+                    .expect("Package not found");
+                // all dependencies that we care about are compiled
+                if module
+                    .deps
+                    .intersection(&compile_universe)
+                    .all(|dep| compiled_modules.contains(dep))
+                {
+                    if !module.compile_dirty {
+                        // we are sure we don't have to compile this, so we can mark it as compiled and clean
+                        return Some((module_name.to_string(), Ok(None), Some(Ok(None)), true, false));
+                    }
+                    match module.source_type.to_owned() {
+                        SourceType::MlMap(_) => {
+                            // the mlmap needs to be compiled before the files are compiled
+                            // in the same namespace, otherwise we get a compile error
+                            // this is why mlmap is compiled in the AST generation stage
+                            // compile_mlmap(&module.package, module_name, &project_root);
+                            Some((
+                                package.namespace.to_suffix().unwrap(),
+                                Ok(None),
+                                Some(Ok(None)),
+                                false,
+                                false,
+                            ))
+                        }
+                        SourceType::SourceFile(source_file) => {
+                            let cmi_path = helpers::get_compiler_asset(
+                                package,
+                                &package.namespace,
+                                &source_file.implementation.path,
+                                "cmi",
+                            );
+
+                            let cmi_digest = helpers::compute_file_hash(&cmi_path);
+
+                            let package = build_state
+                                .get_package(&module.package_name)
+                                .expect("Package not found");
+
+                            let root_package =
+                                build_state.get_package(&build_state.root_config_name).unwrap();
+
+                            let interface_result = match source_file.interface.to_owned() {
+                                Some(Interface { path, .. }) => {
+                                    let result = compile_file(
+                                        package,
+                                        root_package,
+                                        &package.get_iast_path(&path),
+                                        module,
+                                        &build_state.rescript_version,
+                                        true,
+                                        &build_state.bsc_path,
+                                        &build_state.packages,
+                                        &build_state.project_root,
+                                        &build_state.workspace_root,
+                                    );
+                                    Some(result)
+                                }
+                                _ => None,
+                            };
+                            let result = compile_file(
+                                package,
+                                root_package,
+                                &package.get_ast_path(&source_file.implementation.path),
+                                module,
+                                &build_state.rescript_version,
+                                false,
+                                &build_state.bsc_path,
+                                &build_state.packages,
+                                &build_state.project_root,
+                                &build_state.workspace_root,
+                            );
+                            // if let Err(error) = result.to_owned() {
+                            //     println!("{}", error);
+                            //     panic!("Implementation compilation error!");
+                            // }
+                            let cmi_digest_after = helpers::compute_file_hash(&cmi_path);
+
+                            // println!(
+                            //     "cmi path {}, digest: {:?} / {:?}",
+                            //     cmi_path, cmi_digest, cmi_digest_after
+                            // );
+
+                            // we want to compare both the hash of the interface and the implementation
+                            // compile assets to verify that nothing changed. We also need to check the interface
+                            // because we can include MyModule, so the modules that depend on this module might
+                            // change when this module's interface does not change, but the implementation does
+                            let is_clean_cmi = match (cmi_digest, cmi_digest_after) {
+                                (Some(cmi_digest), Some(cmi_digest_after)) => {
+                                    cmi_digest.eq(&cmi_digest_after)
+                                }
+                                _ => false,
+                            };
+
+                            Some((
+                                module_name.to_string(),
+                                result,
+                                interface_result,
+                                is_clean_cmi,
+                                true,
+                            ))
+                        }
+                    }
+                } else {
+                    None
+                }
+                .map(|res| {
+                    if !(log_enabled!(Info)) {
+                        inc();
+                    }
+                    res
+                })
+            })
+            .collect::<Vec<
+                Option<(
+                    String,
+                    Result<Option<String>, String>,
+                    Option<Result<Option<String>, String>>,
+                    bool,
+                    bool,
+                )>,
+            >>()
+            .iter()
+            .for_each(|result| match result {
+                Some((module_name, result, interface_result, is_clean, is_compiled)) => {
+                    in_progress_modules.remove(module_name);
+
+                    if *is_compiled {
+                        num_compiled_modules += 1;
+                    }
+
+                    files_current_loop_count += 1;
+                    compiled_modules.insert(module_name.to_string());
+
+                    if *is_clean {
+                        // actually add it to a list of clean modules
+                        clean_modules.insert(module_name.to_string());
+                    }
+
+                    let module_dependents = build_state.get_module(module_name).unwrap().dependents.clone();
+
+                    // if not clean -- compile modules that depend on this module
+                    for dep in module_dependents.iter() {
+                        // mark the reverse dep as dirty when the source is not clean
+                        if !*is_clean {
+                            let dep_module = build_state.modules.get_mut(dep).unwrap();
+                            // mark the reverse dep as dirty when the source is not clean
+                            dep_module.compile_dirty = true;
+                        }
+                        if !compiled_modules.contains(dep) {
+                            in_progress_modules.insert(dep.to_string());
+                        }
+                    }
+
+                    let module = build_state.modules.get_mut(module_name).unwrap();
+                    let package = build_state
+                        .packages
+                        .get(&module.package_name)
+                        .expect("Package not found");
+                    match module.source_type {
+                        SourceType::MlMap(ref mut mlmap) => {
+                            module.compile_dirty = false;
+                            mlmap.parse_dirty = false;
+                        }
+                        SourceType::SourceFile(ref mut source_file) => {
+                            match result {
+                                Ok(Some(err)) => {
+                                    source_file.implementation.compile_state = CompileState::Warning;
+                                    logs::append(package, err);
+                                    compile_warnings.push_str(err);
+                                }
+                                Ok(None) => {
+                                    source_file.implementation.compile_state = CompileState::Success;
+                                }
+                                Err(err) => {
+                                    source_file.implementation.compile_state = CompileState::Error;
+                                    logs::append(package, err);
+                                    compile_errors.push_str(err);
+                                }
+                            };
+                            match interface_result {
+                                Some(Ok(Some(err))) => {
+                                    source_file.interface.as_mut().unwrap().compile_state =
+                                        CompileState::Warning;
+                                    logs::append(package, err);
+                                    compile_warnings.push_str(err);
+                                }
+                                Some(Ok(None)) => {
+                                    if let Some(interface) = source_file.interface.as_mut() {
+                                        interface.compile_state = CompileState::Success;
+                                    }
+                                }
+                                Some(Err(err)) => {
+                                    source_file.interface.as_mut().unwrap().compile_state =
+                                        CompileState::Error;
+                                    logs::append(package, err);
+                                    compile_errors.push_str(err);
+                                }
+                                _ => (),
+                            };
+                            match (result, interface_result) {
+                                // successful compilation
+                                (Ok(None), Some(Ok(None))) | (Ok(None), None) => {
+                                    module.compile_dirty = false;
+                                    module.last_compiled_cmi = Some(SystemTime::now());
+                                    module.last_compiled_cmt = Some(SystemTime::now());
+                                }
+                                // some error or warning
+                                (Err(_), _)
+                                | (_, Some(Err(_)))
+                                | (Ok(Some(_)), _)
+                                | (_, Some(Ok(Some(_)))) => {
+                                    module.compile_dirty = true;
+                                }
+                            }
+                        }
+                    }
+                }
+                None => (),
+            });
+
+        files_total_count += files_current_loop_count;
+
+        if files_total_count == compile_universe_count {
+            break;
+        }
+        if in_progress_modules.is_empty() || in_progress_modules.eq(&current_in_progres_modules) {
+            // find the dependency cycle
+            let cycle = dependency_cycle::find(
+                &compile_universe
+                    .iter()
+                    .map(|s| (s, build_state.get_module(s).unwrap()))
+                    .collect::<Vec<(&String, &Module)>>(),
+            );
+            compile_errors.push_str(&format!(
+                "\n{}\n{}\n",
+                style("Can't continue... Found a circular dependency in your code:").red(),
+                dependency_cycle::format(&cycle)
+            ))
+        }
+        if !compile_errors.is_empty() {
+            break;
+        };
+    }
+
+    (compile_errors, compile_warnings, num_compiled_modules)
+}
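The scheduling above is a topological wavefront: a module compiles once all of its deps inside the compile universe are done, and if a pass makes no progress the remaining modules must contain a cycle. A reduced sketch (illustrative, plain std types) of that invariant:

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    // Toy graph: B depends on A, C depends on B.
    let deps: HashMap<&str, HashSet<&str>> = HashMap::from([
        ("A", HashSet::new()),
        ("B", HashSet::from(["A"])),
        ("C", HashSet::from(["B"])),
    ]);

    let mut done: HashSet<&str> = HashSet::new();
    while done.len() < deps.len() {
        // Everything whose deps are all done is ready; rewatch compiles
        // each such wave in parallel with rayon.
        let ready: Vec<&str> = deps
            .iter()
            .filter(|(m, ds)| !done.contains(*m) && ds.is_subset(&done))
            .map(|(m, _)| *m)
            .collect();
        if ready.is_empty() {
            break; // no progress: the remaining modules form a cycle
        }
        done.extend(ready);
    }
    assert_eq!(done.len(), 3);
}
```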
"-bs-package-output".to_string(), + format!( + "{}:{}:{}", + root_config.get_module(), + Path::new(file_path).parent().unwrap().to_str().unwrap(), + root_config.get_suffix() + ), + ] + }; + + vec![ + namespace_args, + read_cmi_args, + vec!["-I".to_string(), ".".to_string()], + deps.concat(), + jsx_args, + jsx_module_args, + jsx_mode_args, + uncurried_args, + bsc_flags.to_owned(), + warning_args, + // vec!["-warn-error".to_string(), "A".to_string()], + // ^^ this one fails for bisect-ppx + // this is the default + // we should probably parse the right ones from the package config + // vec!["-w".to_string(), "a".to_string()], + implementation_args, + // vec![ + // "-I".to_string(), + // abs_node_modules_path.to_string() + "/rescript/ocaml", + // ], + vec![ast_path.to_string()], + ] + .concat() +} + +fn compile_file( + package: &packages::Package, + root_package: &packages::Package, + ast_path: &str, + module: &Module, + version: &str, + is_interface: bool, + bsc_path: &str, + packages: &AHashMap, + project_root: &str, + workspace_root: &Option, +) -> Result, String> { + let build_path_abs = package.get_build_path(); + let implementation_file_path = match module.source_type { + SourceType::SourceFile(ref source_file) => &source_file.implementation.path, + _ => panic!("Not a source file"), + }; + let module_name = helpers::file_path_to_module_name(implementation_file_path, &package.namespace); + let has_interface = module.get_interface().is_some(); + let to_mjs_args = compiler_args( + &package.bsconfig, + &root_package.bsconfig, + ast_path, + version, + implementation_file_path, + is_interface, + has_interface, + project_root, + workspace_root, + &Some(packages), + ); + + let to_mjs = Command::new(bsc_path) + .current_dir(helpers::canonicalize_string_path(&build_path_abs.to_owned()).unwrap()) + .args(to_mjs_args) + .output(); + + match to_mjs { + Ok(x) if !x.status.success() => { + let stderr = String::from_utf8_lossy(&x.stderr); + let stdout = String::from_utf8_lossy(&x.stdout); + Err(stderr.to_string() + &stdout) + } + Err(e) => Err(format!("ERROR, {}, {:?}", e, ast_path)), + Ok(x) => { + let err = std::str::from_utf8(&x.stderr) + .expect("stdout should be non-null") + .to_string(); + + let dir = std::path::Path::new(implementation_file_path).parent().unwrap(); + + // perhaps we can do this copying somewhere else + if !is_interface { + let _ = std::fs::copy( + build_path_abs.to_string() + "/" + &module_name + ".cmi", + std::path::Path::new(&package.get_bs_build_path()) + .join(dir) + // because editor tooling doesn't support namespace entries yet + // we just remove the @ for now. This makes sure the editor support + // doesn't break + .join(module_name.to_owned().replace('@', "") + ".cmi"), + ); + let _ = std::fs::copy( + build_path_abs.to_string() + "/" + &module_name + ".cmj", + std::path::Path::new(&package.get_bs_build_path()) + .join(dir) + .join(module_name.to_owned() + ".cmj"), + ); + let _ = std::fs::copy( + build_path_abs.to_string() + "/" + &module_name + ".cmt", + std::path::Path::new(&package.get_bs_build_path()) + .join(dir) + // because editor tooling doesn't support namespace entries yet + // we just remove the @ for now. 
+
+fn compile_file(
+    package: &packages::Package,
+    root_package: &packages::Package,
+    ast_path: &str,
+    module: &Module,
+    version: &str,
+    is_interface: bool,
+    bsc_path: &str,
+    packages: &AHashMap<String, packages::Package>,
+    project_root: &str,
+    workspace_root: &Option<String>,
+) -> Result<Option<String>, String> {
+    let build_path_abs = package.get_build_path();
+    let implementation_file_path = match module.source_type {
+        SourceType::SourceFile(ref source_file) => &source_file.implementation.path,
+        _ => panic!("Not a source file"),
+    };
+    let module_name = helpers::file_path_to_module_name(implementation_file_path, &package.namespace);
+    let has_interface = module.get_interface().is_some();
+    let to_mjs_args = compiler_args(
+        &package.bsconfig,
+        &root_package.bsconfig,
+        ast_path,
+        version,
+        implementation_file_path,
+        is_interface,
+        has_interface,
+        project_root,
+        workspace_root,
+        &Some(packages),
+    );
+
+    let to_mjs = Command::new(bsc_path)
+        .current_dir(helpers::canonicalize_string_path(&build_path_abs.to_owned()).unwrap())
+        .args(to_mjs_args)
+        .output();
+
+    match to_mjs {
+        Ok(x) if !x.status.success() => {
+            let stderr = String::from_utf8_lossy(&x.stderr);
+            let stdout = String::from_utf8_lossy(&x.stdout);
+            Err(stderr.to_string() + &stdout)
+        }
+        Err(e) => Err(format!("ERROR, {}, {:?}", e, ast_path)),
+        Ok(x) => {
+            let err = std::str::from_utf8(&x.stderr)
+                .expect("stdout should be non-null")
+                .to_string();
+
+            let dir = std::path::Path::new(implementation_file_path).parent().unwrap();
+
+            // perhaps we can do this copying somewhere else
+            if !is_interface {
+                let _ = std::fs::copy(
+                    build_path_abs.to_string() + "/" + &module_name + ".cmi",
+                    std::path::Path::new(&package.get_bs_build_path())
+                        .join(dir)
+                        // because editor tooling doesn't support namespace entries yet
+                        // we just remove the @ for now. This makes sure the editor support
+                        // doesn't break
+                        .join(module_name.to_owned().replace('@', "") + ".cmi"),
+                );
+                let _ = std::fs::copy(
+                    build_path_abs.to_string() + "/" + &module_name + ".cmj",
+                    std::path::Path::new(&package.get_bs_build_path())
+                        .join(dir)
+                        .join(module_name.to_owned() + ".cmj"),
+                );
+                let _ = std::fs::copy(
+                    build_path_abs.to_string() + "/" + &module_name + ".cmt",
+                    std::path::Path::new(&package.get_bs_build_path())
+                        .join(dir)
+                        // because editor tooling doesn't support namespace entries yet
+                        // we just remove the @ for now. This makes sure the editor support
+                        // doesn't break
+                        .join(module_name.to_owned().replace('@', "") + ".cmt"),
+                );
+            } else {
+                let _ = std::fs::copy(
+                    build_path_abs.to_string() + "/" + &module_name + ".cmti",
+                    std::path::Path::new(&package.get_bs_build_path())
+                        .join(dir)
+                        .join(module_name.to_owned() + ".cmti"),
+                );
+            }
+            match &module.source_type {
+                SourceType::SourceFile(SourceFile {
+                    interface: Some(Interface { path, .. }),
+                    ..
+                })
+                | SourceType::SourceFile(SourceFile {
+                    implementation: Implementation { path, .. },
+                    ..
+                }) => {
+                    // we need to copy the source file to the build directory.
+                    // editor tools expect the source file in lib/bs for finding the current package
+                    // and in lib/ocaml when referencing modules in other packages
+                    let _ = std::fs::copy(
+                        std::path::Path::new(&package.path).join(path),
+                        std::path::Path::new(&package.get_bs_build_path()).join(path),
+                    )
+                    .expect("copying source file failed");
+
+                    let _ = std::fs::copy(
+                        std::path::Path::new(&package.path).join(path),
+                        std::path::Path::new(&package.get_build_path())
+                            .join(std::path::Path::new(path).file_name().unwrap()),
+                    )
+                    .expect("copying source file failed");
+                }
+                _ => (),
+            }
+
+            if helpers::contains_ascii_characters(&err) {
+                if package.is_pinned_dep {
+                    // suppress warnings of external deps
+                    Ok(Some(err))
+                } else {
+                    Ok(None)
+                }
+            } else {
+                Ok(None)
+            }
+        }
+    }
+}
+
+pub fn mark_modules_with_deleted_deps_dirty(build_state: &mut BuildState) {
+    build_state.modules.iter_mut().for_each(|(_, module)| {
+        if !module.deps.is_disjoint(&build_state.deleted_modules) {
+            module.compile_dirty = true;
+        }
+    });
+}
+
+// this happens when a compile is not completed successfully in some way
+// a module could be compiled with a new interface, but its dependent
+// modules have not finished compiling. This can cause a stale build.
+// When the build is clean this doesn't happen. But when we interrupt the build,
+// such as force quitting the watcher, it can happen.
+//
+// If a build stops in the middle of errors, this will also happen, because
+// basically we interrupt a build and we stop compiling somewhere in the middle.
+//
+// In watch mode, we retain the dirty state of the modules, so we don't need
+// to do this, which will make it more efficient.
+//
+// We could clean up the build after errors. But I think we probably still need
+// to do this, because people can also force quit the watcher.
+pub fn mark_modules_with_expired_deps_dirty(build_state: &mut BuildState) {
+    let mut modules_with_expired_deps: AHashSet<String> = AHashSet::new();
+    build_state
+        .modules
+        .iter()
+        .filter(|m| !m.1.is_mlmap())
+        .for_each(|(module_name, module)| {
+            for dependent in module.dependents.iter() {
+                let dependent_module = build_state.modules.get(dependent).unwrap();
+                match dependent_module.source_type {
+                    SourceType::SourceFile(_) => {
+                        match (module.last_compiled_cmt, module.last_compiled_cmi) {
+                            (None, None) | (Some(_), None) | (None, Some(_)) => {
+                                // println!(
+                                //     "🛑 {} is a dependent of {} but has no cmt/cmi",
+                                //     module_name, dependent
+                                // );
+                                modules_with_expired_deps.insert(module_name.to_string());
+                            }
+                            (Some(_), Some(_)) => (),
+                        }
+
+                        // we compare the last compiled time of the dependent module with the last
+                        // compile of the interface of the module it depends on, if the interface
+                        // didn't change it doesn't matter
+                        match (dependent_module.last_compiled_cmt, module.last_compiled_cmi) {
+                            (Some(last_compiled_dependent), Some(last_compiled)) => {
+                                if last_compiled_dependent < last_compiled {
+                                    // println!(
+                                    //     "✅ {} is a dependent of {} ({:?} / {:?})",
+                                    //     module_name, dependent, last_compiled_dependent, last_compiled
+                                    // );
+                                    modules_with_expired_deps.insert(dependent.to_string());
+                                } else {
+                                    // println!(
+                                    //     "🛑 {} is a dependent of {} ({:?} / {:?})",
+                                    //     module_name, dependent, last_compiled_dependent, last_compiled
+                                    // );
+                                }
+                            }
+                            (None, _) => {
+                                // println!(
+                                //     "🛑 {} is a dependent of {} (no last compiled time)",
+                                //     module_name, dependent
+                                // );
+                                modules_with_expired_deps.insert(dependent.to_string());
+                            }
+                            _ => (),
+                        }
+                    }
+                    // a namespace is never a dependent of a module (it can be a dependency, but not the other
+                    // way around)
+                    SourceType::MlMap(_) => {
+                        for dependent_of_namespace in dependent_module.dependents.iter() {
+                            let dependent_module = build_state.modules.get(dependent_of_namespace).unwrap();
+
+                            if let (Some(last_compiled_dependent), Some(last_compiled)) =
+                                (dependent_module.last_compiled_cmt, module.last_compiled_cmi)
+                            {
+                                if last_compiled_dependent < last_compiled {
+                                    modules_with_expired_deps.insert(dependent.to_string());
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        });
+    build_state.modules.iter_mut().for_each(|(module_name, module)| {
+        if modules_with_expired_deps.contains(module_name) {
+            module.compile_dirty = true;
+        }
+    });
+}
diff --git a/rewatch/src/build/compile/dependency_cycle.rs b/rewatch/src/build/compile/dependency_cycle.rs
new file mode 100644
index 0000000000..072ca06743
--- /dev/null
+++ b/rewatch/src/build/compile/dependency_cycle.rs
@@ -0,0 +1,71 @@
+use super::super::build_types::*;
+use crate::helpers;
+use ahash::AHashSet;
+
+pub fn find(modules: &Vec<(&String, &Module)>) -> Vec<String> {
+    let mut visited: AHashSet<String> = AHashSet::new();
+    let mut stack: Vec<String> = vec![];
+
+    // we want to sort the module names so that we always return the same
+    // dependency cycle (there can be more than one)
+    let mut module_names = modules
+        .iter()
+        .map(|(name, _)| name.to_string())
+        .collect::<Vec<String>>();
+
+    module_names.sort();
+    for module_name in module_names {
+        if find_dependency_cycle_helper(&module_name, modules, &mut visited, &mut stack) {
+            return stack;
+        }
+        visited.clear();
+        stack.clear();
+    }
+    stack
+}
+
+fn find_dependency_cycle_helper(
+    module_name: &String,
+    modules: &Vec<(&String, &Module)>,
+    visited: &mut AHashSet<String>,
+    stack: &mut Vec<String>,
+) -> bool {
+    if let Some(module) = modules
+        .iter()
+        .find(|(name, _)| *name == module_name)
+        .map(|(_, module)| module)
+    {
+        visited.insert(module_name.to_string());
+        // if the module is a mlmap (namespace), we don't want to show this in the path
+        // because the namespace is not a module the user created, so only add source files
+        // to the stack
+        if let SourceType::SourceFile(_) = module.source_type {
+            stack.push(module_name.to_string())
+        }
+        for dep in &module.deps {
+            if !visited.contains(dep) {
+                if find_dependency_cycle_helper(dep, modules, visited, stack) {
+                    return true;
+                }
+            } else if stack.contains(dep) {
+                stack.push(dep.to_string());
+                return true;
+            }
+        }
+        // because we only pushed source files to the stack, we also only need to
+        // pop these from the stack if we don't find a dependency cycle
+        if let SourceType::SourceFile(_) = module.source_type {
+            let _ = stack.pop();
+        }
+        return false;
+    }
+    false
+}
+
+pub fn format(cycle: &[String]) -> String {
+    cycle
+        .iter()
+        .map(|s| helpers::format_namespaced_module_name(s))
+        .collect::<Vec<String>>()
+        .join(" -> ")
+}
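The cycle finder is a plain DFS with an explicit path stack; a reduced sketch (illustrative, plain std types) of the same idea:

```rust
use std::collections::{HashMap, HashSet};

fn find_cycle<'a>(
    node: &'a str,
    deps: &HashMap<&'a str, Vec<&'a str>>,
    visited: &mut HashSet<&'a str>,
    stack: &mut Vec<&'a str>,
) -> bool {
    visited.insert(node);
    stack.push(node);
    for dep in deps.get(node).into_iter().flatten().copied() {
        if !visited.contains(dep) {
            if find_cycle(dep, deps, visited, stack) {
                return true;
            }
        } else if stack.contains(&dep) {
            stack.push(dep); // close the loop, so the report reads A -> B -> A
            return true;
        }
    }
    stack.pop();
    false
}

fn main() {
    let deps = HashMap::from([("A", vec!["B"]), ("B", vec!["A"])]);
    let (mut visited, mut stack) = (HashSet::new(), Vec::new());
    assert!(find_cycle("A", &deps, &mut visited, &mut stack));
    assert_eq!(stack, ["A", "B", "A"]);
}
```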
diff --git a/rewatch/src/build/deps.rs b/rewatch/src/build/deps.rs
new file mode 100644
index 0000000000..dad31311b1
--- /dev/null
+++ b/rewatch/src/build/deps.rs
@@ -0,0 +1,127 @@
+use super::build_types::*;
+use super::is_dirty;
+use super::packages;
+use crate::helpers;
+use ahash::AHashSet;
+use rayon::prelude::*;
+
+fn get_dep_modules(
+    ast_file: &str,
+    namespace: Option<String>,
+    package_modules: &AHashSet<String>,
+    valid_modules: &AHashSet<String>,
+) -> AHashSet<String> {
+    let mut deps = AHashSet::new();
+    if let Ok(lines) = helpers::read_lines(ast_file.to_string()) {
+        // we skip the first line, which is some null characters
+        // the following lines in the AST are the dependency modules
+        // we stop when we hit a line that starts with a "/", which is the path of the file.
+        // this is the point where the dependencies end and the actual AST starts
+        for line in lines.skip(1).flatten() {
+            let line = line.trim().to_string();
+            if line.starts_with('/') {
+                break;
+            } else if !line.is_empty() {
+                deps.insert(line);
+            }
+        }
+    } else {
+        panic!("Could not read file {}", ast_file);
+    }
+
+    return deps
+        .iter()
+        .map(|dep| {
+            let dep_first = dep.split('.').next().unwrap();
+            let dep_second = dep.split('.').nth(1);
+            match &namespace {
+                Some(namespace) => {
+                    // if the module is in its own namespace, take the submodule -- so:
+                    // if the module is TeamwalnutApp.MyModule inside of the namespace TeamwalnutApp
+                    // we need the dependency to be MyModule in the same namespace
+                    let dep = match dep_second {
+                        Some(dep_second) if dep_first == namespace => dep_second,
+                        _ => dep_first,
+                    };
+                    let namespaced_name = dep.to_owned() + "-" + namespace;
+                    if package_modules.contains(&namespaced_name) {
+                        namespaced_name
+                    } else {
+                        dep.to_string()
+                    }
+                }
+                None => dep_first.to_string(),
+            }
+        })
+        .filter(|dep| {
+            valid_modules.contains(dep)
+                && match namespace.to_owned() {
+                    Some(namespace) => !dep.eq(&namespace),
+                    None => true,
+                }
+        })
+        .collect::<AHashSet<String>>();
+}
+
+pub fn get_deps(build_state: &mut BuildState, deleted_modules: &AHashSet<String>) {
+    let all_mod = &build_state.module_names.union(deleted_modules).cloned().collect();
+    build_state
+        .modules
+        .par_iter()
+        .map(|(module_name, module)| match &module.source_type {
+            SourceType::MlMap(_) => (module_name.to_string(), module.deps.to_owned()),
+            SourceType::SourceFile(source_file) => {
+                let package = build_state
+                    .get_package(&module.package_name)
+                    .expect("Package not found");
+                let ast_path = package.get_ast_path(&source_file.implementation.path);
+                if is_dirty(module) || !build_state.deps_initialized {
+                    let mut deps = get_dep_modules(
+                        &ast_path,
+                        package.namespace.to_suffix(),
+                        package.modules.as_ref().unwrap(),
+                        all_mod,
+                    );
+
+                    match &source_file.interface {
+                        Some(interface) => {
+                            let iast_path = package.get_iast_path(&interface.path);
+
+                            deps.extend(get_dep_modules(
+                                &iast_path,
+                                package.namespace.to_suffix(),
+                                package.modules.as_ref().unwrap(),
+                                all_mod,
+                            ))
+                        }
+                        None => (),
+                    }
+                    match &package.namespace {
+                        packages::Namespace::NamespaceWithEntry { namespace: _, entry }
+                            if entry == module_name =>
+                        {
+                            deps.insert(package.namespace.to_suffix().unwrap());
+                        }
+                        _ => (),
+                    }
+                    deps.remove(module_name);
+                    (module_name.to_string(), deps)
+                } else {
+                    (module_name.to_string(), module.deps.to_owned())
+                }
+            }
+        })
+        .collect::<Vec<(String, AHashSet<String>)>>()
+        .into_iter()
+        .for_each(|(module_name, deps)| {
+            if let Some(module) = build_state.modules.get_mut(&module_name) {
+                module.deps = deps.clone();
+            }
+            deps.iter().for_each(|dep_name| {
+                if let Some(module) = build_state.modules.get_mut(dep_name) {
+                    module.dependents.insert(module_name.to_string());
+                }
+            });
+        });
+    build_state.deps_initialized = true;
+}
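The `.ast` header format assumed by that loop, reproduced in miniature (illustrative content; real files start with binary magic):

```rust
use std::io::{BufRead, Cursor};

fn main() {
    let header = "\0\nBelt\nJs\n/abs/src/App.res\n<binary ast>";
    let deps: Vec<String> = Cursor::new(header)
        .lines()
        .skip(1) // first line: null characters / magic
        .flatten()
        .map(|l| l.trim().to_string())
        .take_while(|l| !l.starts_with('/')) // stop at the source path
        .filter(|l| !l.is_empty())
        .collect();
    assert_eq!(deps, vec!["Belt".to_string(), "Js".to_string()]);
}
```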
package.get_build_path(),
+    };
+
+    build_folder.to_owned() + "/.compiler.log"
+}
+
+fn escape_colours(str: &str) -> String {
+    let re = Regex::new(r"[\u001b\u009b]\[[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]")
+        .expect("Could not create regex");
+    re.replace_all(str, "").to_string()
+}
+
+fn write_to_log_file(mut file: File, package_name: &str, content: &str) {
+    // log a write failure once, with the actual error attached
+    if let Err(e) = file.write(escape_colours(content).as_bytes()) {
+        error!(
+            "Could not write to compiler log file. {}. \n{:?}",
+            &package_name, &e
+        );
+    }
+}
+
+pub fn initialize(packages: &AHashMap<String, Package>) {
+    packages.par_iter().for_each(|(name, package)| {
+        File::create(get_log_file_path(package, Location::Bs))
+            .map(|file| write_to_log_file(file, name, &format!("#Start({})\n", helpers::get_system_time())))
+            .expect(&("Cannot create compiler log for package ".to_owned() + name));
+    })
+}
+
+pub fn append(package: &packages::Package, str: &str) {
+    File::options()
+        .append(true)
+        .open(get_log_file_path(package, Location::Bs))
+        .map(|file| write_to_log_file(file, &package.name, str))
+        .expect(&("Cannot write compiler log: ".to_owned() + &get_log_file_path(package, Location::Bs)));
+}
+
+pub fn finalize(packages: &AHashMap<String, Package>) {
+    packages.par_iter().for_each(|(name, package)| {
+        let _ = File::options()
+            .append(true)
+            .open(get_log_file_path(package, Location::Bs))
+            .map(|file| write_to_log_file(file, name, &format!("#Done({})\n", helpers::get_system_time())));
+
+        let _ = std::fs::copy(
+            get_log_file_path(package, Location::Bs),
+            get_log_file_path(package, Location::Ocaml),
+        );
+    })
+}
diff --git a/rewatch/src/build/namespaces.rs b/rewatch/src/build/namespaces.rs
new file mode 100644
index 0000000000..394b582972
--- /dev/null
+++ b/rewatch/src/build/namespaces.rs
@@ -0,0 +1,62 @@
+use crate::build::packages;
+use crate::helpers;
+use ahash::AHashSet;
+use std::fs::File;
+use std::io::Write;
+use std::process::Command;
+
+// Namespaces work as follows: the build system generates a file called
+// `MyModule.mlmap` which contains all modules that are in the namespace.
+//
+// The first line of this file is a dummy digest; the following lines are the
+// names of the modules in the namespace. You can call bsc with this file, and
+// it will produce compiler assets for it: essentially a module with all the
+// aliases. Given that these are just aliases, it does not need to create an
+// mjs file.
+//
+// Internal modules are made inaccessible with the following trick: they are
+// compiled to a module name such as `MyModule-MyNameSpace`. A dash in a module
+// name cannot be written in a source file, but it is possible when
+// constructing the AST, so these modules are hidden from compilation.
+// In the top namespace, however, we alias them with their proper names.
+
+pub fn gen_mlmap(
+    package: &packages::Package,
+    namespace: &str,
+    depending_modules: &AHashSet<String>,
+) -> String {
+    let build_path_abs = package.get_build_path();
+    // we don't really need to create a digest, because we track whether we need to
+    // recompile in a different way, but we need to put something in the file for it
+    // to be readable.
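+    // a sketch of the resulting mlmap (module names are hypothetical):
+    //     randjbuildsystem
+    //     Bar
+    //     Foo
+    // i.e. the dummy digest line followed by one sorted module name per line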
+
+    let path = build_path_abs.to_string() + "/" + namespace + ".mlmap";
+    let mut file = File::create(&path).expect("Unable to create mlmap");
+
+    file.write_all(b"randjbuildsystem\n")
+        .expect("Unable to write mlmap");
+
+    let mut modules = Vec::from_iter(depending_modules.to_owned());
+    modules.sort();
+    for module in modules {
+        // check if the module name is referenceable in code (no exotic module names)
+        // (only contains A-Z a-z 0-9 and _ and only starts with a capital letter)
+        // if not, it does not make sense to export it as part of the namespace
+        // this helps compile times of exotic modules such as MyModule.test
+        file.write_all(module.as_bytes()).unwrap();
+        file.write_all(b"\n").unwrap();
+    }
+
+    path.to_string()
+}
+
+pub fn compile_mlmap(package: &packages::Package, namespace: &str, bsc_path: &str) {
+    let build_path_abs = package.get_build_path();
+    let mlmap_name = format!("{}.mlmap", namespace);
+    let args = vec!["-w", "-49", "-color", "always", "-no-alias-deps", &mlmap_name];
+
+    let _ = Command::new(bsc_path)
+        .current_dir(helpers::canonicalize_string_path(&build_path_abs).unwrap())
+        .args(args)
+        .output()
+        .expect("Error compiling mlmap");
+}
diff --git a/rewatch/src/build/packages.rs b/rewatch/src/build/packages.rs
new file mode 100644
index 0000000000..171bc0cae9
--- /dev/null
+++ b/rewatch/src/build/packages.rs
@@ -0,0 +1,991 @@
+use super::build_types::*;
+use super::namespaces;
+use super::packages;
+use crate::bsconfig;
+use crate::helpers;
+use crate::helpers::emojis::*;
+use ahash::{AHashMap, AHashSet};
+use console::style;
+use log::{debug, error};
+use rayon::prelude::*;
+use std::error;
+use std::fs::{self};
+use std::hash::{Hash, Hasher};
+use std::path::{Path, PathBuf};
+use std::time::SystemTime;
+
+#[derive(Debug, Clone)]
+pub struct SourceFileMeta {
+    pub modified: SystemTime,
+}
+
+#[derive(Debug, Clone)]
+pub enum Namespace {
+    Namespace(String),
+    NamespaceWithEntry { namespace: String, entry: String },
+    NoNamespace,
+}
+
+impl Namespace {
+    pub fn to_suffix(&self) -> Option<String> {
+        match self {
+            Namespace::Namespace(namespace) => Some(namespace.to_string()),
+            Namespace::NamespaceWithEntry { namespace, entry: _ } => Some("@".to_string() + namespace),
+            Namespace::NoNamespace => None,
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+struct Dependency {
+    name: String,
+    bsconfig: bsconfig::Config,
+    path: String,
+    is_pinned: bool,
+    dependencies: Vec<Dependency>,
+}
+
+#[derive(Debug, Clone)]
+pub struct Package {
+    pub name: String,
+    pub bsconfig: bsconfig::Config,
+    pub source_folders: AHashSet<bsconfig::PackageSource>,
+    // these are the relative file paths (relative to the package root)
+    pub source_files: Option<AHashMap<String, SourceFileMeta>>,
+    pub namespace: Namespace,
+    pub modules: Option<AHashSet<String>>,
+    // canonicalized dir of the package
+    pub path: String,
+    pub dirs: Option<AHashSet<PathBuf>>,
+    pub is_pinned_dep: bool,
+    pub is_root: bool,
+}
+
+pub fn get_build_path(canonical_path: &str) -> String {
+    format!("{}/lib/ocaml", canonical_path)
+}
+
+impl Package {
+    pub fn get_bs_build_path(&self) -> String {
+        format!("{}/lib/bs", self.path)
+    }
+
+    pub fn get_build_path(&self) -> String {
+        get_build_path(&self.path)
+    }
+
+    pub fn get_mlmap_path(&self) -> String {
+        self.get_build_path()
+            + "/"
+            + &self
+                .namespace
+                .to_suffix()
+                .expect("namespace should be set for mlmap module")
+            + ".mlmap"
+    }
+
+    pub fn get_mlmap_compile_path(&self) -> String {
+        self.get_build_path()
+            + "/"
+            + &self
+                .namespace
+                .to_suffix()
+                .expect("namespace should be set for mlmap module")
+            + ".cmi"
+    }
+
+    pub fn get_ast_path(&self, source_file: &str) ->
String { + helpers::get_compiler_asset(self, &packages::Namespace::NoNamespace, source_file, "ast") + } + + pub fn get_iast_path(&self, source_file: &str) -> String { + helpers::get_compiler_asset(self, &packages::Namespace::NoNamespace, source_file, "iast") + } +} + +impl PartialEq for Package { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + } +} +impl Eq for Package {} +impl Hash for Package { + fn hash(&self, _state: &mut H) { + blake3::hash(self.name.as_bytes()); + } +} + +fn matches_filter(filter: &Option, path: &str) -> bool { + match filter { + Some(filter) => filter.is_match(path), + None => true, + } +} + +pub fn read_folders( + filter: &Option, + package_dir: &Path, + path: &Path, + recurse: bool, +) -> Result, Box> { + let mut map: AHashMap = AHashMap::new(); + let path_buf = PathBuf::from(path); + let meta = fs::metadata(package_dir.join(path)); + let path_with_meta = meta.map(|meta| { + ( + path.to_owned(), + SourceFileMeta { + modified: meta.modified().unwrap(), + }, + ) + }); + + for entry in fs::read_dir(package_dir.join(&path_buf))? { + let entry_path_buf = entry.map(|entry| entry.path())?; + let metadata = fs::metadata(&entry_path_buf)?; + let name = entry_path_buf.file_name().unwrap().to_str().unwrap().to_string(); + + let path_ext = entry_path_buf.extension().and_then(|x| x.to_str()); + let new_path = path_buf.join(&name); + if metadata.file_type().is_dir() && recurse { + match read_folders(filter, package_dir, &new_path, recurse) { + Ok(s) => map.extend(s), + Err(e) => println!("Error reading directory: {}", e), + } + } + + match path_ext { + Some(extension) if helpers::is_source_file(extension) => match path_with_meta { + Ok((ref path, _)) if matches_filter(filter, &name) => { + let mut path = path.to_owned(); + path.push(&name); + map.insert( + path.to_string_lossy().to_string(), + SourceFileMeta { + modified: metadata.modified().unwrap(), + }, + ); + } + + Ok(_) => println!("Filtered: {:?}", name), + Err(ref e) => println!("Error reading directory: {}", e), + }, + _ => (), + } + } + + Ok(map) +} + +/// Given a projects' root folder and a `bsconfig::Source`, this recursively creates all the +/// sources in a flat list. In the process, it removes the children, as they are being resolved +/// because of the recursiveness. So you get a flat list of files back, retaining the type_ and +/// wether it needs to recurse into all structures +fn get_source_dirs(source: bsconfig::Source, sub_path: Option) -> AHashSet { + let mut source_folders: AHashSet = AHashSet::new(); + + let (subdirs, full_recursive) = match source.to_owned() { + bsconfig::Source::Shorthand(_) + | bsconfig::Source::Qualified(bsconfig::PackageSource { subdirs: None, .. }) => (None, false), + bsconfig::Source::Qualified(bsconfig::PackageSource { + subdirs: Some(bsconfig::Subdirs::Recurse(recurse)), + .. + }) => (None, recurse), + bsconfig::Source::Qualified(bsconfig::PackageSource { + subdirs: Some(bsconfig::Subdirs::Qualified(subdirs)), + .. 
+        }) => (Some(subdirs), false),
+    };
+
+    let source_folder = bsconfig::to_qualified_without_children(&source, sub_path.to_owned());
+    source_folders.insert(source_folder.to_owned());
+
+    if !full_recursive {
+        let sub_path = Path::new(&source_folder.dir).to_path_buf();
+        subdirs
+            .unwrap_or(vec![])
+            .par_iter()
+            .map(|subdir| get_source_dirs(subdir.to_owned(), Some(sub_path.to_owned())))
+            .collect::<Vec<AHashSet<bsconfig::PackageSource>>>()
+            .into_iter()
+            .for_each(|subdir| source_folders.extend(subdir))
+    }
+
+    source_folders
+}
+
+pub fn read_bsconfig(package_dir: &str) -> bsconfig::Config {
+    let prefix = if package_dir.is_empty() {
+        "".to_string()
+    } else {
+        package_dir.to_string() + "/"
+    };
+
+    let rescript_json_path = prefix.to_string() + "rescript.json";
+    let bsconfig_json_path = prefix.to_string() + "bsconfig.json";
+
+    if Path::new(&rescript_json_path).exists() {
+        bsconfig::read(rescript_json_path)
+    } else {
+        bsconfig::read(bsconfig_json_path)
+    }
+}
+
+pub fn read_dependency(
+    package_name: &str,
+    parent_path: &str,
+    project_root: &str,
+    workspace_root: &Option<String>,
+) -> Result<String, String> {
+    let path_from_parent = PathBuf::from(helpers::package_path(parent_path, package_name));
+    let path_from_project_root = PathBuf::from(helpers::package_path(project_root, package_name));
+    let maybe_path_from_workspace_root = workspace_root
+        .as_ref()
+        .map(|workspace_root| PathBuf::from(helpers::package_path(workspace_root, package_name)));
+
+    let path = match (
+        path_from_parent,
+        path_from_project_root,
+        maybe_path_from_workspace_root,
+    ) {
+        (path_from_parent, _, _) if path_from_parent.exists() => Ok(path_from_parent),
+        (_, path_from_project_root, _) if path_from_project_root.exists() => Ok(path_from_project_root),
+        (_, _, Some(path_from_workspace_root)) if path_from_workspace_root.exists() => {
+            Ok(path_from_workspace_root)
+        }
+        _ => Err(format!(
+            "The package \"{}\" is not found (are node_modules up-to-date?)...",
+            package_name
+        )),
+    }?;
+
+    let canonical_path = match path.canonicalize() {
+        Ok(canonical_path) => Ok(canonical_path.to_string_lossy().to_string()),
+        Err(e) => {
+            Err(format!(
+                "Failed canonicalizing the package \"{}\" path \"{}\" (are node_modules up-to-date?)...\nMore details: {}",
+                package_name,
+                path.to_string_lossy(),
+                e
+            ))
+        }
+    }?;
+
+    Ok(canonical_path)
+}
+
+/// # Make Package
+///
+/// Given a bsconfig, recursively finds all dependencies.
+/// 1. It starts with registering dependencies and
+///    prevents the operation for the ones which are already
+///    registered for the parent packages. Especially relevant for peerDependencies.
+/// 2. In parallel performs IO to read the dependencies' bsconfig and
+///    recursively continues the operation for their dependencies as well.
+fn read_dependencies(
+    registered_dependencies_set: &mut AHashSet<String>,
+    parent_bsconfig: &bsconfig::Config,
+    parent_path: &str,
+    project_root: &str,
+    workspace_root: Option<String>,
+) -> Vec<Dependency> {
+    return parent_bsconfig
+        .bs_dependencies
+        .to_owned()
+        .unwrap_or(vec![])
+        .iter()
+        .filter_map(|package_name| {
+            if registered_dependencies_set.contains(package_name) {
+                None
+            } else {
+                registered_dependencies_set.insert(package_name.to_owned());
+                Some(package_name.to_owned())
+            }
+        })
+        .collect::<Vec<String>>()
+        // Read all bsconfig files in parallel instead of blocking
+        .par_iter()
+        .map(|package_name| {
+            let (bsconfig, canonical_path) =
+                match read_dependency(package_name, parent_path, project_root, &workspace_root) {
+                    Err(error) => {
+                        print!(
+                            "{} {} Error building package tree. 
{}", + style("[1/2]").bold().dim(), + CROSS, + error + ); + std::process::exit(2) + } + Ok(canonical_path) => (read_bsconfig(&canonical_path), canonical_path), + }; + let is_pinned = parent_bsconfig + .pinned_dependencies + .as_ref() + .map(|p| p.contains(&bsconfig.name)) + .unwrap_or(false); + + let dependencies = read_dependencies( + &mut registered_dependencies_set.to_owned(), + &bsconfig, + &canonical_path, + project_root, + workspace_root.to_owned(), + ); + + Dependency { + name: package_name.to_owned(), + bsconfig, + path: canonical_path, + is_pinned, + dependencies, + } + }) + .collect::>(); +} + +fn flatten_dependencies(dependencies: Vec) -> Vec { + let mut flattened: Vec = Vec::new(); + for dep in dependencies { + flattened.push(dep.clone()); + let nested_flattened = flatten_dependencies(dep.dependencies); + flattened.extend(nested_flattened); + } + flattened +} + +fn make_package( + bsconfig: bsconfig::Config, + package_path: &str, + is_pinned_dep: bool, + is_root: bool, +) -> Package { + let source_folders = match bsconfig.sources.to_owned() { + bsconfig::OneOrMore::Single(source) => get_source_dirs(source, None), + bsconfig::OneOrMore::Multiple(sources) => { + let mut source_folders: AHashSet = AHashSet::new(); + sources + .iter() + .map(|source| get_source_dirs(source.to_owned(), None)) + .collect::>>() + .into_iter() + .for_each(|source| source_folders.extend(source)); + source_folders + } + }; + + Package { + name: bsconfig.name.to_owned(), + bsconfig: bsconfig.to_owned(), + source_folders, + source_files: None, + namespace: bsconfig.get_namespace(), + modules: None, + // we canonicalize the path name so it's always the same + path: PathBuf::from(package_path) + .canonicalize() + .expect("Could not canonicalize") + .to_string_lossy() + .to_string(), + dirs: None, + is_pinned_dep, + is_root, + } +} + +fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap { + let root_bsconfig = read_bsconfig(project_root); + + // Store all packages and completely deduplicate them + let mut map: AHashMap = AHashMap::new(); + map.insert( + root_bsconfig.name.to_owned(), + make_package(root_bsconfig.to_owned(), project_root, false, true), + ); + + let mut registered_dependencies_set: AHashSet = AHashSet::new(); + let dependencies = flatten_dependencies(read_dependencies( + &mut registered_dependencies_set, + &root_bsconfig, + project_root, + project_root, + workspace_root, + )); + dependencies.iter().for_each(|d| { + if !map.contains_key(&d.name) { + map.insert( + d.name.to_owned(), + make_package(d.bsconfig.to_owned(), &d.path, d.is_pinned, false), + ); + } + }); + + map +} + +/// `get_source_files` is essentially a wrapper around `read_structure`, which read a +/// list of files in a folder to a hashmap of `string` / `fs::Metadata` (file metadata). Reason for +/// this wrapper is the recursiveness of the `bsconfig.json` subfolders. Some sources in bsconfig +/// can be specified as being fully recursive (`{ subdirs: true }`). This wrapper pulls out that +/// data from the config and pushes it forwards. Another thing is the 'type_', some files / folders +/// can be marked with the type 'dev'. Which means that they may not be around in the distributed +/// NPM package. The file reader allows for this, just warns when this happens. 
+/// TODO -> Check whether we actually need the `fs::Metadata`
+pub fn get_source_files(
+    package_dir: &Path,
+    filter: &Option<regex::Regex>,
+    source: &bsconfig::PackageSource,
+) -> AHashMap<String, SourceFileMeta> {
+    let mut map: AHashMap<String, SourceFileMeta> = AHashMap::new();
+
+    let (recurse, type_) = match source {
+        bsconfig::PackageSource {
+            subdirs: Some(bsconfig::Subdirs::Recurse(subdirs)),
+            type_,
+            ..
+        } => (subdirs.to_owned(), type_),
+        bsconfig::PackageSource { type_, .. } => (false, type_),
+    };
+
+    let path_dir = Path::new(&source.dir);
+    // don't include dev sources for now; since dev sources are skipped entirely,
+    // there is no separate error case for them here
+    if type_ != &Some("dev".to_string()) {
+        match read_folders(filter, package_dir, path_dir, recurse) {
+            Ok(files) => map.extend(files),
+            Err(_e) => println!("Could not read folder: {}...", path_dir.to_string_lossy()),
+        }
+    }
+
+    map
+}
+
+/// This takes the tree of packages, and finds all the source files for each, adding them to the
+/// respective packages.
+fn extend_with_children(
+    filter: &Option<regex::Regex>,
+    mut build: AHashMap<String, Package>,
+) -> AHashMap<String, Package> {
+    for (_key, value) in build.iter_mut() {
+        let mut map: AHashMap<String, SourceFileMeta> = AHashMap::new();
+        value
+            .source_folders
+            .par_iter()
+            .map(|source| get_source_files(Path::new(&value.path), filter, source))
+            .collect::<Vec<AHashMap<String, SourceFileMeta>>>()
+            .into_iter()
+            .for_each(|source| map.extend(source));
+
+        let mut modules = AHashSet::from_iter(
+            map.keys()
+                .map(|key| helpers::file_path_to_module_name(key, &value.namespace)),
+        );
+        match value.namespace.to_owned() {
+            Namespace::Namespace(namespace) => {
+                let _ = modules.insert(namespace);
+            }
+            Namespace::NamespaceWithEntry { namespace, entry: _ } => {
+                let _ = modules.insert("@".to_string() + &namespace);
+            }
+            Namespace::NoNamespace => (),
+        }
+        value.modules = Some(modules);
+        let mut dirs = AHashSet::new();
+        map.keys().for_each(|path| {
+            let dir = std::path::Path::new(&path).parent().unwrap();
+            dirs.insert(dir.to_owned());
+        });
+        value.dirs = Some(dirs);
+        value.source_files = Some(map);
+    }
+    build
+}
+
+/// Make turns a folder that should contain a bsconfig into a tree of Packages.
+/// It does so in two steps:
+/// 1. Get all the packages parsed, and take all the source folders from the bsconfig
+/// 2. Take the (by then deduplicated) packages, and find all the '.re', '.res', '.ml' and
+///    interface files.
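+///
+/// For example (hypothetical call): `make(&None, ".", &None)` builds the package tree for
+/// the project in the current directory, with no file filter and no workspace root.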
+/// The two step process is there to reduce IO overhead +pub fn make( + filter: &Option, + root_folder: &str, + workspace_root: &Option, +) -> AHashMap { + let map = read_packages(root_folder, workspace_root.to_owned()); + + /* Once we have the deduplicated packages, we can add the source files for each - to minimize + * the IO */ + let result = extend_with_children(filter, map); + result.values().for_each(|package| match &package.dirs { + Some(dirs) => dirs.iter().for_each(|dir| { + let _ = std::fs::create_dir_all(std::path::Path::new(&package.get_bs_build_path()).join(dir)); + }), + None => (), + }); + result +} + +pub fn get_package_name(path: &str) -> String { + let bsconfig = read_bsconfig(path); + bsconfig.name +} + +pub fn parse_packages(build_state: &mut BuildState) { + build_state + .packages + .clone() + .iter() + .for_each(|(package_name, package)| { + debug!("Parsing package: {}", package_name); + if let Some(package_modules) = package.modules.to_owned() { + build_state.module_names.extend(package_modules) + } + let build_path_abs = package.get_build_path(); + let bs_build_path = package.get_bs_build_path(); + helpers::create_build_path(&build_path_abs); + helpers::create_build_path(&bs_build_path); + + package.namespace.to_suffix().iter().for_each(|namespace| { + // generate the mlmap "AST" file for modules that have a namespace configured + let source_files = match package.source_files.to_owned() { + Some(source_files) => source_files + .keys() + .map(|key| key.to_owned()) + .collect::>(), + None => unreachable!(), + }; + let entry = match &package.namespace { + packages::Namespace::NamespaceWithEntry { entry, namespace: _ } => Some(entry), + _ => None, + }; + + let depending_modules = source_files + .iter() + .map(|path| helpers::file_path_to_module_name(path, &packages::Namespace::NoNamespace)) + .filter(|module_name| { + if let Some(entry) = entry { + module_name != entry + } else { + true + } + }) + .filter(|module_name| helpers::is_non_exotic_module_name(module_name)) + .collect::>(); + + let mlmap = namespaces::gen_mlmap(package, namespace, &depending_modules); + + // mlmap will be compiled in the AST generation step + // compile_mlmap(&package, namespace, &project_root); + let deps = source_files + .iter() + .filter(|path| { + helpers::is_non_exotic_module_name(&helpers::file_path_to_module_name( + path, + &packages::Namespace::NoNamespace, + )) + }) + .map(|path| helpers::file_path_to_module_name(path, &package.namespace)) + .filter(|module_name| { + if let Some(entry) = entry { + module_name != entry + } else { + true + } + }) + .collect::>(); + + build_state.insert_module( + &helpers::file_path_to_module_name(&mlmap.to_owned(), &packages::Namespace::NoNamespace), + Module { + source_type: SourceType::MlMap(MlMap { parse_dirty: false }), + deps, + dependents: AHashSet::new(), + package_name: package.name.to_owned(), + compile_dirty: false, + last_compiled_cmt: None, + last_compiled_cmi: None, + }, + ); + }); + + debug!("Building source file-tree for package: {}", package.name); + match &package.source_files { + None => (), + Some(source_files) => source_files.iter().for_each(|(file, metadata)| { + let namespace = package.namespace.to_owned(); + + let file_buf = PathBuf::from(file); + let extension = file_buf.extension().unwrap().to_str().unwrap(); + let module_name = helpers::file_path_to_module_name(&file.to_owned(), &namespace); + + if helpers::is_implementation_file(extension) { + build_state + .modules + .entry(module_name.to_string()) + .and_modify(|module| { + 
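+                        // an entry can already exist, e.g. when the interface file was indexed
+                        // first with a placeholder implementation; a different existing path
+                        // indicates a genuine duplicate and is rejected below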
if let SourceType::SourceFile(ref mut source_file) = module.source_type { + if &source_file.implementation.path != file { + error!("Duplicate files found for module: {}", &module_name); + error!("file 1: {}", &source_file.implementation.path); + error!("file 2: {}", &file); + + panic!("Unable to continue... See log output above..."); + } + source_file.implementation.path = file.to_owned(); + source_file.implementation.last_modified = metadata.modified; + source_file.implementation.parse_dirty = true; + } + }) + .or_insert(Module { + source_type: SourceType::SourceFile(SourceFile { + implementation: Implementation { + path: file.to_owned(), + parse_state: ParseState::Pending, + compile_state: CompileState::Pending, + last_modified: metadata.modified, + parse_dirty: true, + }, + interface: None, + }), + deps: AHashSet::new(), + dependents: AHashSet::new(), + package_name: package.name.to_owned(), + compile_dirty: true, + last_compiled_cmt: None, + last_compiled_cmi: None, + }); + } else { + // remove last character of string: resi -> res, rei -> re, mli -> ml + let mut implementation_filename = file.to_owned(); + implementation_filename.pop(); + match source_files.get(&implementation_filename) { + None => { + println!( + "{}Warning: No implementation file found for interface file (skipping): {}", + LINE_CLEAR, file + ) + } + Some(_) => { + build_state + .modules + .entry(module_name.to_string()) + .and_modify(|module| { + if let SourceType::SourceFile(ref mut source_file) = + module.source_type + { + source_file.interface = Some(Interface { + path: file.to_owned(), + parse_state: ParseState::Pending, + compile_state: CompileState::Pending, + last_modified: metadata.modified, + parse_dirty: true, + }); + } + }) + .or_insert(Module { + source_type: SourceType::SourceFile(SourceFile { + // this will be overwritten later + implementation: Implementation { + path: implementation_filename.to_string(), + parse_state: ParseState::Pending, + compile_state: CompileState::Pending, + last_modified: metadata.modified, + parse_dirty: true, + }, + interface: Some(Interface { + path: file.to_owned(), + parse_state: ParseState::Pending, + compile_state: CompileState::Pending, + last_modified: metadata.modified, + parse_dirty: true, + }), + }), + deps: AHashSet::new(), + dependents: AHashSet::new(), + package_name: package.name.to_owned(), + compile_dirty: true, + last_compiled_cmt: None, + last_compiled_cmi: None, + }); + } + } + } + }), + } + }); +} + +impl Package { + pub fn get_jsx_args(&self) -> Vec { + self.bsconfig.get_jsx_args() + } + + pub fn get_jsx_mode_args(&self) -> Vec { + self.bsconfig.get_jsx_mode_args() + } + + pub fn get_jsx_module_args(&self) -> Vec { + self.bsconfig.get_jsx_module_args() + } + + pub fn get_uncurried_args(&self, version: &str, root_package: &packages::Package) -> Vec { + root_package.bsconfig.get_uncurried_args(version) + } +} + +fn get_unallowed_dependents( + packages: &AHashMap, + package_name: &String, + dependencies: &Vec, +) -> Option { + for deps_package_name in dependencies { + if let Some(deps_package) = packages.get(deps_package_name) { + let deps_allowed_dependents = deps_package.bsconfig.allowed_dependents.to_owned(); + if let Some(allowed_dependents) = deps_allowed_dependents { + if !allowed_dependents.contains(package_name) { + return Some(deps_package_name.to_string()); + } + } + } + } + None +} +#[derive(Debug, Clone)] +struct UnallowedDependency { + bs_deps: Vec, + pinned_deps: Vec, + bs_dev_deps: Vec, +} + +pub fn validate_packages_dependencies(packages: 
&AHashMap) -> bool { + let mut detected_unallowed_dependencies: AHashMap = AHashMap::new(); + + for (package_name, package) in packages { + let bs_dependencies = &package.bsconfig.bs_dependencies.to_owned().unwrap_or(vec![]); + let pinned_dependencies = &package.bsconfig.pinned_dependencies.to_owned().unwrap_or(vec![]); + let dev_dependencies = &package.bsconfig.bs_dev_dependencies.to_owned().unwrap_or(vec![]); + + vec![ + ("bs-dependencies", bs_dependencies), + ("pinned-dependencies", pinned_dependencies), + ("bs-dev-dependencies", dev_dependencies), + ] + .iter() + .for_each(|(dependency_type, dependencies)| { + if let Some(unallowed_dependency_name) = + get_unallowed_dependents(packages, package_name, dependencies) + { + let empty_unallowed_deps = UnallowedDependency { + bs_deps: vec![], + pinned_deps: vec![], + bs_dev_deps: vec![], + }; + + let unallowed_dependency = detected_unallowed_dependencies.entry(String::from(package_name)); + let value = unallowed_dependency.or_insert_with(|| empty_unallowed_deps); + match *dependency_type { + "bs-dependencies" => value.bs_deps.push(unallowed_dependency_name), + "pinned-dependencies" => value.pinned_deps.push(unallowed_dependency_name), + "bs-dev-dependencies" => value.bs_dev_deps.push(unallowed_dependency_name), + _ => (), + } + } + }); + } + for (package_name, unallowed_deps) in detected_unallowed_dependencies.iter() { + println!( + "\n{}: {} has the following unallowed dependencies:", + console::style("Error").red(), + console::style(package_name).bold() + ); + + vec![ + ("bs-dependencies", unallowed_deps.bs_deps.to_owned()), + ("pinned-dependencies", unallowed_deps.pinned_deps.to_owned()), + ("bs-dev-dependencies", unallowed_deps.bs_dev_deps.to_owned()), + ] + .iter() + .for_each(|(deps_type, map)| { + if !map.is_empty() { + println!( + "{} dependencies: {}", + console::style(deps_type).bold().dim(), + console::style(map.join(" \n -")).bold().dim() + ); + } + }); + } + let has_any_unallowed_dependent = detected_unallowed_dependencies.len() > 0; + + if has_any_unallowed_dependent { + println!( + "\nUpdate the {} value in the {} of the unallowed dependencies to solve the issue!", + console::style("unallowed_dependents").bold().dim(), + console::style("bsconfig.json").bold().dim() + ) + } + !has_any_unallowed_dependent +} + +#[cfg(test)] +mod test { + use crate::bsconfig::Source; + use ahash::{AHashMap, AHashSet}; + + use super::{Namespace, Package}; + + fn create_package( + name: String, + bs_deps: Vec, + pinned_deps: Vec, + dev_deps: Vec, + allowed_dependents: Option>, + ) -> Package { + return Package { + name: name.clone(), + bsconfig: crate::bsconfig::Config { + name: name.clone(), + sources: crate::bsconfig::OneOrMore::Single(Source::Shorthand(String::from("Source"))), + package_specs: None, + warnings: None, + suffix: None, + pinned_dependencies: Some(pinned_deps), + bs_dependencies: Some(bs_deps), + bs_dev_dependencies: Some(dev_deps), + ppx_flags: None, + bsc_flags: None, + reason: None, + namespace: None, + jsx: None, + uncurried: None, + namespace_entry: None, + allowed_dependents, + }, + source_folders: AHashSet::new(), + source_files: None, + namespace: Namespace::Namespace(String::from("Package1")), + modules: None, + path: String::from("./something"), + dirs: None, + is_pinned_dep: false, + is_root: false, + }; + } + #[test] + fn should_return_false_with_invalid_parents_as_bs_dependencies() { + let mut packages: AHashMap = AHashMap::new(); + packages.insert( + String::from("Package1"), + create_package( + 
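+                // Package1 depends on Package2 via bs-dependencies, but Package2 only
+                // allows Package3 as a dependent, so validation must fail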
String::from("Package1"), + vec![String::from("Package2")], + vec![], + vec![], + None, + ), + ); + packages.insert( + String::from("Package2"), + create_package( + String::from("Package2"), + vec![], + vec![], + vec![], + Some(vec![String::from("Package3")]), + ), + ); + + let is_valid = super::validate_packages_dependencies(&packages); + assert_eq!(is_valid, false) + } + + #[test] + fn should_return_false_with_invalid_parents_as_pinned_dependencies() { + let mut packages: AHashMap = AHashMap::new(); + packages.insert( + String::from("Package1"), + create_package( + String::from("Package1"), + vec![], + vec![String::from("Package2")], + vec![], + None, + ), + ); + packages.insert( + String::from("Package2"), + create_package( + String::from("Package2"), + vec![], + vec![], + vec![], + Some(vec![String::from("Package3")]), + ), + ); + + let is_valid = super::validate_packages_dependencies(&packages); + assert_eq!(is_valid, false) + } + + #[test] + fn should_return_false_with_invalid_parents_as_dev_dependencies() { + let mut packages: AHashMap = AHashMap::new(); + packages.insert( + String::from("Package1"), + create_package( + String::from("Package1"), + vec![], + vec![], + vec![String::from("Package2")], + None, + ), + ); + packages.insert( + String::from("Package2"), + create_package( + String::from("Package2"), + vec![], + vec![], + vec![], + Some(vec![String::from("Package3")]), + ), + ); + + let is_valid = super::validate_packages_dependencies(&packages); + assert_eq!(is_valid, false) + } + + #[test] + fn should_return_true_with_no_invalid_parent() { + let mut packages: AHashMap = AHashMap::new(); + packages.insert( + String::from("Package1"), + create_package( + String::from("Package1"), + vec![String::from("Package2")], + vec![], + vec![], + None, + ), + ); + packages.insert( + String::from("Package2"), + create_package( + String::from("Package2"), + vec![], + vec![], + vec![], + Some(vec![String::from("Package1")]), + ), + ); + + let is_valid = super::validate_packages_dependencies(&packages); + assert_eq!(is_valid, true) + } +} diff --git a/rewatch/src/build/parse.rs b/rewatch/src/build/parse.rs new file mode 100644 index 0000000000..4409d65c34 --- /dev/null +++ b/rewatch/src/build/parse.rs @@ -0,0 +1,368 @@ +use super::build_types::*; +use super::logs; +use super::namespaces; +use super::packages; +use crate::bsconfig; +use crate::bsconfig::OneOrMore; +use crate::helpers; +use ahash::AHashSet; +use log::debug; +use rayon::prelude::*; +use std::path::{Path, PathBuf}; +use std::process::Command; + +pub fn generate_asts( + build_state: &mut BuildState, + inc: impl Fn() + std::marker::Sync, +) -> Result { + let mut has_failure = false; + let mut stderr = "".to_string(); + + build_state + .modules + .par_iter() + .map(|(module_name, module)| { + debug!("Generating AST for module: {}", module_name); + let package = build_state + .get_package(&module.package_name) + .expect("Package not found"); + match &module.source_type { + SourceType::MlMap(_mlmap) => { + let path = package.get_mlmap_path(); + (module_name.to_owned(), Ok((path, None)), Ok(None), false) + } + + SourceType::SourceFile(source_file) => { + let root_package = build_state.get_package(&build_state.root_config_name).unwrap(); + + let (ast_result, iast_result, dirty) = if source_file.implementation.parse_dirty + || source_file + .interface + .as_ref() + .map(|i| i.parse_dirty) + .unwrap_or(false) + { + inc(); + let ast_result = generate_ast( + package.to_owned(), + root_package.to_owned(), + 
&source_file.implementation.path.to_owned(), + &build_state.rescript_version, + &build_state.bsc_path, + &build_state.workspace_root, + ); + + let iast_result = match source_file.interface.as_ref().map(|i| i.path.to_owned()) { + Some(interface_file_path) => generate_ast( + package.to_owned(), + root_package.to_owned(), + &interface_file_path.to_owned(), + &build_state.rescript_version, + &build_state.bsc_path, + &build_state.workspace_root, + ) + .map(Some), + _ => Ok(None), + }; + + (ast_result, iast_result, true) + } else { + ( + Ok(( + helpers::get_basename(&source_file.implementation.path).to_string() + ".ast", + None, + )), + Ok(source_file + .interface + .as_ref() + .map(|i| (helpers::get_basename(&i.path).to_string() + ".iast", None))), + false, + ) + }; + + (module_name.to_owned(), ast_result, iast_result, dirty) + } + } + }) + .collect::), String>, + Result)>, String>, + bool, + )>>() + .into_iter() + .for_each(|(module_name, ast_result, iast_result, is_dirty)| { + if let Some(module) = build_state.modules.get_mut(&module_name) { + // if the module is dirty, mark it also compile_dirty + // do NOT set to false if the module is not parse_dirty, it needs to keep + // the compile_dirty flag if it was set before + if is_dirty { + module.compile_dirty = true; + } + let package = build_state + .packages + .get(&module.package_name) + .expect("Package not found"); + if let SourceType::SourceFile(ref mut source_file) = module.source_type { + // We get Err(x) when there is a parse error. When it's Ok(_, Some( + // stderr_warnings )), the outputs are warnings + match ast_result { + // In case of a pinned (internal) dependency, we want to keep on + // propagating the warning with every compile. So we mark it as dirty for + // the next round + Ok((_path, Some(stderr_warnings))) if package.is_pinned_dep => { + source_file.implementation.parse_state = ParseState::Warning; + source_file.implementation.parse_dirty = true; + if let Some(interface) = source_file.interface.as_mut() { + interface.parse_dirty = false; + } + logs::append(package, &stderr_warnings); + stderr.push_str(&stderr_warnings); + } + Ok((_path, Some(_))) | Ok((_path, None)) => { + // If we do have stderr_warnings here, the file is not a pinned + // dependency (so some external dep). We can ignore those + source_file.implementation.parse_state = ParseState::Success; + source_file.implementation.parse_dirty = false; + if let Some(interface) = source_file.interface.as_mut() { + interface.parse_dirty = false; + } + } + Err(err) => { + // Some compilation error + source_file.implementation.parse_state = ParseState::ParseError; + source_file.implementation.parse_dirty = true; + logs::append(package, &err); + has_failure = true; + stderr.push_str(&err); + } + }; + + // We get Err(x) when there is a parse error. When it's Ok(_, Some(( _path, + // stderr_warnings ))), the outputs are warnings + match iast_result { + // In case of a pinned (internal) dependency, we want to keep on + // propagating the warning with every compile. So we mark it as dirty for + // the next round + Ok(Some((_path, Some(stderr_warnings)))) if package.is_pinned_dep => { + if let Some(interface) = source_file.interface.as_mut() { + interface.parse_state = ParseState::Warning; + interface.parse_dirty = true; + } + logs::append(package, &stderr_warnings); + stderr.push_str(&stderr_warnings); + } + Ok(Some((_, None))) | Ok(Some((_, Some(_)))) => { + // If we do have stderr_warnings here, the file is not a pinned + // dependency (so some external dep). 
We can ignore those + if let Some(interface) = source_file.interface.as_mut() { + interface.parse_state = ParseState::Success; + interface.parse_dirty = false; + } + } + Err(err) => { + // Some compilation error + if let Some(interface) = source_file.interface.as_mut() { + interface.parse_state = ParseState::ParseError; + interface.parse_dirty = true; + } + logs::append(package, &err); + has_failure = true; + stderr.push_str(&err); + } + Ok(None) => { + // The file had no interface file associated + () + } + } + }; + } + }); + + // compile the mlmaps of dirty modules + // first collect dirty packages + let dirty_packages = build_state + .modules + .iter() + .filter(|(_, module)| module.compile_dirty) + .map(|(_, module)| module.package_name.clone()) + .collect::>(); + + build_state.modules.iter_mut().for_each(|(module_name, module)| { + let is_dirty = match &module.source_type { + SourceType::MlMap(_) => { + if dirty_packages.contains(&module.package_name) { + let package = build_state + .packages + .get(&module.package_name) + .expect("Package not found"); + // probably better to do this in a different function + // specific to compiling mlmaps + let compile_path = package.get_mlmap_compile_path(); + let mlmap_hash = helpers::compute_file_hash(&compile_path); + namespaces::compile_mlmap(package, module_name, &build_state.bsc_path); + let mlmap_hash_after = helpers::compute_file_hash(&compile_path); + + match (mlmap_hash, mlmap_hash_after) { + (Some(digest), Some(digest_after)) => !digest.eq(&digest_after), + _ => true, + } + } else { + false + } + } + _ => false, + }; + if is_dirty { + module.compile_dirty = is_dirty; + } + }); + + if has_failure { + Err(stderr) + } else { + Ok(stderr) + } +} + +pub fn parser_args( + config: &bsconfig::Config, + root_config: &bsconfig::Config, + filename: &str, + version: &str, + workspace_root: &Option, + root_path: &str, + contents: &str, +) -> (String, Vec) { + let file = &filename.to_string(); + let path = PathBuf::from(filename); + let ast_extension = path_to_ast_extension(&path); + let ast_path = (helpers::get_basename(&file.to_string()).to_owned()) + ast_extension; + let ppx_flags = bsconfig::flatten_ppx_flags( + &if let Some(workspace_root) = workspace_root { + format!("{}/node_modules", &workspace_root) + } else { + format!("{}/node_modules", &root_path) + }, + &filter_ppx_flags(&config.ppx_flags, contents), + &config.name, + ); + let jsx_args = root_config.get_jsx_args(); + let jsx_module_args = root_config.get_jsx_module_args(); + let jsx_mode_args = root_config.get_jsx_mode_args(); + let uncurried_args = root_config.get_uncurried_args(version); + let bsc_flags = bsconfig::flatten_flags(&config.bsc_flags); + + let file = "../../".to_string() + file; + ( + ast_path.to_string(), + vec![ + vec!["-bs-v".to_string(), format!("{}", version)], + ppx_flags, + jsx_args, + jsx_module_args, + jsx_mode_args, + uncurried_args, + bsc_flags, + vec![ + "-absname".to_string(), + "-bs-ast".to_string(), + "-o".to_string(), + ast_path.to_string(), + file, + ], + ] + .concat(), + ) +} + +fn generate_ast( + package: packages::Package, + root_package: packages::Package, + filename: &str, + version: &str, + bsc_path: &str, + workspace_root: &Option, +) -> Result<(String, Option), String> { + let file_path = PathBuf::from(&package.path).join(filename); + let contents = helpers::read_file(&file_path).expect("Error reading file"); + + let build_path_abs = package.get_build_path(); + let (ast_path, parser_args) = parser_args( + &package.bsconfig, + &root_package.bsconfig, 
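+        // jsx and uncurried args are taken from the root package's config, while ppx and
+        // bsc flags come from the package's own config (see parser_args above)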
+ filename, + version, + workspace_root, + &root_package.path, + &contents, + ); + + /* Create .ast */ + if let Some(res_to_ast) = Some( + Command::new(bsc_path) + .current_dir(&build_path_abs) + .args(parser_args) + .output() + .expect("Error converting .res to .ast"), + ) { + let stderr = std::str::from_utf8(&res_to_ast.stderr).expect("Expect StdErr to be non-null"); + if helpers::contains_ascii_characters(stderr) { + if res_to_ast.status.success() { + Ok((ast_path, Some(stderr.to_string()))) + } else { + Err(format!("Error in {}:\n{}", package.name, stderr)) + } + } else { + Ok((ast_path, None)) + } + } else { + println!("Parsing file {}...", filename); + Err(format!( + "Could not find canonicalize_string_path for file {} in package {}", + filename, package.name + )) + } +} + +fn path_to_ast_extension(path: &Path) -> &str { + let extension = path.extension().unwrap().to_str().unwrap(); + if helpers::is_interface_ast_file(extension) { + ".iast" + } else { + ".ast" + } +} + +fn include_ppx(flag: &str, contents: &str) -> bool { + if flag.contains("bisect") { + return std::env::var("BISECT_ENABLE").is_ok(); + } else if (flag.contains("graphql-ppx") || flag.contains("graphql_ppx")) && !contents.contains("%graphql") + { + return false; + } else if flag.contains("spice") && !contents.contains("@spice") { + return false; + } else if flag.contains("rescript-relay") && !contents.contains("%relay") { + return false; + } else if flag.contains("re-formality") && !contents.contains("%form") { + return false; + } + return true; +} + +fn filter_ppx_flags( + ppx_flags: &Option>>, + contents: &str, +) -> Option>> { + // get the environment variable "BISECT_ENABLE" if it exists set the filter to "bisect" + ppx_flags.as_ref().map(|flags| { + flags + .iter() + .filter(|flag| match flag { + bsconfig::OneOrMore::Single(str) => include_ppx(str, contents), + bsconfig::OneOrMore::Multiple(str) => include_ppx(str.first().unwrap(), contents), + }) + .map(|x| x.to_owned()) + .collect::>>() + }) +} diff --git a/rewatch/src/build/read_compile_state.rs b/rewatch/src/build/read_compile_state.rs new file mode 100644 index 0000000000..7d983f5a50 --- /dev/null +++ b/rewatch/src/build/read_compile_state.rs @@ -0,0 +1,160 @@ +use super::build_types::*; +use super::packages; +use crate::helpers; +use ahash::{AHashMap, AHashSet}; +use rayon::prelude::*; +use std::fs; +use std::path::PathBuf; +use std::time::SystemTime; + +pub fn read(build_state: &mut BuildState) -> CompileAssetsState { + let mut ast_modules: AHashMap = AHashMap::new(); + let mut cmi_modules: AHashMap = AHashMap::new(); + let mut cmt_modules: AHashMap = AHashMap::new(); + let mut ast_rescript_file_locations = AHashSet::new(); + + let mut rescript_file_locations = build_state + .modules + .values() + .filter_map(|module| match &module.source_type { + SourceType::SourceFile(source_file) => { + let package = build_state.packages.get(&module.package_name).unwrap(); + + Some( + PathBuf::from(&package.path) + .join(&source_file.implementation.path) + .to_string_lossy() + .to_string(), + ) + } + _ => None, + }) + .collect::>(); + + rescript_file_locations.extend( + build_state + .modules + .values() + .filter_map(|module| { + let package = build_state.packages.get(&module.package_name).unwrap(); + module.get_interface().as_ref().map(|interface| { + PathBuf::from(&package.path) + .join(&interface.path) + .to_string_lossy() + .to_string() + }) + }) + .collect::>(), + ); + + // scan all ast files in all packages + let compile_assets = build_state + .packages + 
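+        // scan every package's build directory in parallel; each ast/iast/cmi/cmt
+        // entry is collected together with its modification time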
.par_iter() + .map(|(_, package)| { + let read_dir = fs::read_dir(std::path::Path::new(&package.get_build_path())).unwrap(); + read_dir + .filter_map(|entry| match entry { + Ok(entry) => { + let path = entry.path(); + let extension = path.extension().and_then(|e| e.to_str()); + match extension { + Some(ext) => match ext { + "iast" | "ast" | "cmi" | "cmt" => Some(( + path.to_owned(), + entry.metadata().unwrap().modified().unwrap(), + ext.to_owned(), + package.name.to_owned(), + package.namespace.to_owned(), + package.is_root, + )), + _ => None, + }, + None => None, + } + } + Err(_) => None, + }) + .collect::>() + }) + .flatten() + .collect::>(); + + compile_assets.iter().for_each( + |(path, last_modified, extension, package_name, package_namespace, package_is_root)| { + match extension.as_str() { + "iast" | "ast" => { + let module_name = + helpers::file_path_to_module_name(path.to_str().unwrap(), package_namespace); + + let ast_file_path = path.to_str().unwrap().to_owned(); + let res_file_path = get_res_path_from_ast(&ast_file_path); + let root_package = build_state + .packages + .get(&build_state.root_config_name) + .expect("Could not find root package"); + if let Some(res_file_path) = res_file_path { + let _ = ast_modules.insert( + res_file_path.to_owned(), + AstModule { + module_name, + package_name: package_name.to_owned(), + namespace: package_namespace.to_owned(), + last_modified: last_modified.to_owned(), + ast_file_path, + is_root: *package_is_root, + suffix: root_package.bsconfig.get_suffix(), + }, + ); + let _ = ast_rescript_file_locations.insert(res_file_path); + } + } + "cmi" => { + let module_name = helpers::file_path_to_module_name( + path.to_str().unwrap(), + // we don't want to include a namespace here because the CMI file + // already includes a namespace + &packages::Namespace::NoNamespace, + ); + cmi_modules.insert(module_name, last_modified.to_owned()); + } + "cmt" => { + let module_name = helpers::file_path_to_module_name( + path.to_str().unwrap(), + // we don't want to include a namespace here because the CMI file + // already includes a namespace + &packages::Namespace::NoNamespace, + ); + cmt_modules.insert(module_name, last_modified.to_owned()); + } + _ => { + // println!("other extension: {:?}", other); + } + } + }, + ); + + CompileAssetsState { + ast_modules, + cmi_modules, + cmt_modules, + ast_rescript_file_locations, + rescript_file_locations, + } +} + +fn get_res_path_from_ast(ast_file: &str) -> Option { + if let Ok(lines) = helpers::read_lines(ast_file.to_string()) { + // we skip the first line with is some null characters + // the following lines in the AST are the dependency modules + // we stop when we hit a line that starts with a "/", this is the path of the file. + // this is the point where the dependencies end and the actual AST starts + for line in lines.skip(1) { + match line { + Ok(line) if line.trim_start().starts_with('/') => return Some(line), + _ => (), + } + } + } + None +} diff --git a/rewatch/src/cmd.rs b/rewatch/src/cmd.rs new file mode 100644 index 0000000000..acc3c10fa6 --- /dev/null +++ b/rewatch/src/cmd.rs @@ -0,0 +1,56 @@ +use crate::helpers::emojis::*; +use console::style; +use std::io::{BufRead, BufReader}; +use std::process::{Command, Stdio}; +use std::time::Instant; + +pub fn run(command_string: String) { + let start_subcommand = Instant::now(); + + print!( + "{} {}Running subcommand... 
\n{}\n", + style("[...]").bold().dim(), + COMMAND, + style("────────"), + ); + + let parsed_command = command_string.split_whitespace().collect::>(); + let (command, params) = parsed_command.split_at(1); + + let mut cmd = Command::new(command[0]) + .args(params) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .expect("failed to execute process"); + + { + let stdout = cmd.stdout.as_mut().unwrap(); + let stderr = cmd.stderr.as_mut().unwrap(); + + let stdout_reader = BufReader::new(stdout); + let stderr_reader = BufReader::new(stderr); + + let stdout_lines = stdout_reader.lines(); + let std_err = stderr_reader.lines(); + + for line in stdout_lines { + println!("{}", line.unwrap()); + } + + for line in std_err { + println!("{}", line.unwrap()); + } + + let subcommand_duration = start_subcommand.elapsed(); + println!( + "{}{} {}Ran subcommand in {:.2}s", + LINE_CLEAR, + style("[...]").bold().dim(), + COMMAND, + subcommand_duration.as_secs_f64(), + ); + } + + cmd.wait().unwrap(); +} diff --git a/rewatch/src/helpers.rs b/rewatch/src/helpers.rs new file mode 100644 index 0000000000..0202f111d1 --- /dev/null +++ b/rewatch/src/helpers.rs @@ -0,0 +1,333 @@ +use crate::build::packages; +use std::ffi::OsString; +use std::fs; +use std::fs::File; +use std::io::Read; +use std::io::{self, BufRead}; +use std::path::{Component, Path, PathBuf}; +use std::process::Command; +use std::time::{SystemTime, UNIX_EPOCH}; + +pub type StdErr = String; + +pub mod emojis { + use console::Emoji; + pub static COMMAND: Emoji<'_, '_> = Emoji("🏃 ", ""); + pub static TREE: Emoji<'_, '_> = Emoji("📦 ", ""); + pub static SWEEP: Emoji<'_, '_> = Emoji("🧹 ", ""); + pub static LOOKING_GLASS: Emoji<'_, '_> = Emoji("🕵️ ", ""); + pub static CODE: Emoji<'_, '_> = Emoji("🧱 ", ""); + pub static SWORDS: Emoji<'_, '_> = Emoji("🤺 ️", ""); + pub static DEPS: Emoji<'_, '_> = Emoji("️🌴 ", ""); + pub static CHECKMARK: Emoji<'_, '_> = Emoji("️✅ ", ""); + pub static CROSS: Emoji<'_, '_> = Emoji("️🛑 ", ""); + pub static SPARKLES: Emoji<'_, '_> = Emoji("✨ ", ""); + pub static COMPILE_STATE: Emoji<'_, '_> = Emoji("📝 ", ""); + pub static LINE_CLEAR: &str = "\x1b[2K\r"; +} + +pub trait LexicalAbsolute { + fn to_lexical_absolute(&self) -> std::io::Result; +} + +impl LexicalAbsolute for Path { + fn to_lexical_absolute(&self) -> std::io::Result { + let mut absolute = if self.is_absolute() { + PathBuf::new() + } else { + std::env::current_dir()? + }; + for component in self.components() { + match component { + Component::CurDir => {} + Component::ParentDir => { + absolute.pop(); + } + component => absolute.push(component.as_os_str()), + } + } + Ok(absolute) + } +} + +pub fn package_path(root: &str, package_name: &str) -> String { + format!("{}/node_modules/{}", root, package_name) +} + +pub fn get_abs_path(path: &str) -> String { + let abs_path_buf = PathBuf::from(path); + + return abs_path_buf + .to_lexical_absolute() + .expect("Could not canonicalize") + .to_str() + .expect("Could not canonicalize") + .to_string(); +} + +pub fn get_basename(path: &str) -> String { + let path_buf = PathBuf::from(path); + return path_buf + .file_stem() + .expect("Could not get basename") + .to_str() + .expect("Could not get basename 2") + .to_string(); +} + +pub fn change_extension(path: &str, new_extension: &str) -> String { + let path_buf = PathBuf::from(path); + return path_buf + .with_extension(new_extension) + .to_str() + .expect("Could not change extension") + .to_string(); +} + +/// Capitalizes the first character in s. 
+fn capitalize(s: &str) -> String { + let mut c = s.chars(); + match c.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + c.as_str(), + } +} + +fn add_suffix(base: &str, namespace: &packages::Namespace) -> String { + match namespace { + packages::Namespace::NamespaceWithEntry { namespace: _, entry } if entry == base => base.to_string(), + packages::Namespace::Namespace(_) + | packages::Namespace::NamespaceWithEntry { + namespace: _, + entry: _, + } => base.to_string() + "-" + &namespace.to_suffix().unwrap(), + packages::Namespace::NoNamespace => base.to_string(), + } +} + +pub fn module_name_with_namespace(module_name: &str, namespace: &packages::Namespace) -> String { + capitalize(&add_suffix(module_name, namespace)) +} + +// this doesn't capitalize the module name! if the rescript name of the file is "foo.res" the +// compiler assets are foo-Namespace.cmt and foo-Namespace.cmj, but the module name is Foo +pub fn file_path_to_compiler_asset_basename(path: &str, namespace: &packages::Namespace) -> String { + let base = get_basename(path); + add_suffix(&base, namespace) +} + +pub fn file_path_to_module_name(path: &str, namespace: &packages::Namespace) -> String { + capitalize(&file_path_to_compiler_asset_basename(path, namespace)) +} + +pub fn contains_ascii_characters(str: &str) -> bool { + for chr in str.chars() { + if chr.is_ascii_alphanumeric() { + return true; + } + } + false +} + +pub fn create_build_path(build_path: &str) { + fs::DirBuilder::new() + .recursive(true) + .create(PathBuf::from(build_path.to_string())) + .unwrap(); +} + +pub fn get_bsc(root_path: &str, workspace_root: Option) -> String { + let subfolder = match (std::env::consts::OS, std::env::consts::ARCH) { + ("macos", "aarch64") => "darwinarm64", + ("macos", _) => "darwin", + ("linux", _) => "linux", + ("windows", _) => "win32", + _ => panic!("Unsupported architecture"), + }; + + match ( + PathBuf::from(format!( + "{}/node_modules/rescript/{}/bsc.exe", + root_path, subfolder + )) + .canonicalize(), + workspace_root.map(|workspace_root| { + PathBuf::from(format!( + "{}/node_modules/rescript/{}/bsc.exe", + workspace_root, subfolder + )) + .canonicalize() + }), + ) { + (Ok(path), _) => path, + (_, Some(Ok(path))) => path, + _ => panic!("Could not find bsc.exe"), + } + .to_string_lossy() + .to_string() +} + +pub fn string_ends_with_any(s: &Path, suffixes: &[&str]) -> bool { + suffixes + .iter() + .any(|&suffix| s.extension().unwrap_or(&OsString::new()).to_str().unwrap_or("") == suffix) +} + +pub fn get_compiler_asset( + package: &packages::Package, + namespace: &packages::Namespace, + source_file: &str, + extension: &str, +) -> String { + let namespace = match extension { + "ast" | "iast" => &packages::Namespace::NoNamespace, + _ => namespace, + }; + package.get_build_path() + + "/" + + &file_path_to_compiler_asset_basename(source_file, namespace) + + "." 
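+        // e.g. "<build_path>/Foo-MyNs.cmt" for a hypothetical Foo.res under namespace MyNs;
+        // ast and iast assets are never namespaced (see the match above)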
+ + extension +} + +pub fn canonicalize_string_path(path: &str) -> Option { + return Path::new(path) + .canonicalize() + .ok() + .map(|path| path.to_str().expect("Could not canonicalize").to_string()); +} + +pub fn get_bs_compiler_asset( + package: &packages::Package, + namespace: &packages::Namespace, + source_file: &str, + extension: &str, +) -> String { + let namespace = match extension { + "ast" | "iast" => &packages::Namespace::NoNamespace, + _ => namespace, + }; + + let dir = std::path::Path::new(&source_file).parent().unwrap(); + + std::path::Path::new(&package.get_bs_build_path()) + .join(dir) + .join(file_path_to_compiler_asset_basename(source_file, namespace) + extension) + .to_str() + .unwrap() + .to_owned() +} + +pub fn get_namespace_from_module_name(module_name: &str) -> Option { + let mut split = module_name.split('-'); + let _ = split.next(); + split.next().map(|s| s.to_string()) +} + +pub fn is_interface_ast_file(file: &str) -> bool { + file.ends_with(".iast") +} + +pub fn read_lines(filename: String) -> io::Result>> { + let file = fs::File::open(filename)?; + Ok(io::BufReader::new(file).lines()) +} + +pub fn get_system_time() -> u128 { + let start = SystemTime::now(); + let since_the_epoch = start.duration_since(UNIX_EPOCH).expect("Time went backwards"); + since_the_epoch.as_millis() +} + +pub fn is_interface_file(extension: &str) -> bool { + matches!(extension, "resi" | "mli" | "rei") +} + +pub fn is_implementation_file(extension: &str) -> bool { + matches!(extension, "res" | "ml" | "re") +} + +pub fn is_source_file(extension: &str) -> bool { + is_interface_file(extension) || is_implementation_file(extension) +} + +pub fn is_non_exotic_module_name(module_name: &str) -> bool { + let mut chars = module_name.chars(); + if chars.next().unwrap().is_ascii_uppercase() && chars.all(|c| c.is_ascii_alphanumeric() || c == '_') { + return true; + } + false +} + +pub fn get_extension(path: &str) -> String { + let path_buf = PathBuf::from(path); + return path_buf + .extension() + .expect("Could not get extension") + .to_str() + .expect("Could not get extension 2") + .to_string(); +} + +pub fn format_namespaced_module_name(module_name: &str) -> String { + // from ModuleName-Namespace to Namespace.ModuleName + // also format ModuleName-@Namespace to Namespace.ModuleName + let mut split = module_name.split('-'); + let module_name = split.next().unwrap(); + let namespace = split.next(); + let namespace = namespace.map(|ns| ns.trim_start_matches('@')); + match namespace { + None => module_name.to_string(), + Some(ns) => ns.to_string() + "." 
+ module_name,
+    }
+}
+
+pub fn compute_file_hash(path: &str) -> Option<blake3::Hash> {
+    match fs::read(path) {
+        Ok(str) => Some(blake3::hash(&str)),
+        Err(_) => None,
+    }
+}
+
+fn has_rescript_config(path: &Path) -> bool {
+    path.join("bsconfig.json").exists() || path.join("rescript.json").exists()
+}
+
+pub fn get_workspace_root(package_root: &str) -> Option<String> {
+    std::path::PathBuf::from(&package_root)
+        .parent()
+        .and_then(get_nearest_bsconfig)
+}
+
+// traverse up the directory tree until we find a bsconfig.json; if none is found, return None
+pub fn get_nearest_bsconfig(path_buf: &Path) -> Option<String> {
+    let mut current_dir = path_buf.to_owned();
+    loop {
+        if has_rescript_config(&current_dir) {
+            return Some(current_dir.to_string_lossy().to_string());
+        }
+        match current_dir.parent() {
+            None => return None,
+            Some(parent) => current_dir = parent.to_path_buf(),
+        }
+    }
+}
+
+pub fn get_rescript_version(bsc_path: &str) -> String {
+    let version_cmd = Command::new(bsc_path)
+        .args(["-v"])
+        .output()
+        .expect("failed to find version");
+
+    std::str::from_utf8(&version_cmd.stdout)
+        .expect("Could not read version from rescript")
+        .replace('\n', "")
+        .replace("ReScript ", "")
+}
+
+pub fn read_file(path: &Path) -> Result<String, std::io::Error> {
+    let mut file = File::open(path).expect("file not found");
+    let mut contents = String::new();
+    file.read_to_string(&mut contents)?;
+    Ok(contents)
+}
diff --git a/rewatch/src/lib.rs b/rewatch/src/lib.rs
new file mode 100644
index 0000000000..b84aed04a4
--- /dev/null
+++ b/rewatch/src/lib.rs
@@ -0,0 +1,8 @@
+pub mod bsconfig;
+pub mod build;
+pub mod cmd;
+pub mod helpers;
+pub mod lock;
+pub mod queue;
+pub mod sourcedirs;
+pub mod watcher;
diff --git a/rewatch/src/lock.rs b/rewatch/src/lock.rs
new file mode 100644
index 0000000000..98ab21769f
--- /dev/null
+++ b/rewatch/src/lock.rs
@@ -0,0 +1,70 @@
+use std::fs;
+use std::fs::File;
+use std::io::Write;
+use std::path::Path;
+use std::process;
+use sysinfo::{PidExt, System, SystemExt};
+
+/* This locking mechanism is meant to never be deleted. Instead, it stores the PID of the process
+ * that's running. When trying to acquire a lock, it checks whether that process is still running.
+ * If not, it rewrites the lockfile to have its own PID instead.
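+ *
+ * The lockfile lives at `<folder>/lib/rewatch.lock` and contains only the PID;
+ * a hypothetical run would leave e.g. `12345` in that file.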
+ */
+
+pub static LOCKFILE: &str = "rewatch.lock";
+
+pub enum Error {
+    Locked(u32),
+    ParsingLockfile(std::num::ParseIntError),
+    ReadingLockfile(std::io::Error),
+    WritingLockfile(std::io::Error),
+}
+
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        let msg = match self {
+            Error::Locked(pid) => format!("Rewatch is already running with PID {}", pid),
+            Error::ParsingLockfile(e) => format!("Could not parse lockfile: \n {}", e),
+            Error::ReadingLockfile(e) => format!("Could not read lockfile: \n {}", e),
+            Error::WritingLockfile(e) => format!("Could not write lockfile: \n {}", e),
+        };
+        write!(f, "{}", msg)
+    }
+}
+
+pub enum Lock {
+    Acquired(u32),
+    Error(Error),
+}
+
+fn exists(to_check_pid: u32) -> bool {
+    System::new_all()
+        .processes()
+        .iter()
+        .any(|(pid, _process)| pid.as_u32() == to_check_pid)
+}
+
+fn create(lockfile_location: &Path, pid: u32) -> Lock {
+    // Create the lib directory if it does not exist yet
+    if let Some(Err(e)) = lockfile_location.parent().map(fs::create_dir_all) {
+        return Lock::Error(Error::WritingLockfile(e));
+    };
+
+    File::create(lockfile_location)
+        .and_then(|mut file| file.write(pid.to_string().as_bytes()).map(|_| Lock::Acquired(pid)))
+        .unwrap_or_else(|e| Lock::Error(Error::WritingLockfile(e)))
+}
+
+pub fn get(folder: &str) -> Lock {
+    let location = format!("{}/lib/{}", folder, LOCKFILE);
+    let path = Path::new(&location);
+    let pid = process::id();
+
+    match fs::read_to_string(&location) {
+        Err(e) if (e.kind() == std::io::ErrorKind::NotFound) => create(path, pid),
+        Err(e) => Lock::Error(Error::ReadingLockfile(e)),
+        Ok(s) => match s.parse::<u32>() {
+            Ok(parsed_pid) if !exists(parsed_pid) => create(path, pid),
+            Ok(parsed_pid) => Lock::Error(Error::Locked(parsed_pid)),
+            Err(e) => Lock::Error(Error::ParsingLockfile(e)),
+        },
+    }
+}
diff --git a/rewatch/src/main.rs b/rewatch/src/main.rs
new file mode 100644
index 0000000000..9a5b98ef6f
--- /dev/null
+++ b/rewatch/src/main.rs
@@ -0,0 +1,109 @@
+use clap::{Parser, ValueEnum};
+use regex::Regex;
+
+use rewatch::{build, cmd, lock, watcher};
+
+#[derive(Debug, Clone, ValueEnum)]
+enum Command {
+    /// Build using Rewatch
+    Build,
+    /// Build, then start a watcher
+    Watch,
+    /// Clean the build artifacts
+    Clean,
+}
+
+/// Rewatch is an alternative build system for the ReScript compiler's bsb (which uses Ninja internally). It strives
+/// to deliver consistent and faster builds in monorepo setups with multiple packages, where the
+/// default build system fails to pick up changed interfaces across packages.
+#[derive(Parser, Debug)]
+#[command(version)]
+struct Args {
+    #[arg(value_enum)]
+    command: Option<Command>,
+
+    /// The relative path to where the main bsconfig.json resides, i.e. the root of your project.
+    folder: Option<String>,
+
+    /// Filter allows for a regex to be supplied, which will filter the files to be compiled. For
+    /// instance, to filter out test files while doing feature work.
+    #[arg(short, long)]
+    filter: Option<String>,
+
+    /// This allows one to pass an additional command to the watcher, which is run whenever a
+    /// build finishes. For instance, to play a sound when done compiling, or to run a test suite.
+    /// NOTE: you may need to add '--color=always' to your subcommand if you want it to
+    /// output colour as well
+    #[arg(short, long)]
+    after_build: Option<String>,
+
+    #[arg(short, long)]
+    no_timing: Option<bool>,
+
+    /// This creates a .sourcedirs.json file at the root of the monorepo, which is needed when you
+    /// want to use Reanalyze
+    #[arg(short, long)]
+    create_sourcedirs: Option<bool>,
+
+    #[arg(long)]
+    compiler_args: Option<String>,
+
+    #[arg(long)]
+    rescript_version: Option<String>,
+}
+
+fn main() {
+    env_logger::init();
+    let args = Args::parse();
+
+    let command = args.command.unwrap_or(Command::Build);
+    let folder = args.folder.unwrap_or(".".to_string());
+    let filter = args
+        .filter
+        .map(|filter| Regex::new(filter.as_ref()).expect("Could not parse regex"));
+
+    match args.compiler_args {
+        None => (),
+        Some(path) => {
+            println!("{}", build::get_compiler_args(&path, args.rescript_version));
+            std::process::exit(0);
+        }
+    }
+
+    match lock::get(&folder) {
+        lock::Lock::Error(ref e) => {
+            eprintln!("Error while trying to get lock: {e}");
+            std::process::exit(1)
+        }
+        lock::Lock::Acquired(_) => match command {
+            Command::Clean => build::clean::clean(&folder),
+            Command::Build => {
+                match build::build(
+                    &filter,
+                    &folder,
+                    args.no_timing.unwrap_or(false),
+                    args.create_sourcedirs.unwrap_or(false),
+                ) {
+                    Err(e) => {
+                        eprintln!("Error Building: {e}");
+                        std::process::exit(1)
+                    }
+                    Ok(_) => {
+                        if let Some(args_after_build) = args.after_build {
+                            cmd::run(args_after_build)
+                        }
+                        std::process::exit(0)
+                    }
+                };
+            }
+            Command::Watch => {
+                watcher::start(
+                    &filter,
+                    &folder,
+                    args.after_build,
+                    args.create_sourcedirs.unwrap_or(false),
+                );
+            }
+        },
+    }
+}
diff --git a/rewatch/src/queue.rs b/rewatch/src/queue.rs
new file mode 100644
index 0000000000..eedf83c1d5
--- /dev/null
+++ b/rewatch/src/queue.rs
@@ -0,0 +1,177 @@
+/// Taken from: https://fedevitale.medium.com/thread-safe-queue-in-rust-1ed1acb9b93e
+/// https://github.com/rawnly/queue-rs
+use std::{
+    collections::VecDeque,
+    sync::{Condvar, Mutex},
+};
+
+/// A trait describing the general behaviour of a Queue
+pub trait Queue<T> {
+    /// Creates a new, empty queue
+    fn new() -> Self;
+
+    /// Pushes a new item into the queue
+    fn push(&self, value: T);
+
+    /// Removes an item from the queue
+    fn pop(&self) -> T;
+
+    /// Returns the size of the queue
+    fn len(&self) -> usize;
+
+    /// Checks if the queue is empty
+    fn is_empty(&self) -> bool;
+}
+
+/// A FIFO queue implemented using a VecDeque and a Mutex
+#[derive(Debug)]
+pub struct FifoQueue<T> {
+    /// The underlying data structure of the queue
+    data: Mutex<VecDeque<T>>,
+    cv: Condvar,
+}
+
+impl<T> Queue<T> for FifoQueue<T> {
+    /// Creates a new, empty queue
+    fn new() -> Self {
+        Self {
+            data: Mutex::new(VecDeque::new()),
+            cv: Condvar::new(),
+        }
+    }
+
+    /// Adds an element to the back of the queue
+    fn push(&self, value: T) {
+        let mut data = self.data.lock().unwrap();
+        data.push_back(value);
+        self.cv.notify_one();
+    }
+
+    /// Removes an element from the front of the queue,
+    /// blocking on the condition variable while the queue is empty
+    fn pop(&self) -> T {
+        let mut data = self.data.lock().unwrap();
+
+        while data.is_empty() {
+            data = self.cv.wait(data).unwrap();
+        }
+
+        data.pop_front().unwrap()
+    }
+
+    /// Returns the size of the queue
+    fn len(&self) -> usize {
+        let data = self.data.lock().unwrap();
+        data.len()
+    }
+
+    /// Checks if the queue is empty
+    fn is_empty(&self) -> bool {
+        let data = self.data.lock().unwrap();
+        data.is_empty()
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use std::{sync::Arc, thread};
+
+    use super::*;
+
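+    // Illustrative sketch, not part of the original test suite: pop() parks on the
+    // Condvar until another thread pushes, so a blocked consumer wakes up with the value.
+    #[test]
+    fn test_pop_blocks_until_push() {
+        let queue = Arc::new(FifoQueue::<i32>::new());
+        let q = queue.clone();
+        let consumer = thread::spawn(move || q.pop());
+        // give the consumer a moment to block on the condition variable
+        thread::sleep(std::time::Duration::from_millis(50));
+        queue.push(42);
+        assert_eq!(consumer.join().unwrap(), 42);
+    }
+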
+    #[test]
+    fn test_basic_functionalities() {
+        let queue = FifoQueue::new();
+
+        // Test push and pop
+        queue.push(1);
+        queue.push(2);
+        queue.push(3);
+
+        assert_eq!(queue.pop(), 1);
+        assert_eq!(queue.pop(), 2);
+        assert_eq!(queue.pop(), 3);
+
+        // Test size and is_empty
+        assert_eq!(queue.len(), 0);
+        assert!(queue.is_empty());
+
+        queue.push(4);
+        queue.push(5);
+
+        assert_eq!(queue.len(), 2);
+        assert!(!queue.is_empty());
+    }
+
+    #[test]
+    fn test_queue_thread_safety() {
+        // create a queue of numbers
+        let queue = Arc::new(FifoQueue::<i32>::new());
+
+        let q1 = queue.clone();
+        let t1 = thread::spawn(move || {
+            q1.push(1);
+            q1.push(2);
+        });
+
+        let q2 = queue.clone();
+        let t2 = thread::spawn(move || {
+            q2.push(3);
+            q2.push(4)
+        });
+
+        t1.join().unwrap();
+        t2.join().unwrap();
+
+        assert_eq!(queue.len(), 4);
+    }
+
+    #[test]
+    fn test_concurrent_pushes_and_pops() {
+        let queue = Arc::new(FifoQueue::new());
+
+        let queue1 = queue.clone();
+        let handle1 = thread::spawn(move || {
+            for i in 0..1000 {
+                queue1.push(i);
+            }
+        });
+
+        let queue2 = queue.clone();
+        let handle2 = thread::spawn(move || {
+            for _ in 0..1000 {
+                queue2.pop();
+            }
+        });
+
+        handle1.join().unwrap();
+        handle2.join().unwrap();
+
+        assert!(queue.is_empty());
+    }
+
+    #[test]
+    fn test_concurrent_mixed_operations() {
+        let queue = Arc::new(FifoQueue::new());
+
+        let queue1 = queue.clone();
+        let handle1 = thread::spawn(move || {
+            for i in 0..1000 {
+                queue1.push(i);
+                queue1.pop();
+            }
+        });
+
+        let queue2 = queue.clone();
+        let handle2 = thread::spawn(move || {
+            for i in 0..1000 {
+                queue2.push(i);
+                queue2.pop();
+            }
+        });
+
+        handle1.join().unwrap();
+        handle2.join().unwrap();
+
+        assert!(queue.is_empty());
+    }
+}
diff --git a/rewatch/src/sourcedirs.rs b/rewatch/src/sourcedirs.rs
new file mode 100644
index 0000000000..d50f5636aa
--- /dev/null
+++ b/rewatch/src/sourcedirs.rs
@@ -0,0 +1,120 @@
+use crate::build::build_types::BuildState;
+use crate::build::packages::Package;
+use ahash::{AHashMap, AHashSet};
+use rayon::prelude::*;
+use serde::Serialize;
+use serde_json::json;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::PathBuf;
+
+type Dir = String;
+type PackageName = String;
+type AbsolutePath = String;
+type Pkg = (PackageName, AbsolutePath);
+
+#[derive(Serialize, Debug, Clone, PartialEq, Hash)]
+pub struct SourceDirs<'a> {
+    pub dirs: &'a Vec<Dir>,
+    pub pkgs: &'a Vec<Pkg>,
+    pub generated: &'a Vec<String>,
+}
+
+fn package_to_dirs<'a>(package: &'a Package, root_package_path: &String) -> AHashSet<Dir> {
+    let relative_path = PathBuf::from(&package.path)
+        .strip_prefix(PathBuf::from(&root_package_path))
+        .unwrap()
+        .to_string_lossy()
+        .to_string();
+
+    package
+        .dirs
+        .as_ref()
+        .unwrap_or(&AHashSet::new())
+        .iter()
+        .filter_map(|path| path.to_str().map(|path| format!("{relative_path}/{path}")))
+        .collect::<AHashSet<Dir>>()
+}
+
+fn deps_to_pkgs<'a>(
+    packages: &'a AHashMap<String, Package>,
+    dependencies: &'a Option<Vec<String>>,
+) -> AHashSet<Pkg> {
+    dependencies
+        .as_ref()
+        .unwrap_or(&vec![])
+        .iter()
+        .filter_map(|name| {
+            packages
+                .get(name)
+                .map(|package| (name.to_owned(), package.path.to_owned()))
+        })
+        .collect::<AHashSet<Pkg>>()
+}
+
+fn write_sourcedirs_files(path: String, source_dirs: &SourceDirs) -> Result<usize, std::io::Error> {
+    let mut source_dirs_json = File::create(path + "/.sourcedirs.json")?;
+    source_dirs_json.write(json!(source_dirs).to_string().as_bytes())
+}
+
+pub fn print(buildstate: &BuildState) {
+    // Find Root Package
+    let (_name, root_package) = buildstate
+        .packages
+        .iter()
+        .find(|(_name, package)| package.is_root)
+        .expect("Could not find root package");
not find root package"); + + // Take all packages apart from the root package + let (dirs, pkgs): (Vec>, Vec>) = buildstate + .packages + .par_iter() + .filter(|(_name, package)| !package.is_root) + .map(|(_name, package)| { + // Extract Directories + let dirs = package_to_dirs(&package, &root_package.path); + + // Extract Pkgs + let pkgs = [ + &package.bsconfig.pinned_dependencies, + &package.bsconfig.bs_dependencies, + &package.bsconfig.bs_dev_dependencies, + ] + .into_iter() + .map(|dependencies| deps_to_pkgs(&buildstate.packages, dependencies)); + + // Write sourcedirs.json + write_sourcedirs_files( + package.get_bs_build_path(), + &SourceDirs { + dirs: &dirs.clone().into_iter().collect::>(), + pkgs: &pkgs.clone().flatten().collect::>(), + generated: &vec![], + }, + ) + .expect("Could not write sourcedirs.json"); + + ( + dirs, + pkgs.flatten().collect::>(), + ) + }) + .unzip(); + + let mut merged_dirs: AHashSet = AHashSet::new(); + let mut merged_pkgs: AHashMap = AHashMap::new(); + + dirs.into_iter().for_each(|dir_set| merged_dirs.extend(dir_set)); + pkgs.into_iter().for_each(|pkg_set| merged_pkgs.extend(pkg_set)); + + // Write sourcedirs.json + write_sourcedirs_files( + root_package.get_bs_build_path(), + &SourceDirs { + dirs: &merged_dirs.into_iter().collect::>(), + pkgs: &merged_pkgs.into_iter().collect::>(), + generated: &vec![], + }, + ) + .expect("Could not write sourcedirs.json"); +} diff --git a/rewatch/src/structure_hashmap.rs b/rewatch/src/structure_hashmap.rs new file mode 100644 index 0000000000..1e3abf4094 --- /dev/null +++ b/rewatch/src/structure_hashmap.rs @@ -0,0 +1,54 @@ +use crate::helpers::{is_source_file, LexicalAbsolute}; +use ahash::AHashMap; +use std::path::PathBuf; +use std::{error, fs}; + +pub fn read_folders( + filter: &Option, + path: &str, + recurse: bool, +) -> Result, Box> { + let mut map: AHashMap = AHashMap::new(); + + let path_buf = PathBuf::from(path); + let abs_path = path_buf + .to_lexical_absolute() + .map(|x| x.to_str().map(|y| y.to_string()).unwrap_or("".to_string())) + .and_then(|x| fs::metadata(x.to_owned()).map(|m| (x.to_owned(), m))); + + for entry in fs::read_dir(path.replace("//", "/"))? 
+        let path_buf = entry.map(|entry| entry.path())?;
+        let metadata = fs::metadata(&path_buf)?;
+        let name = path_buf
+            .file_name()
+            .and_then(|x| x.to_str())
+            .unwrap_or("Unknown")
+            .to_string();
+
+        let path_ext = path_buf.extension().and_then(|x| x.to_str());
+
+        if metadata.file_type().is_dir() && recurse {
+            match read_folders(filter, &(path.to_owned() + "/" + &name + "/"), recurse) {
+                Ok(s) => map.extend(s),
+                Err(e) => println!("Error reading directory: {}", e),
+            }
+        }
+        match path_ext {
+            Some(extension) if is_source_file(extension) => match abs_path {
+                Ok((ref path, _))
+                    if filter
+                        .as_ref()
+                        .map(|re| !re.is_match(&name))
+                        .unwrap_or(true) =>
+                {
+                    map.insert(path.to_owned() + "/" + &name, metadata);
+                }
+                Ok(_) => println!("Filtered: {:?}", name),
+                Err(ref e) => println!("Error reading directory: {}", e),
+            },
+            _ => (),
+        }
+    }
+
+    Ok(map)
+}
diff --git a/rewatch/src/watcher.rs b/rewatch/src/watcher.rs
new file mode 100644
index 0000000000..b5685d8361
--- /dev/null
+++ b/rewatch/src/watcher.rs
@@ -0,0 +1,254 @@
+use crate::build;
+use crate::build::build_types::SourceType;
+use crate::build::clean;
+use crate::cmd;
+use crate::helpers;
+use crate::helpers::emojis::*;
+use crate::queue::FifoQueue;
+use crate::queue::*;
+use futures_timer::Delay;
+use notify::event::ModifyKind;
+use notify::{Config, Error, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
+use std::path::Path;
+use std::sync::Arc;
+use std::sync::Mutex;
+use std::time::{Duration, Instant};
+
+#[derive(Debug, Clone, PartialEq, Eq, Copy)]
+enum CompileType {
+    Incremental,
+    Full,
+    None,
+}
+
+fn is_rescript_file(path_buf: &Path) -> bool {
+    let extension = path_buf.extension().and_then(|ext| ext.to_str());
+
+    if let Some(extension) = extension {
+        helpers::is_implementation_file(extension) || helpers::is_interface_file(extension)
+    } else {
+        false
+    }
+}
+
+fn is_in_build_path(path_buf: &Path) -> bool {
+    path_buf
+        .to_str()
+        .map(|x| x.contains("/lib/bs/") || x.contains("/lib/ocaml/"))
+        .unwrap_or(false)
+}
+
+fn matches_filter(path_buf: &Path, filter: &Option<regex::Regex>) -> bool {
+    let name = path_buf
+        .file_name()
+        .map(|x| x.to_string_lossy().to_string())
+        .unwrap_or("".to_string());
+    filter.as_ref().map(|re| !re.is_match(&name)).unwrap_or(true)
+}
+
+async fn async_watch(
+    q: Arc<FifoQueue<Result<Event, Error>>>,
+    path: &str,
+    filter: &Option<regex::Regex>,
+    after_build: Option<String>,
+    create_sourcedirs: bool,
+) -> notify::Result<()> {
+    let mut build_state = build::initialize_build(None, filter, path).expect("Can't initialize build");
+    let mut needs_compile_type = CompileType::Incremental;
+    // create a mutex to capture if ctrl-c was pressed
+    let ctrlc_pressed = Arc::new(Mutex::new(false));
+    let ctrlc_pressed_clone = Arc::clone(&ctrlc_pressed);
+
+    ctrlc::set_handler(move || {
+        let pressed = Arc::clone(&ctrlc_pressed);
+        let mut pressed = pressed.lock().unwrap();
+        *pressed = true;
+    })
+    .expect("Error setting Ctrl-C handler");
+
+    let mut initial_build = true;
+
+    loop {
+        if *ctrlc_pressed_clone.lock().unwrap() {
+            println!("\nExiting...");
+            clean::cleanup_after_build(&build_state);
+            break Ok(());
+        }
+        let mut events: Vec<Event> = vec![];
+        if !q.is_empty() {
+            // Wait for events to settle
+            Delay::new(Duration::from_millis(50)).await;
+        }
+        while !q.is_empty() {
+            if let Ok(event) = q.pop() {
+                events.push(event)
+            }
+        }
+
+        for event in events {
+            let paths = event
+                .paths
+                .iter()
+                .filter(|path| is_rescript_file(path))
+                .filter(|path| !is_in_build_path(path))
+                .filter(|path| matches_filter(path, filter));
+            for path in paths {
+                let path_buf = path.to_path_buf();
+
+                match (needs_compile_type, event.kind) {
+                    (
+                        CompileType::Incremental | CompileType::None,
+                        // when we have a name change, create, or remove event, we need to do a full compile
+                        EventKind::Remove(_)
+                        | EventKind::Any
+                        | EventKind::Create(_)
+                        | EventKind::Modify(ModifyKind::Name(_)),
+                    ) => {
+                        // if we are going to do a full compile, we don't need to bother marking
+                        // files dirty because we do a full scan anyway
+                        needs_compile_type = CompileType::Full;
+                    }
+
+                    (
+                        CompileType::None | CompileType::Incremental,
+                        // when we have a data change event, we can do an incremental compile
+                        EventKind::Modify(ModifyKind::Data(_)),
+                    ) => {
+                        // if we are going to compile incrementally, we need to mark the exact files
+                        // dirty
+                        if let Ok(canonicalized_path_buf) = path_buf.canonicalize() {
+                            for module in build_state.modules.values_mut() {
+                                match module.source_type {
+                                    SourceType::SourceFile(ref mut source_file) => {
+                                        // mark the implementation file dirty
+                                        let package = build_state
+                                            .packages
+                                            .get(&module.package_name)
+                                            .expect("Package not found");
+                                        let canonicalized_implementation_file =
+                                            std::path::PathBuf::from(package.path.to_string())
+                                                .join(&source_file.implementation.path);
+                                        if canonicalized_path_buf == canonicalized_implementation_file {
+                                            if let Ok(modified) =
+                                                canonicalized_path_buf.metadata().and_then(|x| x.modified())
+                                            {
+                                                source_file.implementation.last_modified = modified;
+                                            };
+                                            source_file.implementation.parse_dirty = true;
+                                            break;
+                                        }
+
+                                        // mark the interface file dirty
+                                        if let Some(ref mut interface) = source_file.interface {
+                                            let canonicalized_interface_file =
+                                                std::path::PathBuf::from(package.path.to_string())
+                                                    .join(&interface.path);
+                                            if canonicalized_path_buf == canonicalized_interface_file {
+                                                if let Ok(modified) = canonicalized_path_buf
+                                                    .metadata()
+                                                    .and_then(|x| x.modified())
+                                                {
+                                                    interface.last_modified = modified;
+                                                }
+                                                interface.parse_dirty = true;
+                                                break;
+                                            }
+                                        }
+                                    }
+                                    SourceType::MlMap(_) => (),
+                                }
+                            }
+                            needs_compile_type = CompileType::Incremental;
+                        }
+                    }
+
+                    (
+                        CompileType::None | CompileType::Incremental,
+                        // these are not relevant events for compilation
+                        EventKind::Access(_)
+                        | EventKind::Other
+                        | EventKind::Modify(ModifyKind::Any)
+                        | EventKind::Modify(ModifyKind::Metadata(_))
+                        | EventKind::Modify(ModifyKind::Other),
+                    ) => (),
+                    // if we already need a full compile, we don't need to check for other events
+                    (CompileType::Full, _) => (),
+                }
+            }
+        }
+        match needs_compile_type {
+            CompileType::Incremental => {
+                let timing_total = Instant::now();
+                if build::incremental_build(
+                    &mut build_state,
+                    None,
+                    initial_build,
+                    !initial_build,
+                    create_sourcedirs,
+                )
+                .is_ok()
+                {
+                    if let Some(a) = after_build.clone() {
+                        cmd::run(a)
+                    }
+                    let timing_total_elapsed = timing_total.elapsed();
+                    println!(
+                        "\n{}{}Finished {} compilation in {:.2}s\n",
+                        LINE_CLEAR,
+                        SPARKLES,
+                        if initial_build { "initial" } else { "incremental" },
+                        timing_total_elapsed.as_secs_f64()
+                    );
+                }
+                needs_compile_type = CompileType::None;
+                initial_build = false;
+            }
+            CompileType::Full => {
+                let timing_total = Instant::now();
+                build_state = build::initialize_build(None, filter, path).expect("Can't initialize build");
+                let _ =
+                    build::incremental_build(&mut build_state, None, initial_build, false, create_sourcedirs);
+                if let Some(a) = after_build.clone() {
+                    cmd::run(a)
+                }
+                let timing_total_elapsed = timing_total.elapsed();
+                println!(
+                    "\n{}{}Finished compilation in {:.2}s\n",
+                    LINE_CLEAR,
+                    SPARKLES,
+                    timing_total_elapsed.as_secs_f64()
+                );
+                needs_compile_type = CompileType::None;
+                initial_build = false;
+            }
+            CompileType::None => {
+                // We want to sleep for a little while so the CPU can schedule other work. That way we end
+                // up not burning CPU cycles.
+                Delay::new(Duration::from_millis(50)).await;
+            }
+        }
+    }
+}
+
+pub fn start(
+    filter: &Option<regex::Regex>,
+    folder: &str,
+    after_build: Option<String>,
+    create_sourcedirs: bool,
+) {
+    futures::executor::block_on(async {
+        let queue = Arc::new(FifoQueue::<Result<Event, Error>>::new());
+        let producer = queue.clone();
+        let consumer = queue.clone();
+
+        let mut watcher = RecommendedWatcher::new(move |res| producer.push(res), Config::default())
+            .expect("Could not create watcher");
+        watcher
+            .watch(folder.as_ref(), RecursiveMode::Recursive)
+            .expect("Could not start watcher");
+
+        if let Err(e) = async_watch(consumer, folder, filter, after_build, create_sourcedirs).await {
+            println!("error: {:?}", e)
+        }
+    })
+}
diff --git a/rewatch/tests/compile.sh b/rewatch/tests/compile.sh
new file mode 100755
index 0000000000..2b7173ee57
--- /dev/null
+++ b/rewatch/tests/compile.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+cd $(dirname $0)
+source "./utils.sh"
+cd ../testrepo
+
+bold "Test: It should compile"
+
+if rewatch clean &> /dev/null;
+then
+  success "Repo Cleaned"
+else
+  error "Error Cleaning Repo"
+  exit 1
+fi
+
+if rewatch &> /dev/null;
+then
+  success "Repo Built"
+else
+  error "Error Building Repo"
+  exit 1
+fi
+
+
+if git diff --exit-code ./;
+then
+  success "Testrepo has no changes"
+else
+  error "Build has changed"
+  exit 1
+fi
+
+node ./packages/main/src/Main.mjs > ./packages/main/src/output.txt
+
+mv ./packages/main/src/Main.res ./packages/main/src/Main2.res
+rewatch build --no-timing=true &> ../tests/snapshots/rename-file.txt
+mv ./packages/main/src/Main2.res ./packages/main/src/Main.res
+rewatch build &> /dev/null
+mv ./packages/main/src/ModuleWithInterface.resi ./packages/main/src/ModuleWithInterface2.resi
+rewatch build --no-timing=true &> ../tests/snapshots/rename-interface-file.txt
+mv ./packages/main/src/ModuleWithInterface2.resi ./packages/main/src/ModuleWithInterface.resi
+rewatch build &> /dev/null
+mv ./packages/main/src/ModuleWithInterface.res ./packages/main/src/ModuleWithInterface2.res
+rewatch build --no-timing=true &> ../tests/snapshots/rename-file-with-interface.txt
+mv ./packages/main/src/ModuleWithInterface2.res ./packages/main/src/ModuleWithInterface.res
+rewatch build &> /dev/null
+
+# when deleting a file that other files depend on, the compile should fail
+rm packages/dep02/src/Dep02.res
+rewatch build --no-timing=true &> ../tests/snapshots/remove-file.txt
+# replace the absolute path so the snapshot is the same on all machines
+replace "s/$(pwd | sed "s/\//\\\\\//g")//g" ../tests/snapshots/remove-file.txt
+git checkout -- packages/dep02/src/Dep02.res
+rewatch build &> /dev/null
+
+# it should show an error when we have a dependency cycle
+echo 'Dep01.log()' >> packages/new-namespace/src/NS_alias.res
+rewatch build --no-timing=true &> ../tests/snapshots/dependency-cycle.txt
+git checkout -- packages/new-namespace/src/NS_alias.res
+rewatch build &> /dev/null
+
+# it should not loop (we had an infinite loop when clean building with a cycle)
+rewatch clean &> /dev/null
+echo 'Dep01.log()' >> packages/new-namespace/src/NS_alias.res
+git checkout -- packages/new-namespace/src/NS_alias.res
+rewatch build &> /dev/null
+
+# make sure we don't have changes in the test repo
+if git diff --exit-code ./;
+then
+  success "Output is correct"
correct" +else + error "Output is incorrect" + exit 1 +fi + +# make sure there are no new files created by the build +# this could happen because of not cleaning up .mjs files +# after we rename files +new_files=$(git ls-files --others --exclude-standard ./) +if [[ $new_files = "" ]]; +then + success "No new files created" +else + error "❌ - New files created" + printf "${new_files}\n" + exit 1 +fi + +# see if the snapshots have changed +changed_snapshots=$(git ls-files --modified ../tests/snapshots) +if git diff --exit-code ../tests/snapshots &> /dev/null; +then + success "Snapshots are correct" +else + error "Snapshots are incorrect:" + # print filenames in the snapshot dir call bold with the filename + # and then cat their contents + printf "\n\n" + for file in $changed_snapshots; do + bold $file + # show diff of file vs contents in git + git diff $file $file + printf "\n\n" + done + + exit 1 +fi \ No newline at end of file diff --git a/rewatch/tests/lib/rewatch.lock b/rewatch/tests/lib/rewatch.lock new file mode 100644 index 0000000000..da27e6c542 --- /dev/null +++ b/rewatch/tests/lib/rewatch.lock @@ -0,0 +1 @@ +57150 \ No newline at end of file diff --git a/rewatch/tests/lock.sh b/rewatch/tests/lock.sh new file mode 100755 index 0000000000..65d48cb6fd --- /dev/null +++ b/rewatch/tests/lock.sh @@ -0,0 +1,54 @@ +source "./utils.sh" +cd ../testrepo + +bold "Test: It should lock - when watching" + +sleep 1 + +if rewatch clean &> /dev/null; +then + success "Repo Cleaned" +else + error "Error Cleaning Repo" + exit 1 +fi + +exit_watcher() { + # we need to kill the parent process (rewatch) + kill $(pgrep -P $!); +} + +rewatch watch &>/dev/null & +success "Watcher Started" + +sleep 1 + +if rewatch watch 2>&1 | grep 'Error while trying to get lock:' &> /dev/null; +then + success "Lock is correctly set" + exit_watcher +else + error "Not setting lock correctly" + exit_watcher + exit 1 +fi + +sleep 1 + +touch tmp.txt +rewatch watch &> tmp.txt & +success "Watcher Started" + +sleep 1 + +if cat tmp.txt | grep 'Error while trying to get lock:' &> /dev/null; +then + error "Lock not removed correctly" + exit_watcher + exit 1 +else + success "Lock removed correctly" + exit_watcher +fi + +rm tmp.txt diff --git a/rewatch/tests/snapshots/dependency-cycle.txt b/rewatch/tests/snapshots/dependency-cycle.txt new file mode 100644 index 0000000000..10c4e55922 --- /dev/null +++ b/rewatch/tests/snapshots/dependency-cycle.txt @@ -0,0 +1,11 @@ +[1/7]📦 Building package tree... [1/7] 📦 Built package tree in 0.00s +[2/7] 🕵️ Finding source files... [2/7] 🕵️ Found source files in 0.00s +[3/7] 📝 Reading compile state... [3/7] 📝 Read compile state 0.00s +[4/7] 🧹 Cleaning up previous build... [4/7] 🧹 Cleaned 0/10 0.00s + [5/7] 🧱 Parsed 1 source files in 0.00s + [6/7] ️🌴 Collected deps in 0.00s + [7/7] ️🛑 Compiled 0 modules in 0.00s + +Can't continue... Found a circular dependency in your code: +NewNamespace.NS_alias -> Dep01 -> Dep02 -> NS -> NewNamespace.NS_alias +Error Building:  ️🛑 Error Running Incremental Build:  ️🛑 Failed to Compile. See Errors Above diff --git a/rewatch/tests/snapshots/remove-file.txt b/rewatch/tests/snapshots/remove-file.txt new file mode 100644 index 0000000000..228ef79c00 --- /dev/null +++ b/rewatch/tests/snapshots/remove-file.txt @@ -0,0 +1,24 @@ +[1/7]📦 Building package tree... [1/7] 📦 Built package tree in 0.00s +[2/7] 🕵️ Finding source files... [2/7] 🕵️ Found source files in 0.00s +[3/7] 📝 Reading compile state... [3/7] 📝 Read compile state 0.00s +[4/7] 🧹 Cleaning up previous build... 
+ [5/7] 🧱 Parsed 0 source files in 0.00s
+ [6/7] ️🌴 Collected deps in 0.00s
+ [7/7] ️🛑 Compiled 1 modules in 0.00s
+
+  We've found a bug for you!
+  /packages/dep01/src/Dep01.res:3:9-17
+
+  1 │ let log = () => {
+  2 │   Js.log("02")
+  3 │   Dep02.log()
+  4 │ }
+  5 │
+
+  The module or file Dep02 can't be found.
+  - If it's a third-party dependency:
+    - Did you add it to the "bs-dependencies" or "bs-dev-dependencies" in bsconfig.json?
+  - Did you include the file's directory to the "sources" in bsconfig.json?
+
+
+Error Building:  ️🛑 Error Running Incremental Build:  ️🛑 Failed to Compile. See Errors Above
diff --git a/rewatch/tests/snapshots/rename-file-with-interface.txt b/rewatch/tests/snapshots/rename-file-with-interface.txt
new file mode 100644
index 0000000000..e16ce55028
--- /dev/null
+++ b/rewatch/tests/snapshots/rename-file-with-interface.txt
@@ -0,0 +1,10 @@
+[1/7]📦 Building package tree... [1/7] 📦 Built package tree in 0.00s
+[2/7] 🕵️ Finding source files... Warning: No implementation file found for interface file (skipping): src/ModuleWithInterface.resi
+ [2/7] 🕵️ Found source files in 0.00s
+[3/7] 📝 Reading compile state... [3/7] 📝 Read compile state 0.00s
+[4/7] 🧹 Cleaning up previous build... [4/7] 🧹 Cleaned 2/10 0.00s
+ [5/7] 🧱 Parsed 1 source files in 0.00s
+ [6/7] ️🌴 Collected deps in 0.00s
+ [7/7] 🤺 ️Compiled 1 modules in 0.00s
+
+  ✨ Finished Compilation in 0.00s
diff --git a/rewatch/tests/snapshots/rename-file.txt b/rewatch/tests/snapshots/rename-file.txt
new file mode 100644
index 0000000000..68d3ebf198
--- /dev/null
+++ b/rewatch/tests/snapshots/rename-file.txt
@@ -0,0 +1,9 @@
+[1/7]📦 Building package tree... [1/7] 📦 Built package tree in 0.00s
+[2/7] 🕵️ Finding source files... [2/7] 🕵️ Found source files in 0.00s
+[3/7] 📝 Reading compile state... [3/7] 📝 Read compile state 0.00s
+[4/7] 🧹 Cleaning up previous build... [4/7] 🧹 Cleaned 1/10 0.00s
+ [5/7] 🧱 Parsed 1 source files in 0.00s
+ [6/7] ️🌴 Collected deps in 0.00s
+ [7/7] 🤺 ️Compiled 1 modules in 0.00s
+
+  ✨ Finished Compilation in 0.00s
diff --git a/rewatch/tests/snapshots/rename-interface-file.txt b/rewatch/tests/snapshots/rename-interface-file.txt
new file mode 100644
index 0000000000..d9805848a0
--- /dev/null
+++ b/rewatch/tests/snapshots/rename-interface-file.txt
@@ -0,0 +1,10 @@
+[1/7]📦 Building package tree... [1/7] 📦 Built package tree in 0.00s
+[2/7] 🕵️ Finding source files... Warning: No implementation file found for interface file (skipping): src/ModuleWithInterface2.resi
+ [2/7] 🕵️ Found source files in 0.00s
+[3/7] 📝 Reading compile state... [3/7] 📝 Read compile state 0.00s
+[4/7] 🧹 Cleaning up previous build... [4/7] 🧹 Cleaned 1/10 0.00s
+ [5/7] 🧱 Parsed 1 source files in 0.00s
+ [6/7] ️🌴 Collected deps in 0.00s
+ [7/7] 🤺 ️Compiled 1 modules in 0.00s
+
+  ✨ Finished Compilation in 0.00s
diff --git a/rewatch/tests/suffix.sh b/rewatch/tests/suffix.sh
new file mode 100755
index 0000000000..a7fe97875e
--- /dev/null
+++ b/rewatch/tests/suffix.sh
@@ -0,0 +1,56 @@
+source "./utils.sh"
+cd ../testrepo
+
+bold "Test: It should support custom suffixes"
+
+# Clean Repo
+sleep 1
+if rewatch clean &> /dev/null;
+then
+  success "Repo Cleaned"
+else
+  error "Error Cleaning Repo"
+  exit 1
+fi
+
+# Replace suffix
+replace "s/.mjs/.res.js/g" bsconfig.json
+
+if rewatch build &> /dev/null;
+then
+  success "Repo Built"
+else
+  error "Error building repo"
+  exit 1
+fi
+
+# Count files with new extension
+file_count=$(find . -name "*.res.js" | wc -l)
+
+if [ "$file_count" -eq 9 ];
+then
+  success "Found files with correct suffix"
+else
+  error "Suffix not correctly used"
+  exit 1
+fi
+
+if rewatch clean &> /dev/null;
+then
+  success "Repo Cleaned"
+else
+  error "Error Cleaning Repo"
+  exit 1
+fi
+
+# Restore Suffix
+replace "s/.res.js/.mjs/g" bsconfig.json
+
+# Restore original build
+if rewatch build &> /dev/null;
+then
+  success "Repo Built"
+else
+  error "Error building repo"
+  exit 1
+fi
diff --git a/rewatch/tests/suite-ci.sh b/rewatch/tests/suite-ci.sh
new file mode 100755
index 0000000000..bcec5677f7
--- /dev/null
+++ b/rewatch/tests/suite-ci.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+# Make sure we are in the right directory
+cd $(dirname $0)
+
+source ./utils.sh
+
+bold "Check if build exists"
+if test -f ../target/release/rewatch;
+then
+  success "Build exists"
+else
+  error "Build does not exist. Exiting..."
+  exit 1
+fi
+
+bold "Make sure the testrepo is clean"
+if git diff --exit-code ../testrepo &> /dev/null;
+then
+  success "Testrepo has no changes"
+else
+  error "Testrepo is not clean to start with"
+  exit 1
+fi
+
+./compile.sh && ./watch.sh && ./lock.sh && ./suffix.sh
diff --git a/rewatch/tests/suite.sh b/rewatch/tests/suite.sh
new file mode 100755
index 0000000000..8241a09ab2
--- /dev/null
+++ b/rewatch/tests/suite.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+cd $(dirname $0)
+./suite-ci.sh
diff --git a/rewatch/tests/utils.sh b/rewatch/tests/utils.sh
new file mode 100644
index 0000000000..3f6218d4c5
--- /dev/null
+++ b/rewatch/tests/utils.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+overwrite() { echo -e "\r\033[1A\033[0K$@"; }
+success() { echo -e "- ✅ \033[32m$1\033[0m"; }
+error() { echo -e "- 🛑 \033[31m$1\033[0m"; }
+bold() { echo -e "\033[1m$1\033[0m"; }
+rewatch() { RUST_BACKTRACE=1 ../target/release/rewatch --no-timing=true $1; }
+
+replace() {
+  if [[ $OSTYPE == 'darwin'* ]];
+  then
+    sed -i '' $1 $2;
+  else
+    sed -i $1 $2;
+  fi
+}
diff --git a/rewatch/tests/watch.sh b/rewatch/tests/watch.sh
new file mode 100755
index 0000000000..20546d865b
--- /dev/null
+++ b/rewatch/tests/watch.sh
@@ -0,0 +1,50 @@
+source "./utils.sh"
+cd ../testrepo
+
+bold "Test: It should watch"
+
+if rewatch clean &> /dev/null;
+then
+  success "Repo Cleaned"
+else
+  error "Error Cleaning Repo"
+  exit 1
+fi
+
+exit_watcher() {
+  # we need to kill the parent process (rewatch)
+  kill $(pgrep -P $!);
+}
+
+rewatch watch &>/dev/null &
+success "Watcher Started"
+
+echo 'Js.log("added-by-test")' >> ./packages/main/src/Main.res
+
+sleep 1
+
+if node ./packages/main/src/Main.mjs | grep 'added-by-test' &> /dev/null;
+then
+  success "Output is correct"
+else
+  error "Output is incorrect"
+  exit_watcher
+  exit 1
+fi
+
+sleep 1
+
+replace '/Js.log("added-by-test")/d' ./packages/main/src/Main.res;
+
+sleep 1
+
+if git diff --exit-code ./
+then
+  success "Adding and removing changes nothing"
+else
+  error "Adding and removing changes left some artifacts"
+  exit_watcher
+  exit 1
+fi
+
+exit_watcher
\ No newline at end of file
diff --git a/scripts/.npmignore b/scripts/.npmignore
index 49d90018f9..a0ff4a6690 100644
--- a/scripts/.npmignore
+++ b/scripts/.npmignore
@@ -2,4 +2,5 @@
 # keep only those actually needed in the npm package.
 *
 !bin_path.js
-!rescript_*.js
\ No newline at end of file
+!rescript_*.js
+!rewatch
\ No newline at end of file
diff --git a/scripts/copyExes.js b/scripts/copyExes.js
index af1a136d25..26268749b1 100755
--- a/scripts/copyExes.js
+++ b/scripts/copyExes.js
@@ -8,6 +8,7 @@ const { duneBinDir } = require("./dune");
 const { absolutePath: platformBinDir } = require("./bin_path");
 
 const ninjaDir = path.join(__dirname, "..", "ninja");
+const rewatchDir = path.join(__dirname, "..", "rewatch");
 
 if (!fs.existsSync(platformBinDir)) {
   fs.mkdirSync(platformBinDir);
@@ -34,3 +35,4 @@ copyExe(duneBinDir, "rescript");
 copyExe(duneBinDir, "bsc");
 copyExe(duneBinDir, "bsb_helper");
 copyExe(ninjaDir, "ninja");
+copyExe(rewatchDir, "rewatch");
diff --git a/scripts/makeArtifactList.js b/scripts/makeArtifactList.js
index 41bfec0ef6..90dfe7ffae 100755
--- a/scripts/makeArtifactList.js
+++ b/scripts/makeArtifactList.js
@@ -47,7 +47,13 @@ if (isCheckMode) {
 
 function getFilesAddedByCI() {
   const platforms = ["darwin", "darwinarm64", "linux", "linuxarm64", "win32"];
-  const exes = ["bsb_helper.exe", "bsc.exe", "ninja.exe", "rescript.exe"];
+  const exes = [
+    "bsb_helper.exe",
+    "bsc.exe",
+    "ninja.exe",
+    "rescript.exe",
+    "rewatch.exe",
+  ];
 
   const files = ["ninja.COPYING"];
diff --git a/scripts/rewatch b/scripts/rewatch
new file mode 100755
index 0000000000..3084263e37
--- /dev/null
+++ b/scripts/rewatch
@@ -0,0 +1,13 @@
+#!/usr/bin/env node
+//@ts-check
+"use strict";
+
+const path = require("path");
+const child_process = require("child_process");
+
+const binAbsolutePath = require("./bin_path").absolutePath;
+const rewatchExe = path.join(binAbsolutePath, "rewatch.exe");
+
+const args = process.argv.slice(2);
+
+process.exit(child_process.spawnSync(rewatchExe, args, { stdio: "inherit" }).status ?? 1);
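
A note on the --filter flag wired through main.rs, watcher.rs, and structure_hashmap.rs above: the regex filters files out of compilation, i.e. a file is kept only when its name does not match. A minimal standalone sketch of that check; the keep helper is hypothetical, and only the regex crate (which rewatch already depends on) is assumed:

    use regex::Regex;

    // keep() mirrors the matches_filter/read_folders logic above:
    // true means "compile this file".
    fn keep(name: &str, filter: &Option<Regex>) -> bool {
        filter.as_ref().map(|re| !re.is_match(name)).unwrap_or(true)
    }

    fn main() {
        let filter = Some(Regex::new(r"_test\.res$").expect("Could not parse regex"));
        assert!(keep("Main.res", &filter)); // compiled
        assert!(!keep("Main_test.res", &filter)); // filtered out of the build
        assert!(keep("Main_test.res", &None)); // no filter supplied: everything is kept
    }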
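Relatedly, the module-name helpers in helpers.rs encode a namespaced module as ModuleName-Namespace (or ModuleName-@Namespace), and format_namespaced_module_name turns that back into Namespace.ModuleName. The sketch below mirrors that logic in a standalone program to show the round trip:

    fn get_namespace_from_module_name(module_name: &str) -> Option<String> {
        let mut split = module_name.split('-');
        let _ = split.next();
        split.next().map(|s| s.to_string())
    }

    fn format_namespaced_module_name(module_name: &str) -> String {
        let mut split = module_name.split('-');
        let module_name = split.next().unwrap();
        let namespace = split.next().map(|ns| ns.trim_start_matches('@'));
        match namespace {
            None => module_name.to_string(),
            Some(ns) => ns.to_string() + "." + module_name,
        }
    }

    fn main() {
        assert_eq!(get_namespace_from_module_name("Foo-MyNs"), Some("MyNs".to_string()));
        assert_eq!(get_namespace_from_module_name("Foo"), None);
        assert_eq!(format_namespaced_module_name("Foo-@MyNs"), "MyNs.Foo");
        assert_eq!(format_namespaced_module_name("Foo"), "Foo");
    }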