diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..e832a59eb
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,16 @@
+# Ask git to automatically ensure that text files have posix-style line endings.
+* text eol=lf
+
+# Git can usually detect binary files, but let's mark the binary files we have
+# in the repository as binary just to be safe.
+*.arrow binary
+*.db binary
+*.jpg binary
+*.mp4 binary
+*.parquet binary
+*.png binary
+*.tar binary
+*.tgz binary
+*.webp binary
+*.xlsx binary
+*.zip binary
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 26a096660..82f1026b9 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -11,7 +11,8 @@ jobs:
     strategy:
       matrix:
         version: [20, 21]
-    runs-on: ubuntu-latest
+        os: [ubuntu-latest, windows-latest]
+    runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
@@ -28,7 +29,7 @@ jobs:
      - uses: actions/upload-artifact@v3
        if: failure()
        with:
-          name: test-output-changes
+          name: test-output-changes-${{ matrix.os }}-${{ matrix.version }}
          path: |
            test/output/*-changed.*
            test/output/build/*-changed/
diff --git a/bin/observable-init.js b/bin/observable-init.js
index 5e0ae9194..97f5e944c 100755
--- a/bin/observable-init.js
+++ b/bin/observable-init.js
@@ -1,4 +1,10 @@
-#!/usr/bin/env -S node --no-warnings=ExperimentalWarning
+#!/usr/bin/env node
+import {fileURLToPath} from "node:url";
+import crossSpawn from "cross-spawn";
 
-await import("tsx/esm");
-await import("./observable.ts");
+const observablePath = fileURLToPath(import.meta.resolve("./observable.ts"));
+crossSpawn.sync(
+  "node",
+  ["--no-warnings=ExperimentalWarning", "--import", "tsx/esm", observablePath, ...process.argv.slice(2)],
+  {stdio: "inherit"}
+);
diff --git a/examples/eia/docs/data/eia-bia-reference.csv b/examples/eia/docs/data/eia-bia-reference.csv
index 187a3ef89..a91327639 100644
--- a/examples/eia/docs/data/eia-bia-reference.csv
+++ b/examples/eia/docs/data/eia-bia-reference.csv
@@ -1,79 +1,79 @@
-BA Code,BA Name,Time Zone,Region/Country Code,Region/Country Name,Generation Only BA,Demand by BA Subregion,U.S. BA,Active BA,Activation Date,Retirement Date
-AEC,PowerSouth Energy Cooperative,Central,SE,Southeast,No,No,Yes,No,,9/1/2021
-AECI,"Associated Electric Cooperative, Inc.",Central,MIDW,Midwest,No,No,Yes,Yes,,
-AVA,Avista Corporation,Pacific,NW,Northwest,No,No,Yes,Yes,,
-AVRN,"Avangrid Renewables, LLC",Pacific,NW,Northwest,Yes,No,Yes,Yes,7/31/2018,
-AZPS,Arizona Public Service Company,Arizona,SW,Southwest,No,No,Yes,Yes,,
-BANC,Balancing Authority of Northern California,Pacific,CAL,California,No,No,Yes,Yes,,
-BPAT,Bonneville Power Administration,Pacific,NW,Northwest,No,No,Yes,Yes,,
-CHPD,Public Utility District No. 1 of Chelan County,Pacific,NW,Northwest,No,No,Yes,Yes,,
-CISO,California Independent System Operator,Pacific,CAL,California,No,Yes,Yes,Yes,,
-CPLE,Duke Energy Progress East,Eastern,CAR,Carolinas,No,No,Yes,Yes,,
-CPLW,Duke Energy Progress West,Eastern,CAR,Carolinas,No,No,Yes,Yes,,
-DEAA,"Arlington Valley, LLC",Arizona,SW,Southwest,Yes,No,Yes,Yes,,
-DOPD,PUD No. 
1 of Douglas County,Pacific,NW,Northwest,No,No,Yes,Yes,, -DUK,Duke Energy Carolinas,Eastern,CAR,Carolinas,No,No,Yes,Yes,, -EEI,"Electric Energy, Inc.",Central,MIDW,Midwest,Yes,No,Yes,No,,2/29/2020 -EPE,El Paso Electric Company,Arizona,SW,Southwest,No,No,Yes,Yes,, -ERCO,"Electric Reliability Council of Texas, Inc.",Central,TEX,Texas,No,Yes,Yes,Yes,, -FMPP,Florida Municipal Power Pool,Eastern,FLA,Florida,No,No,Yes,Yes,, -FPC,"Duke Energy Florida, Inc.",Eastern,FLA,Florida,No,No,Yes,Yes,, -FPL,Florida Power & Light Co.,Eastern,FLA,Florida,No,No,Yes,Yes,, -GCPD,"Public Utility District No. 2 of Grant County, Washington",Pacific,NW,Northwest,No,No,Yes,Yes,, -GLHB,GridLiance,Central,MIDW,Midwest,Yes,No,Yes,No,2/29/2020,9/1/2022 -GRID,"Gridforce Energy Management, LLC",Pacific,NW,Northwest,Yes,No,Yes,Yes,, -GRIF,"Griffith Energy, LLC",Arizona,SW,Southwest,Yes,No,Yes,Yes,, -GRMA,"Gila River Power, LLC",Arizona,SW,Southwest,Yes,No,Yes,No,,5/3/2018 -GVL,Gainesville Regional Utilities,Eastern,FLA,Florida,No,No,Yes,Yes,, -GWA,"NaturEner Power Watch, LLC",Mountain,NW,Northwest,Yes,No,Yes,Yes,, -HGMA,"New Harquahala Generating Company, LLC",Arizona,SW,Southwest,Yes,No,Yes,Yes,, -HST,City of Homestead,Eastern,FLA,Florida,No,No,Yes,Yes,, -IID,Imperial Irrigation District,Pacific,CAL,California,No,No,Yes,Yes,, -IPCO,Idaho Power Company,Pacific,NW,Northwest,No,No,Yes,Yes,, -ISNE,ISO New England,Eastern,NE,New England,No,Yes,Yes,Yes,, -JEA,JEA,Eastern,FLA,Florida,No,No,Yes,Yes,, -LDWP,Los Angeles Department of Water and Power,Pacific,CAL,California,No,No,Yes,Yes,, -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,Eastern,MIDW,Midwest,No,No,Yes,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",Eastern,MIDW,Midwest,No,Yes,Yes,Yes,, -NEVP,Nevada Power Company,Pacific,NW,Northwest,No,No,Yes,Yes,, -NSB,Utilities Commission of New Smyrna Beach,Eastern,FLA,Florida,No,No,Yes,No,,1/8/2020 -NWMT,NorthWestern Corporation,Mountain,NW,Northwest,No,No,Yes,Yes,, -NYIS,New York Independent System Operator,Eastern,NY,New York,No,Yes,Yes,Yes,, -OVEC,Ohio Valley Electric Corporation,Eastern,MIDA,Mid-Atlantic,No,No,Yes,No,,12/1/2018 -PACE,PacifiCorp East,Mountain,NW,Northwest,No,No,Yes,Yes,, -PACW,PacifiCorp West,Pacific,NW,Northwest,No,No,Yes,Yes,, -PGE,Portland General Electric Company,Pacific,NW,Northwest,No,No,Yes,Yes,, -PJM,"PJM Interconnection, LLC",Eastern,MIDA,Mid-Atlantic,No,Yes,Yes,Yes,, -PNM,Public Service Company of New Mexico,Arizona,SW,Southwest,No,Yes,Yes,Yes,, -PSCO,Public Service Company of Colorado,Mountain,NW,Northwest,No,No,Yes,Yes,, -PSEI,"Puget Sound Energy, Inc.",Pacific,NW,Northwest,No,No,Yes,Yes,, -SC,South Carolina Public Service Authority,Eastern,CAR,Carolinas,No,No,Yes,Yes,, -SCEG,"Dominion Energy South Carolina, Inc.",Eastern,CAR,Carolinas,No,No,Yes,Yes,, -SCL,Seattle City Light,Pacific,NW,Northwest,No,No,Yes,Yes,, -SEC,Seminole Electric Cooperative,Eastern,FLA,Florida,No,No,Yes,Yes,, -SEPA,Southeastern Power Administration,Central,SE,Southeast,Yes,No,Yes,Yes,, -SOCO,"Southern Company Services, Inc. 
- Trans",Central,SE,Southeast,No,No,Yes,Yes,, -SPA,Southwestern Power Administration,Central,CENT,Central,No,No,Yes,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,Arizona,SW,Southwest,No,No,Yes,Yes,, -SWPP,Southwest Power Pool,Central,CENT,Central,No,Yes,Yes,Yes,, -TAL,City of Tallahassee,Eastern,FLA,Florida,No,No,Yes,Yes,, -TEC,Tampa Electric Company,Eastern,FLA,Florida,No,No,Yes,Yes,, -TEPC,Tucson Electric Power,Arizona,SW,Southwest,No,No,Yes,Yes,, -TIDC,Turlock Irrigation District,Pacific,CAL,California,No,No,Yes,Yes,, -TPWR,"City of Tacoma, Department of Public Utilities, Light Division",Pacific,NW,Northwest,No,No,Yes,Yes,, -TVA,Tennessee Valley Authority,Central,TEN,Tennessee,No,No,Yes,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,Arizona,NW,Northwest,No,No,Yes,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,Arizona,SW,Southwest,No,No,Yes,Yes,, -WAUE,Western Area Power Administration - Upper Great Plains East,Mountain,NW,Northwest,No,No,Yes,No,,10/1/2015 -WAUW,Western Area Power Administration - Upper Great Plains West,Mountain,NW,Northwest,No,No,Yes,Yes,, -WWA,"NaturEner Wind Watch, LLC",Mountain,NW,Northwest,Yes,No,Yes,Yes,, -YAD,"Alcoa Power Generating, Inc. - Yadkin Division",Eastern,CAR,Carolinas,Yes,No,Yes,Yes,, -AESO,Alberta Electric System Operator,,CAN,Canada,No,No,No,Yes,, -BCHA,British Columbia Hydro and Power Authority,,CAN,Canada,No,No,No,Yes,, -HQT,Hydro-Quebec TransEnergie,,CAN,Canada,No,No,No,Yes,, -IESO,Ontario IESO,,CAN,Canada,No,No,No,Yes,, -MHEB,Manitoba Hydro,,CAN,Canada,No,No,No,Yes,, -NBSO,New Brunswick System Operator,,CAN,Canada,No,No,No,Yes,, -SPC,Saskatchewan Power Corporation,,CAN,Canada,No,No,No,Yes,, -CEN,Centro Nacional de Control de Energia,,MEX,Mexico,No,No,No,Yes,10/31/2017, +BA Code,BA Name,Time Zone,Region/Country Code,Region/Country Name,Generation Only BA,Demand by BA Subregion,U.S. BA,Active BA,Activation Date,Retirement Date +AEC,PowerSouth Energy Cooperative,Central,SE,Southeast,No,No,Yes,No,,9/1/2021 +AECI,"Associated Electric Cooperative, Inc.",Central,MIDW,Midwest,No,No,Yes,Yes,, +AVA,Avista Corporation,Pacific,NW,Northwest,No,No,Yes,Yes,, +AVRN,"Avangrid Renewables, LLC",Pacific,NW,Northwest,Yes,No,Yes,Yes,7/31/2018, +AZPS,Arizona Public Service Company,Arizona,SW,Southwest,No,No,Yes,Yes,, +BANC,Balancing Authority of Northern California,Pacific,CAL,California,No,No,Yes,Yes,, +BPAT,Bonneville Power Administration,Pacific,NW,Northwest,No,No,Yes,Yes,, +CHPD,Public Utility District No. 1 of Chelan County,Pacific,NW,Northwest,No,No,Yes,Yes,, +CISO,California Independent System Operator,Pacific,CAL,California,No,Yes,Yes,Yes,, +CPLE,Duke Energy Progress East,Eastern,CAR,Carolinas,No,No,Yes,Yes,, +CPLW,Duke Energy Progress West,Eastern,CAR,Carolinas,No,No,Yes,Yes,, +DEAA,"Arlington Valley, LLC",Arizona,SW,Southwest,Yes,No,Yes,Yes,, +DOPD,PUD No. 1 of Douglas County,Pacific,NW,Northwest,No,No,Yes,Yes,, +DUK,Duke Energy Carolinas,Eastern,CAR,Carolinas,No,No,Yes,Yes,, +EEI,"Electric Energy, Inc.",Central,MIDW,Midwest,Yes,No,Yes,No,,2/29/2020 +EPE,El Paso Electric Company,Arizona,SW,Southwest,No,No,Yes,Yes,, +ERCO,"Electric Reliability Council of Texas, Inc.",Central,TEX,Texas,No,Yes,Yes,Yes,, +FMPP,Florida Municipal Power Pool,Eastern,FLA,Florida,No,No,Yes,Yes,, +FPC,"Duke Energy Florida, Inc.",Eastern,FLA,Florida,No,No,Yes,Yes,, +FPL,Florida Power & Light Co.,Eastern,FLA,Florida,No,No,Yes,Yes,, +GCPD,"Public Utility District No. 
2 of Grant County, Washington",Pacific,NW,Northwest,No,No,Yes,Yes,, +GLHB,GridLiance,Central,MIDW,Midwest,Yes,No,Yes,No,2/29/2020,9/1/2022 +GRID,"Gridforce Energy Management, LLC",Pacific,NW,Northwest,Yes,No,Yes,Yes,, +GRIF,"Griffith Energy, LLC",Arizona,SW,Southwest,Yes,No,Yes,Yes,, +GRMA,"Gila River Power, LLC",Arizona,SW,Southwest,Yes,No,Yes,No,,5/3/2018 +GVL,Gainesville Regional Utilities,Eastern,FLA,Florida,No,No,Yes,Yes,, +GWA,"NaturEner Power Watch, LLC",Mountain,NW,Northwest,Yes,No,Yes,Yes,, +HGMA,"New Harquahala Generating Company, LLC",Arizona,SW,Southwest,Yes,No,Yes,Yes,, +HST,City of Homestead,Eastern,FLA,Florida,No,No,Yes,Yes,, +IID,Imperial Irrigation District,Pacific,CAL,California,No,No,Yes,Yes,, +IPCO,Idaho Power Company,Pacific,NW,Northwest,No,No,Yes,Yes,, +ISNE,ISO New England,Eastern,NE,New England,No,Yes,Yes,Yes,, +JEA,JEA,Eastern,FLA,Florida,No,No,Yes,Yes,, +LDWP,Los Angeles Department of Water and Power,Pacific,CAL,California,No,No,Yes,Yes,, +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,Eastern,MIDW,Midwest,No,No,Yes,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",Eastern,MIDW,Midwest,No,Yes,Yes,Yes,, +NEVP,Nevada Power Company,Pacific,NW,Northwest,No,No,Yes,Yes,, +NSB,Utilities Commission of New Smyrna Beach,Eastern,FLA,Florida,No,No,Yes,No,,1/8/2020 +NWMT,NorthWestern Corporation,Mountain,NW,Northwest,No,No,Yes,Yes,, +NYIS,New York Independent System Operator,Eastern,NY,New York,No,Yes,Yes,Yes,, +OVEC,Ohio Valley Electric Corporation,Eastern,MIDA,Mid-Atlantic,No,No,Yes,No,,12/1/2018 +PACE,PacifiCorp East,Mountain,NW,Northwest,No,No,Yes,Yes,, +PACW,PacifiCorp West,Pacific,NW,Northwest,No,No,Yes,Yes,, +PGE,Portland General Electric Company,Pacific,NW,Northwest,No,No,Yes,Yes,, +PJM,"PJM Interconnection, LLC",Eastern,MIDA,Mid-Atlantic,No,Yes,Yes,Yes,, +PNM,Public Service Company of New Mexico,Arizona,SW,Southwest,No,Yes,Yes,Yes,, +PSCO,Public Service Company of Colorado,Mountain,NW,Northwest,No,No,Yes,Yes,, +PSEI,"Puget Sound Energy, Inc.",Pacific,NW,Northwest,No,No,Yes,Yes,, +SC,South Carolina Public Service Authority,Eastern,CAR,Carolinas,No,No,Yes,Yes,, +SCEG,"Dominion Energy South Carolina, Inc.",Eastern,CAR,Carolinas,No,No,Yes,Yes,, +SCL,Seattle City Light,Pacific,NW,Northwest,No,No,Yes,Yes,, +SEC,Seminole Electric Cooperative,Eastern,FLA,Florida,No,No,Yes,Yes,, +SEPA,Southeastern Power Administration,Central,SE,Southeast,Yes,No,Yes,Yes,, +SOCO,"Southern Company Services, Inc. 
- Trans",Central,SE,Southeast,No,No,Yes,Yes,, +SPA,Southwestern Power Administration,Central,CENT,Central,No,No,Yes,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,Arizona,SW,Southwest,No,No,Yes,Yes,, +SWPP,Southwest Power Pool,Central,CENT,Central,No,Yes,Yes,Yes,, +TAL,City of Tallahassee,Eastern,FLA,Florida,No,No,Yes,Yes,, +TEC,Tampa Electric Company,Eastern,FLA,Florida,No,No,Yes,Yes,, +TEPC,Tucson Electric Power,Arizona,SW,Southwest,No,No,Yes,Yes,, +TIDC,Turlock Irrigation District,Pacific,CAL,California,No,No,Yes,Yes,, +TPWR,"City of Tacoma, Department of Public Utilities, Light Division",Pacific,NW,Northwest,No,No,Yes,Yes,, +TVA,Tennessee Valley Authority,Central,TEN,Tennessee,No,No,Yes,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,Arizona,NW,Northwest,No,No,Yes,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,Arizona,SW,Southwest,No,No,Yes,Yes,, +WAUE,Western Area Power Administration - Upper Great Plains East,Mountain,NW,Northwest,No,No,Yes,No,,10/1/2015 +WAUW,Western Area Power Administration - Upper Great Plains West,Mountain,NW,Northwest,No,No,Yes,Yes,, +WWA,"NaturEner Wind Watch, LLC",Mountain,NW,Northwest,Yes,No,Yes,Yes,, +YAD,"Alcoa Power Generating, Inc. - Yadkin Division",Eastern,CAR,Carolinas,Yes,No,Yes,Yes,, +AESO,Alberta Electric System Operator,,CAN,Canada,No,No,No,Yes,, +BCHA,British Columbia Hydro and Power Authority,,CAN,Canada,No,No,No,Yes,, +HQT,Hydro-Quebec TransEnergie,,CAN,Canada,No,No,No,Yes,, +IESO,Ontario IESO,,CAN,Canada,No,No,No,Yes,, +MHEB,Manitoba Hydro,,CAN,Canada,No,No,No,Yes,, +NBSO,New Brunswick System Operator,,CAN,Canada,No,No,No,Yes,, +SPC,Saskatchewan Power Corporation,,CAN,Canada,No,No,No,Yes,, +CEN,Centro Nacional de Control de Energia,,MEX,Mexico,No,No,No,Yes,10/31/2017, CFE,Comision Federal de Electricidad,,MEX,Mexico,No,No,No,No,,7/1/2018 \ No newline at end of file diff --git a/examples/eia/docs/data/eia-connections-reference.csv b/examples/eia/docs/data/eia-connections-reference.csv index d5643fe98..8e6c7c677 100644 --- a/examples/eia/docs/data/eia-connections-reference.csv +++ b/examples/eia/docs/data/eia-connections-reference.csv @@ -1,336 +1,336 @@ -BA Code,BA Name,Directly Interconnected BA Code,Directly Interconnected BA Name,Country of Foreign Directly Interconnected BA,Active Connection,Activation Date,Retirement Date -AEC,PowerSouth Energy Cooperative,MISO,"Midcontinent Independent System Operator, Inc.",,No,,9/1/2021 -AEC,PowerSouth Energy Cooperative,SOCO,"Southern Company Services, Inc. - Trans",,No,,9/1/2021 -AECI,"Associated Electric Cooperative, Inc.",MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -AECI,"Associated Electric Cooperative, Inc.",SPA,Southwestern Power Administration,,Yes,, -AECI,"Associated Electric Cooperative, Inc.",SWPP,Southwest Power Pool,,Yes,, -AECI,"Associated Electric Cooperative, Inc.",TVA,Tennessee Valley Authority,,Yes,, -AECI,"Associated Electric Cooperative, Inc.",WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 -AVA,Avista Corporation,BPAT,Bonneville Power Administration,,Yes,, -AVA,Avista Corporation,CHPD,Public Utility District No. 1 of Chelan County,,Yes,, -AVA,Avista Corporation,GCPD,"Public Utility District No. 
2 of Grant County, Washington",,Yes,, -AVA,Avista Corporation,IPCO,Idaho Power Company,,Yes,, -AVA,Avista Corporation,NWMT,NorthWestern Corporation,,Yes,, -AVA,Avista Corporation,PACW,PacifiCorp West,,Yes,, -AVRN,"Avangrid Renewables, LLC",BPAT,Bonneville Power Administration,,Yes,7/31/2018, -AVRN,"Avangrid Renewables, LLC",PACW,PacifiCorp West,,Yes,4/2/2019, -AZPS,Arizona Public Service Company,CISO,California Independent System Operator,,Yes,, -AZPS,Arizona Public Service Company,GRMA,"Gila River Power, LLC",,No,,5/3/2018 -AZPS,Arizona Public Service Company,IID,Imperial Irrigation District,,Yes,, -AZPS,Arizona Public Service Company,LDWP,Los Angeles Department of Water and Power,,Yes,, -AZPS,Arizona Public Service Company,PACE,PacifiCorp East,,Yes,, -AZPS,Arizona Public Service Company,PNM,Public Service Company of New Mexico,,Yes,, -AZPS,Arizona Public Service Company,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, -AZPS,Arizona Public Service Company,TEPC,Tucson Electric Power,,Yes,, -AZPS,Arizona Public Service Company,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, -AZPS,Arizona Public Service Company,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -BANC,Balancing Authority of Northern California,BPAT,Bonneville Power Administration,,Yes,, -BANC,Balancing Authority of Northern California,CISO,California Independent System Operator,,Yes,, -BANC,Balancing Authority of Northern California,TIDC,Turlock Irrigation District,,Yes,, -BPAT,Bonneville Power Administration,AVA,Avista Corporation,,Yes,, -BPAT,Bonneville Power Administration,AVRN,"Avangrid Renewables, LLC",,Yes,7/31/2018, -BPAT,Bonneville Power Administration,BANC,Balancing Authority of Northern California,,Yes,, -BPAT,Bonneville Power Administration,BCHA,British Columbia Hydro and Power Authority,Canada,Yes,, -BPAT,Bonneville Power Administration,CHPD,Public Utility District No. 1 of Chelan County,,Yes,, -BPAT,Bonneville Power Administration,CISO,California Independent System Operator,,Yes,, -BPAT,Bonneville Power Administration,DOPD,PUD No. 1 of Douglas County,,Yes,, -BPAT,Bonneville Power Administration,GCPD,"Public Utility District No. 2 of Grant County, Washington",,Yes,, -BPAT,Bonneville Power Administration,GRID,"Gridforce Energy Management, LLC",,Yes,, -BPAT,Bonneville Power Administration,IPCO,Idaho Power Company,,Yes,, -BPAT,Bonneville Power Administration,LDWP,Los Angeles Department of Water and Power,,Yes,, -BPAT,Bonneville Power Administration,NEVP,Nevada Power Company,,Yes,, -BPAT,Bonneville Power Administration,NWMT,NorthWestern Corporation,,Yes,, -BPAT,Bonneville Power Administration,PACW,PacifiCorp West,,Yes,, -BPAT,Bonneville Power Administration,PGE,Portland General Electric Company,,Yes,, -BPAT,Bonneville Power Administration,PSEI,"Puget Sound Energy, Inc.",,Yes,, -BPAT,Bonneville Power Administration,SCL,Seattle City Light,,Yes,, -BPAT,Bonneville Power Administration,TPWR,"City of Tacoma, Department of Public Utilities, Light Division",,Yes,, -CHPD,Public Utility District No. 1 of Chelan County,AVA,Avista Corporation,,Yes,, -CHPD,Public Utility District No. 1 of Chelan County,BPAT,Bonneville Power Administration,,Yes,, -CHPD,Public Utility District No. 1 of Chelan County,DOPD,PUD No. 1 of Douglas County,,Yes,, -CHPD,Public Utility District No. 
1 of Chelan County,PSEI,"Puget Sound Energy, Inc.",,Yes,, -CISO,California Independent System Operator,AZPS,Arizona Public Service Company,,Yes,, -CISO,California Independent System Operator,BANC,Balancing Authority of Northern California,,Yes,, -CISO,California Independent System Operator,BPAT,Bonneville Power Administration,,Yes,, -CISO,California Independent System Operator,CEN,Centro Nacional de Control de Energia,Mexico,Yes,7/1/2018, -CISO,California Independent System Operator,CFE,Comision Federal de Electricidad,Mexico,No,,7/1/2018 -CISO,California Independent System Operator,IID,Imperial Irrigation District,,Yes,, -CISO,California Independent System Operator,LDWP,Los Angeles Department of Water and Power,,Yes,, -CISO,California Independent System Operator,NEVP,Nevada Power Company,,Yes,, -CISO,California Independent System Operator,PACW,PacifiCorp West,,Yes,, -CISO,California Independent System Operator,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, -CISO,California Independent System Operator,TIDC,Turlock Irrigation District,,Yes,, -CISO,California Independent System Operator,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -CPLE,Duke Energy Progress East,DUK,Duke Energy Carolinas,,Yes,, -CPLE,Duke Energy Progress East,PJM,"PJM Interconnection, LLC",,Yes,, -CPLE,Duke Energy Progress East,SC,South Carolina Public Service Authority,,Yes,, -CPLE,Duke Energy Progress East,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, -CPLE,Duke Energy Progress East,YAD,"Alcoa Power Generating, Inc. - Yadkin Division",,Yes,, -CPLW,Duke Energy Progress West,DUK,Duke Energy Carolinas,,Yes,, -CPLW,Duke Energy Progress West,PJM,"PJM Interconnection, LLC",,Yes,, -CPLW,Duke Energy Progress West,TVA,Tennessee Valley Authority,,Yes,, -DEAA,"Arlington Valley, LLC",SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, -DOPD,PUD No. 1 of Douglas County,BPAT,Bonneville Power Administration,,Yes,, -DOPD,PUD No. 1 of Douglas County,CHPD,Public Utility District No. 1 of Chelan County,,Yes,, -DUK,Duke Energy Carolinas,CPLE,Duke Energy Progress East,,Yes,, -DUK,Duke Energy Carolinas,CPLW,Duke Energy Progress West,,Yes,, -DUK,Duke Energy Carolinas,PJM,"PJM Interconnection, LLC",,Yes,, -DUK,Duke Energy Carolinas,SC,South Carolina Public Service Authority,,Yes,, -DUK,Duke Energy Carolinas,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, -DUK,Duke Energy Carolinas,SEPA,Southeastern Power Administration,,Yes,, -DUK,Duke Energy Carolinas,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -DUK,Duke Energy Carolinas,TVA,Tennessee Valley Authority,,Yes,, -DUK,Duke Energy Carolinas,YAD,"Alcoa Power Generating, Inc. 
- Yadkin Division",,Yes,, -EEI,"Electric Energy, Inc.",LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,No,,2/29/2020 -EEI,"Electric Energy, Inc.",MISO,"Midcontinent Independent System Operator, Inc.",,No,,2/29/2020 -EEI,"Electric Energy, Inc.",TVA,Tennessee Valley Authority,,No,,2/29/2020 -EPE,El Paso Electric Company,PNM,Public Service Company of New Mexico,,Yes,, -EPE,El Paso Electric Company,SWPP,Southwest Power Pool,,Yes,, -EPE,El Paso Electric Company,TEPC,Tucson Electric Power,,Yes,, -ERCO,"Electric Reliability Council of Texas, Inc.",CEN,Centro Nacional de Control de Energia,Mexico,Yes,10/31/2017, -ERCO,"Electric Reliability Council of Texas, Inc.",CFE,Comision Federal de Electricidad,Mexico,No,,10/31/2017 -ERCO,"Electric Reliability Council of Texas, Inc.",SWPP,Southwest Power Pool,,Yes,, -FMPP,Florida Municipal Power Pool,FPC,"Duke Energy Florida, Inc.",,Yes,, -FMPP,Florida Municipal Power Pool,FPL,Florida Power & Light Co.,,Yes,, -FMPP,Florida Municipal Power Pool,JEA,JEA,,Yes,, -FMPP,Florida Municipal Power Pool,TEC,Tampa Electric Company,,Yes,, -FPC,"Duke Energy Florida, Inc.",FMPP,Florida Municipal Power Pool,,Yes,, -FPC,"Duke Energy Florida, Inc.",FPL,Florida Power & Light Co.,,Yes,, -FPC,"Duke Energy Florida, Inc.",GVL,Gainesville Regional Utilities,,Yes,, -FPC,"Duke Energy Florida, Inc.",NSB,Utilities Commission of New Smyrna Beach,,No,,1/8/2020 -FPC,"Duke Energy Florida, Inc.",SEC,Seminole Electric Cooperative,,Yes,, -FPC,"Duke Energy Florida, Inc.",SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -FPC,"Duke Energy Florida, Inc.",TAL,City of Tallahassee,,Yes,, -FPC,"Duke Energy Florida, Inc.",TEC,Tampa Electric Company,,Yes,, -FPL,Florida Power & Light Co.,FMPP,Florida Municipal Power Pool,,Yes,, -FPL,Florida Power & Light Co.,FPC,"Duke Energy Florida, Inc.",,Yes,, -FPL,Florida Power & Light Co.,GVL,Gainesville Regional Utilities,,Yes,, -FPL,Florida Power & Light Co.,HST,City of Homestead,,Yes,, -FPL,Florida Power & Light Co.,JEA,JEA,,Yes,, -FPL,Florida Power & Light Co.,NSB,Utilities Commission of New Smyrna Beach,,No,,1/8/2020 -FPL,Florida Power & Light Co.,SEC,Seminole Electric Cooperative,,Yes,, -FPL,Florida Power & Light Co.,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -FPL,Florida Power & Light Co.,TEC,Tampa Electric Company,,Yes,, -GCPD,"Public Utility District No. 2 of Grant County, Washington",AVA,Avista Corporation,,Yes,, -GCPD,"Public Utility District No. 2 of Grant County, Washington",BPAT,Bonneville Power Administration,,Yes,, -GCPD,"Public Utility District No. 2 of Grant County, Washington",PACW,PacifiCorp West,,Yes,, -GCPD,"Public Utility District No. 
2 of Grant County, Washington",PSEI,"Puget Sound Energy, Inc.",,Yes,, -GLHB,GridLiance,LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,No,2/29/2020,9/1/2022 -GLHB,GridLiance,MISO,"Midcontinent Independent System Operator, Inc.",,No,2/29/2020,9/1/2022 -GRID,"Gridforce Energy Management, LLC",BPAT,Bonneville Power Administration,,Yes,, -GRID,"Gridforce Energy Management, LLC",PNM,Public Service Company of New Mexico,,Yes,1/15/2017, -GRID,"Gridforce Energy Management, LLC",SRP,Salt River Project Agricultural Improvement and Power District,,Yes,12/14/2020, -GRIF,"Griffith Energy, LLC",WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -GRMA,"Gila River Power, LLC",AZPS,Arizona Public Service Company,,No,,5/3/2018 -GVL,Gainesville Regional Utilities,FPC,"Duke Energy Florida, Inc.",,Yes,, -GVL,Gainesville Regional Utilities,FPL,Florida Power & Light Co.,,Yes,, -GWA,"NaturEner Power Watch, LLC",NWMT,NorthWestern Corporation,,Yes,, -HGMA,"New Harquahala Generating Company, LLC",SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, -HST,City of Homestead,FPL,Florida Power & Light Co.,,Yes,, -IID,Imperial Irrigation District,AZPS,Arizona Public Service Company,,Yes,, -IID,Imperial Irrigation District,CISO,California Independent System Operator,,Yes,, -IID,Imperial Irrigation District,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -IPCO,Idaho Power Company,AVA,Avista Corporation,,Yes,, -IPCO,Idaho Power Company,BPAT,Bonneville Power Administration,,Yes,, -IPCO,Idaho Power Company,NEVP,Nevada Power Company,,Yes,, -IPCO,Idaho Power Company,NWMT,NorthWestern Corporation,,Yes,, -IPCO,Idaho Power Company,PACE,PacifiCorp East,,Yes,, -IPCO,Idaho Power Company,PACW,PacifiCorp West,,Yes,, -ISNE,ISO New England,HQT,Hydro-Quebec TransEnergie,Canada,Yes,, -ISNE,ISO New England,NBSO,New Brunswick System Operator,Canada,Yes,, -ISNE,ISO New England,NYIS,New York Independent System Operator,,Yes,, -JEA,JEA,FMPP,Florida Municipal Power Pool,,Yes,, -JEA,JEA,FPL,Florida Power & Light Co.,,Yes,, -JEA,JEA,SEC,Seminole Electric Cooperative,,Yes,, -LDWP,Los Angeles Department of Water and Power,AZPS,Arizona Public Service Company,,Yes,, -LDWP,Los Angeles Department of Water and Power,BPAT,Bonneville Power Administration,,Yes,, -LDWP,Los Angeles Department of Water and Power,CISO,California Independent System Operator,,Yes,, -LDWP,Los Angeles Department of Water and Power,NEVP,Nevada Power Company,,Yes,, -LDWP,Los Angeles Department of Water and Power,PACE,PacifiCorp East,,Yes,, -LDWP,Los Angeles Department of Water and Power,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,EEI,"Electric Energy, Inc.",,No,,2/29/2020 -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,GLHB,GridLiance,,No,2/29/2020,9/1/2022 -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,OVEC,Ohio Valley Electric Corporation,,No,,12/1/2018 -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,PJM,"PJM Interconnection, LLC",,Yes,, -LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,TVA,Tennessee Valley Authority,,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",AEC,PowerSouth Energy Cooperative,,No,,9/1/2021 -MISO,"Midcontinent Independent System 
Operator, Inc.",AECI,"Associated Electric Cooperative, Inc.",,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",EEI,"Electric Energy, Inc.",,No,,2/29/2020 -MISO,"Midcontinent Independent System Operator, Inc.",GLHB,GridLiance,,No,2/29/2020,9/1/2022 -MISO,"Midcontinent Independent System Operator, Inc.",IESO,Ontario IESO,Canada,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",MHEB,Manitoba Hydro,Canada,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",PJM,"PJM Interconnection, LLC",,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",SPA,Southwestern Power Administration,,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",SWPP,Southwest Power Pool,,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",TVA,Tennessee Valley Authority,,Yes,, -MISO,"Midcontinent Independent System Operator, Inc.",WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 -NEVP,Nevada Power Company,BPAT,Bonneville Power Administration,,Yes,, -NEVP,Nevada Power Company,CISO,California Independent System Operator,,Yes,, -NEVP,Nevada Power Company,IPCO,Idaho Power Company,,Yes,, -NEVP,Nevada Power Company,LDWP,Los Angeles Department of Water and Power,,Yes,, -NEVP,Nevada Power Company,PACE,PacifiCorp East,,Yes,, -NEVP,Nevada Power Company,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -NSB,Utilities Commission of New Smyrna Beach,FPC,"Duke Energy Florida, Inc.",,No,,1/8/2020 -NSB,Utilities Commission of New Smyrna Beach,FPL,Florida Power & Light Co.,,No,,1/8/2020 -NWMT,NorthWestern Corporation,AESO,Alberta Electric System Operator,Canada,Yes,, -NWMT,NorthWestern Corporation,AVA,Avista Corporation,,Yes,, -NWMT,NorthWestern Corporation,BPAT,Bonneville Power Administration,,Yes,, -NWMT,NorthWestern Corporation,GWA,"NaturEner Power Watch, LLC",,Yes,, -NWMT,NorthWestern Corporation,IPCO,Idaho Power Company,,Yes,, -NWMT,NorthWestern Corporation,PACE,PacifiCorp East,,Yes,, -NWMT,NorthWestern Corporation,WAUW,Western Area Power Administration - Upper Great Plains West,,Yes,, -NWMT,NorthWestern Corporation,WWA,"NaturEner Wind Watch, LLC",,Yes,, -NYIS,New York Independent System Operator,HQT,Hydro-Quebec TransEnergie,Canada,Yes,, -NYIS,New York Independent System Operator,IESO,Ontario IESO,Canada,Yes,, -NYIS,New York Independent System Operator,ISNE,ISO New England,,Yes,, -NYIS,New York Independent System Operator,PJM,"PJM Interconnection, LLC",,Yes,, -OVEC,Ohio Valley Electric Corporation,LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,No,,12/1/2018 -OVEC,Ohio Valley Electric Corporation,PJM,"PJM Interconnection, LLC",,No,,12/1/2018 -PACE,PacifiCorp East,AZPS,Arizona Public Service Company,,Yes,, -PACE,PacifiCorp East,IPCO,Idaho Power Company,,Yes,, -PACE,PacifiCorp East,LDWP,Los Angeles Department of Water and Power,,Yes,, -PACE,PacifiCorp East,NEVP,Nevada Power Company,,Yes,, -PACE,PacifiCorp East,NWMT,NorthWestern Corporation,,Yes,, -PACE,PacifiCorp East,PACW,PacifiCorp West,,No,,7/6/2022 -PACE,PacifiCorp East,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, -PACW,PacifiCorp West,AVA,Avista Corporation,,Yes,, -PACW,PacifiCorp West,AVRN,"Avangrid Renewables, LLC",,Yes,4/2/2019, -PACW,PacifiCorp West,BPAT,Bonneville Power 
Administration,,Yes,, -PACW,PacifiCorp West,CISO,California Independent System Operator,,Yes,, -PACW,PacifiCorp West,GCPD,"Public Utility District No. 2 of Grant County, Washington",,Yes,, -PACW,PacifiCorp West,IPCO,Idaho Power Company,,Yes,, -PACW,PacifiCorp West,PACE,PacifiCorp East,,No,,7/6/2022 -PACW,PacifiCorp West,PGE,Portland General Electric Company,,Yes,, -PGE,Portland General Electric Company,BPAT,Bonneville Power Administration,,Yes,, -PGE,Portland General Electric Company,PACW,PacifiCorp West,,Yes,, -PJM,"PJM Interconnection, LLC",CPLE,Duke Energy Progress East,,Yes,, -PJM,"PJM Interconnection, LLC",CPLW,Duke Energy Progress West,,Yes,, -PJM,"PJM Interconnection, LLC",DUK,Duke Energy Carolinas,,Yes,, -PJM,"PJM Interconnection, LLC",LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,Yes,, -PJM,"PJM Interconnection, LLC",MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -PJM,"PJM Interconnection, LLC",NYIS,New York Independent System Operator,,Yes,, -PJM,"PJM Interconnection, LLC",OVEC,Ohio Valley Electric Corporation,,No,,12/1/2018 -PJM,"PJM Interconnection, LLC",TVA,Tennessee Valley Authority,,Yes,, -PNM,Public Service Company of New Mexico,AZPS,Arizona Public Service Company,,Yes,, -PNM,Public Service Company of New Mexico,EPE,El Paso Electric Company,,Yes,, -PNM,Public Service Company of New Mexico,GRID,"Gridforce Energy Management, LLC",,Yes,1/15/2017, -PNM,Public Service Company of New Mexico,PSCO,Public Service Company of Colorado,,Yes,, -PNM,Public Service Company of New Mexico,SRP,Salt River Project Agricultural Improvement and Power District,,No,,1/1/2017 -PNM,Public Service Company of New Mexico,SWPP,Southwest Power Pool,,Yes,, -PNM,Public Service Company of New Mexico,TEPC,Tucson Electric Power,,Yes,, -PNM,Public Service Company of New Mexico,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, -PSCO,Public Service Company of Colorado,PNM,Public Service Company of New Mexico,,Yes,, -PSCO,Public Service Company of Colorado,SWPP,Southwest Power Pool,,Yes,, -PSCO,Public Service Company of Colorado,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, -PSEI,"Puget Sound Energy, Inc.",BPAT,Bonneville Power Administration,,Yes,, -PSEI,"Puget Sound Energy, Inc.",CHPD,Public Utility District No. 1 of Chelan County,,Yes,, -PSEI,"Puget Sound Energy, Inc.",GCPD,"Public Utility District No. 2 of Grant County, Washington",,Yes,, -PSEI,"Puget Sound Energy, Inc.",SCL,Seattle City Light,,Yes,, -PSEI,"Puget Sound Energy, Inc.",TPWR,"City of Tacoma, Department of Public Utilities, Light Division",,Yes,, -SC,South Carolina Public Service Authority,CPLE,Duke Energy Progress East,,Yes,, -SC,South Carolina Public Service Authority,DUK,Duke Energy Carolinas,,Yes,, -SC,South Carolina Public Service Authority,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, -SC,South Carolina Public Service Authority,SEPA,Southeastern Power Administration,,Yes,, -SC,South Carolina Public Service Authority,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -SCEG,"Dominion Energy South Carolina, Inc.",CPLE,Duke Energy Progress East,,Yes,, -SCEG,"Dominion Energy South Carolina, Inc.",DUK,Duke Energy Carolinas,,Yes,, -SCEG,"Dominion Energy South Carolina, Inc.",SC,South Carolina Public Service Authority,,Yes,, -SCEG,"Dominion Energy South Carolina, Inc.",SEPA,Southeastern Power Administration,,Yes,, -SCEG,"Dominion Energy South Carolina, Inc.",SOCO,"Southern Company Services, Inc. 
- Trans",,Yes,, -SCL,Seattle City Light,BPAT,Bonneville Power Administration,,Yes,, -SCL,Seattle City Light,PSEI,"Puget Sound Energy, Inc.",,Yes,, -SEC,Seminole Electric Cooperative,FPC,"Duke Energy Florida, Inc.",,Yes,, -SEC,Seminole Electric Cooperative,FPL,Florida Power & Light Co.,,Yes,, -SEC,Seminole Electric Cooperative,JEA,JEA,,Yes,, -SEC,Seminole Electric Cooperative,TEC,Tampa Electric Company,,Yes,, -SEPA,Southeastern Power Administration,DUK,Duke Energy Carolinas,,Yes,, -SEPA,Southeastern Power Administration,SC,South Carolina Public Service Authority,,Yes,, -SEPA,Southeastern Power Administration,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, -SEPA,Southeastern Power Administration,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",AEC,PowerSouth Energy Cooperative,,No,,9/1/2021 -SOCO,"Southern Company Services, Inc. - Trans",DUK,Duke Energy Carolinas,,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",FPC,"Duke Energy Florida, Inc.",,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",FPL,Florida Power & Light Co.,,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",SC,South Carolina Public Service Authority,,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",SEPA,Southeastern Power Administration,,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",TAL,City of Tallahassee,,Yes,, -SOCO,"Southern Company Services, Inc. - Trans",TVA,Tennessee Valley Authority,,Yes,, -SPA,Southwestern Power Administration,AECI,"Associated Electric Cooperative, Inc.",,Yes,, -SPA,Southwestern Power Administration,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -SPA,Southwestern Power Administration,SWPP,Southwest Power Pool,,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,AZPS,Arizona Public Service Company,,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,CISO,California Independent System Operator,,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,DEAA,"Arlington Valley, LLC",,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,GRID,"Gridforce Energy Management, LLC",,Yes,12/14/2020, -SRP,Salt River Project Agricultural Improvement and Power District,HGMA,"New Harquahala Generating Company, LLC",,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,PNM,Public Service Company of New Mexico,,No,,1/1/2017 -SRP,Salt River Project Agricultural Improvement and Power District,TEPC,Tucson Electric Power,,Yes,, -SRP,Salt River Project Agricultural Improvement and Power District,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -SWPP,Southwest Power Pool,AECI,"Associated Electric Cooperative, Inc.",,Yes,, -SWPP,Southwest Power Pool,EPE,El Paso Electric Company,,Yes,, -SWPP,Southwest Power Pool,ERCO,"Electric Reliability Council of Texas, Inc.",,Yes,, -SWPP,Southwest Power Pool,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -SWPP,Southwest Power Pool,PNM,Public Service Company of New Mexico,,Yes,, -SWPP,Southwest Power Pool,PSCO,Public Service Company of Colorado,,Yes,, -SWPP,Southwest Power Pool,SPA,Southwestern Power Administration,,Yes,, -SWPP,Southwest Power Pool,SPC,Saskatchewan Power Corporation,Canada,Yes,10/1/2016, -SWPP,Southwest Power Pool,WACM,Western Area Power 
Administration - Rocky Mountain Region,,Yes,, -SWPP,Southwest Power Pool,WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 -SWPP,Southwest Power Pool,WAUW,Western Area Power Administration - Upper Great Plains West,,Yes,, -TAL,City of Tallahassee,FPC,"Duke Energy Florida, Inc.",,Yes,, -TAL,City of Tallahassee,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, -TEC,Tampa Electric Company,FMPP,Florida Municipal Power Pool,,Yes,, -TEC,Tampa Electric Company,FPC,"Duke Energy Florida, Inc.",,Yes,, -TEC,Tampa Electric Company,FPL,Florida Power & Light Co.,,Yes,, -TEC,Tampa Electric Company,SEC,Seminole Electric Cooperative,,Yes,, -TEPC,Tucson Electric Power,AZPS,Arizona Public Service Company,,Yes,, -TEPC,Tucson Electric Power,EPE,El Paso Electric Company,,Yes,, -TEPC,Tucson Electric Power,PNM,Public Service Company of New Mexico,,Yes,, -TEPC,Tucson Electric Power,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, -TEPC,Tucson Electric Power,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -TIDC,Turlock Irrigation District,BANC,Balancing Authority of Northern California,,Yes,, -TIDC,Turlock Irrigation District,CISO,California Independent System Operator,,Yes,, -TPWR,"City of Tacoma, Department of Public Utilities, Light Division",BPAT,Bonneville Power Administration,,Yes,, -TPWR,"City of Tacoma, Department of Public Utilities, Light Division",PSEI,"Puget Sound Energy, Inc.",,Yes,, -TVA,Tennessee Valley Authority,AECI,"Associated Electric Cooperative, Inc.",,Yes,, -TVA,Tennessee Valley Authority,CPLW,Duke Energy Progress West,,Yes,, -TVA,Tennessee Valley Authority,DUK,Duke Energy Carolinas,,Yes,, -TVA,Tennessee Valley Authority,EEI,"Electric Energy, Inc.",,No,,2/29/2020 -TVA,Tennessee Valley Authority,LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,Yes,, -TVA,Tennessee Valley Authority,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, -TVA,Tennessee Valley Authority,PJM,"PJM Interconnection, LLC",,Yes,, -TVA,Tennessee Valley Authority,SOCO,"Southern Company Services, Inc. 
- Trans",,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,AZPS,Arizona Public Service Company,,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,PACE,PacifiCorp East,,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,PNM,Public Service Company of New Mexico,,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,PSCO,Public Service Company of Colorado,,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,SWPP,Southwest Power Pool,,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, -WACM,Western Area Power Administration - Rocky Mountain Region,WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 -WACM,Western Area Power Administration - Rocky Mountain Region,WAUW,Western Area Power Administration - Upper Great Plains West,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,AZPS,Arizona Public Service Company,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,CISO,California Independent System Operator,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,GRIF,"Griffith Energy, LLC",,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,IID,Imperial Irrigation District,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,LDWP,Los Angeles Department of Water and Power,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,NEVP,Nevada Power Company,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,TEPC,Tucson Electric Power,,Yes,, -WALC,Western Area Power Administration - Desert Southwest Region,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, -WAUE,Western Area Power Administration - Upper Great Plains East,AECI,"Associated Electric Cooperative, Inc.",,No,,10/1/2015 -WAUE,Western Area Power Administration - Upper Great Plains East,MISO,"Midcontinent Independent System Operator, Inc.",,No,,10/1/2015 -WAUE,Western Area Power Administration - Upper Great Plains East,SWPP,Southwest Power Pool,,No,,10/1/2015 -WAUE,Western Area Power Administration - Upper Great Plains East,WACM,Western Area Power Administration - Rocky Mountain Region,,No,,10/1/2015 -WAUE,Western Area Power Administration - Upper Great Plains East,WAUW,Western Area Power Administration - Upper Great Plains West,,No,,10/1/2015 -WAUW,Western Area Power Administration - Upper Great Plains West,NWMT,NorthWestern Corporation,,Yes,, -WAUW,Western Area Power Administration - Upper Great Plains West,SWPP,Southwest Power Pool,,Yes,, -WAUW,Western Area Power Administration - Upper Great Plains West,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, -WAUW,Western Area Power Administration - Upper Great Plains West,WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 -WWA,"NaturEner Wind Watch, LLC",NWMT,NorthWestern Corporation,,Yes,, -YAD,"Alcoa Power Generating, Inc. 
- Yadkin Division",CPLE,Duke Energy Progress East,,Yes,, +BA Code,BA Name,Directly Interconnected BA Code,Directly Interconnected BA Name,Country of Foreign Directly Interconnected BA,Active Connection,Activation Date,Retirement Date +AEC,PowerSouth Energy Cooperative,MISO,"Midcontinent Independent System Operator, Inc.",,No,,9/1/2021 +AEC,PowerSouth Energy Cooperative,SOCO,"Southern Company Services, Inc. - Trans",,No,,9/1/2021 +AECI,"Associated Electric Cooperative, Inc.",MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +AECI,"Associated Electric Cooperative, Inc.",SPA,Southwestern Power Administration,,Yes,, +AECI,"Associated Electric Cooperative, Inc.",SWPP,Southwest Power Pool,,Yes,, +AECI,"Associated Electric Cooperative, Inc.",TVA,Tennessee Valley Authority,,Yes,, +AECI,"Associated Electric Cooperative, Inc.",WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 +AVA,Avista Corporation,BPAT,Bonneville Power Administration,,Yes,, +AVA,Avista Corporation,CHPD,Public Utility District No. 1 of Chelan County,,Yes,, +AVA,Avista Corporation,GCPD,"Public Utility District No. 2 of Grant County, Washington",,Yes,, +AVA,Avista Corporation,IPCO,Idaho Power Company,,Yes,, +AVA,Avista Corporation,NWMT,NorthWestern Corporation,,Yes,, +AVA,Avista Corporation,PACW,PacifiCorp West,,Yes,, +AVRN,"Avangrid Renewables, LLC",BPAT,Bonneville Power Administration,,Yes,7/31/2018, +AVRN,"Avangrid Renewables, LLC",PACW,PacifiCorp West,,Yes,4/2/2019, +AZPS,Arizona Public Service Company,CISO,California Independent System Operator,,Yes,, +AZPS,Arizona Public Service Company,GRMA,"Gila River Power, LLC",,No,,5/3/2018 +AZPS,Arizona Public Service Company,IID,Imperial Irrigation District,,Yes,, +AZPS,Arizona Public Service Company,LDWP,Los Angeles Department of Water and Power,,Yes,, +AZPS,Arizona Public Service Company,PACE,PacifiCorp East,,Yes,, +AZPS,Arizona Public Service Company,PNM,Public Service Company of New Mexico,,Yes,, +AZPS,Arizona Public Service Company,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, +AZPS,Arizona Public Service Company,TEPC,Tucson Electric Power,,Yes,, +AZPS,Arizona Public Service Company,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, +AZPS,Arizona Public Service Company,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +BANC,Balancing Authority of Northern California,BPAT,Bonneville Power Administration,,Yes,, +BANC,Balancing Authority of Northern California,CISO,California Independent System Operator,,Yes,, +BANC,Balancing Authority of Northern California,TIDC,Turlock Irrigation District,,Yes,, +BPAT,Bonneville Power Administration,AVA,Avista Corporation,,Yes,, +BPAT,Bonneville Power Administration,AVRN,"Avangrid Renewables, LLC",,Yes,7/31/2018, +BPAT,Bonneville Power Administration,BANC,Balancing Authority of Northern California,,Yes,, +BPAT,Bonneville Power Administration,BCHA,British Columbia Hydro and Power Authority,Canada,Yes,, +BPAT,Bonneville Power Administration,CHPD,Public Utility District No. 1 of Chelan County,,Yes,, +BPAT,Bonneville Power Administration,CISO,California Independent System Operator,,Yes,, +BPAT,Bonneville Power Administration,DOPD,PUD No. 1 of Douglas County,,Yes,, +BPAT,Bonneville Power Administration,GCPD,"Public Utility District No. 
2 of Grant County, Washington",,Yes,, +BPAT,Bonneville Power Administration,GRID,"Gridforce Energy Management, LLC",,Yes,, +BPAT,Bonneville Power Administration,IPCO,Idaho Power Company,,Yes,, +BPAT,Bonneville Power Administration,LDWP,Los Angeles Department of Water and Power,,Yes,, +BPAT,Bonneville Power Administration,NEVP,Nevada Power Company,,Yes,, +BPAT,Bonneville Power Administration,NWMT,NorthWestern Corporation,,Yes,, +BPAT,Bonneville Power Administration,PACW,PacifiCorp West,,Yes,, +BPAT,Bonneville Power Administration,PGE,Portland General Electric Company,,Yes,, +BPAT,Bonneville Power Administration,PSEI,"Puget Sound Energy, Inc.",,Yes,, +BPAT,Bonneville Power Administration,SCL,Seattle City Light,,Yes,, +BPAT,Bonneville Power Administration,TPWR,"City of Tacoma, Department of Public Utilities, Light Division",,Yes,, +CHPD,Public Utility District No. 1 of Chelan County,AVA,Avista Corporation,,Yes,, +CHPD,Public Utility District No. 1 of Chelan County,BPAT,Bonneville Power Administration,,Yes,, +CHPD,Public Utility District No. 1 of Chelan County,DOPD,PUD No. 1 of Douglas County,,Yes,, +CHPD,Public Utility District No. 1 of Chelan County,PSEI,"Puget Sound Energy, Inc.",,Yes,, +CISO,California Independent System Operator,AZPS,Arizona Public Service Company,,Yes,, +CISO,California Independent System Operator,BANC,Balancing Authority of Northern California,,Yes,, +CISO,California Independent System Operator,BPAT,Bonneville Power Administration,,Yes,, +CISO,California Independent System Operator,CEN,Centro Nacional de Control de Energia,Mexico,Yes,7/1/2018, +CISO,California Independent System Operator,CFE,Comision Federal de Electricidad,Mexico,No,,7/1/2018 +CISO,California Independent System Operator,IID,Imperial Irrigation District,,Yes,, +CISO,California Independent System Operator,LDWP,Los Angeles Department of Water and Power,,Yes,, +CISO,California Independent System Operator,NEVP,Nevada Power Company,,Yes,, +CISO,California Independent System Operator,PACW,PacifiCorp West,,Yes,, +CISO,California Independent System Operator,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, +CISO,California Independent System Operator,TIDC,Turlock Irrigation District,,Yes,, +CISO,California Independent System Operator,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +CPLE,Duke Energy Progress East,DUK,Duke Energy Carolinas,,Yes,, +CPLE,Duke Energy Progress East,PJM,"PJM Interconnection, LLC",,Yes,, +CPLE,Duke Energy Progress East,SC,South Carolina Public Service Authority,,Yes,, +CPLE,Duke Energy Progress East,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, +CPLE,Duke Energy Progress East,YAD,"Alcoa Power Generating, Inc. - Yadkin Division",,Yes,, +CPLW,Duke Energy Progress West,DUK,Duke Energy Carolinas,,Yes,, +CPLW,Duke Energy Progress West,PJM,"PJM Interconnection, LLC",,Yes,, +CPLW,Duke Energy Progress West,TVA,Tennessee Valley Authority,,Yes,, +DEAA,"Arlington Valley, LLC",SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, +DOPD,PUD No. 1 of Douglas County,BPAT,Bonneville Power Administration,,Yes,, +DOPD,PUD No. 1 of Douglas County,CHPD,Public Utility District No. 
1 of Chelan County,,Yes,, +DUK,Duke Energy Carolinas,CPLE,Duke Energy Progress East,,Yes,, +DUK,Duke Energy Carolinas,CPLW,Duke Energy Progress West,,Yes,, +DUK,Duke Energy Carolinas,PJM,"PJM Interconnection, LLC",,Yes,, +DUK,Duke Energy Carolinas,SC,South Carolina Public Service Authority,,Yes,, +DUK,Duke Energy Carolinas,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, +DUK,Duke Energy Carolinas,SEPA,Southeastern Power Administration,,Yes,, +DUK,Duke Energy Carolinas,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +DUK,Duke Energy Carolinas,TVA,Tennessee Valley Authority,,Yes,, +DUK,Duke Energy Carolinas,YAD,"Alcoa Power Generating, Inc. - Yadkin Division",,Yes,, +EEI,"Electric Energy, Inc.",LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,No,,2/29/2020 +EEI,"Electric Energy, Inc.",MISO,"Midcontinent Independent System Operator, Inc.",,No,,2/29/2020 +EEI,"Electric Energy, Inc.",TVA,Tennessee Valley Authority,,No,,2/29/2020 +EPE,El Paso Electric Company,PNM,Public Service Company of New Mexico,,Yes,, +EPE,El Paso Electric Company,SWPP,Southwest Power Pool,,Yes,, +EPE,El Paso Electric Company,TEPC,Tucson Electric Power,,Yes,, +ERCO,"Electric Reliability Council of Texas, Inc.",CEN,Centro Nacional de Control de Energia,Mexico,Yes,10/31/2017, +ERCO,"Electric Reliability Council of Texas, Inc.",CFE,Comision Federal de Electricidad,Mexico,No,,10/31/2017 +ERCO,"Electric Reliability Council of Texas, Inc.",SWPP,Southwest Power Pool,,Yes,, +FMPP,Florida Municipal Power Pool,FPC,"Duke Energy Florida, Inc.",,Yes,, +FMPP,Florida Municipal Power Pool,FPL,Florida Power & Light Co.,,Yes,, +FMPP,Florida Municipal Power Pool,JEA,JEA,,Yes,, +FMPP,Florida Municipal Power Pool,TEC,Tampa Electric Company,,Yes,, +FPC,"Duke Energy Florida, Inc.",FMPP,Florida Municipal Power Pool,,Yes,, +FPC,"Duke Energy Florida, Inc.",FPL,Florida Power & Light Co.,,Yes,, +FPC,"Duke Energy Florida, Inc.",GVL,Gainesville Regional Utilities,,Yes,, +FPC,"Duke Energy Florida, Inc.",NSB,Utilities Commission of New Smyrna Beach,,No,,1/8/2020 +FPC,"Duke Energy Florida, Inc.",SEC,Seminole Electric Cooperative,,Yes,, +FPC,"Duke Energy Florida, Inc.",SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +FPC,"Duke Energy Florida, Inc.",TAL,City of Tallahassee,,Yes,, +FPC,"Duke Energy Florida, Inc.",TEC,Tampa Electric Company,,Yes,, +FPL,Florida Power & Light Co.,FMPP,Florida Municipal Power Pool,,Yes,, +FPL,Florida Power & Light Co.,FPC,"Duke Energy Florida, Inc.",,Yes,, +FPL,Florida Power & Light Co.,GVL,Gainesville Regional Utilities,,Yes,, +FPL,Florida Power & Light Co.,HST,City of Homestead,,Yes,, +FPL,Florida Power & Light Co.,JEA,JEA,,Yes,, +FPL,Florida Power & Light Co.,NSB,Utilities Commission of New Smyrna Beach,,No,,1/8/2020 +FPL,Florida Power & Light Co.,SEC,Seminole Electric Cooperative,,Yes,, +FPL,Florida Power & Light Co.,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +FPL,Florida Power & Light Co.,TEC,Tampa Electric Company,,Yes,, +GCPD,"Public Utility District No. 2 of Grant County, Washington",AVA,Avista Corporation,,Yes,, +GCPD,"Public Utility District No. 2 of Grant County, Washington",BPAT,Bonneville Power Administration,,Yes,, +GCPD,"Public Utility District No. 2 of Grant County, Washington",PACW,PacifiCorp West,,Yes,, +GCPD,"Public Utility District No. 
2 of Grant County, Washington",PSEI,"Puget Sound Energy, Inc.",,Yes,, +GLHB,GridLiance,LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,No,2/29/2020,9/1/2022 +GLHB,GridLiance,MISO,"Midcontinent Independent System Operator, Inc.",,No,2/29/2020,9/1/2022 +GRID,"Gridforce Energy Management, LLC",BPAT,Bonneville Power Administration,,Yes,, +GRID,"Gridforce Energy Management, LLC",PNM,Public Service Company of New Mexico,,Yes,1/15/2017, +GRID,"Gridforce Energy Management, LLC",SRP,Salt River Project Agricultural Improvement and Power District,,Yes,12/14/2020, +GRIF,"Griffith Energy, LLC",WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +GRMA,"Gila River Power, LLC",AZPS,Arizona Public Service Company,,No,,5/3/2018 +GVL,Gainesville Regional Utilities,FPC,"Duke Energy Florida, Inc.",,Yes,, +GVL,Gainesville Regional Utilities,FPL,Florida Power & Light Co.,,Yes,, +GWA,"NaturEner Power Watch, LLC",NWMT,NorthWestern Corporation,,Yes,, +HGMA,"New Harquahala Generating Company, LLC",SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, +HST,City of Homestead,FPL,Florida Power & Light Co.,,Yes,, +IID,Imperial Irrigation District,AZPS,Arizona Public Service Company,,Yes,, +IID,Imperial Irrigation District,CISO,California Independent System Operator,,Yes,, +IID,Imperial Irrigation District,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +IPCO,Idaho Power Company,AVA,Avista Corporation,,Yes,, +IPCO,Idaho Power Company,BPAT,Bonneville Power Administration,,Yes,, +IPCO,Idaho Power Company,NEVP,Nevada Power Company,,Yes,, +IPCO,Idaho Power Company,NWMT,NorthWestern Corporation,,Yes,, +IPCO,Idaho Power Company,PACE,PacifiCorp East,,Yes,, +IPCO,Idaho Power Company,PACW,PacifiCorp West,,Yes,, +ISNE,ISO New England,HQT,Hydro-Quebec TransEnergie,Canada,Yes,, +ISNE,ISO New England,NBSO,New Brunswick System Operator,Canada,Yes,, +ISNE,ISO New England,NYIS,New York Independent System Operator,,Yes,, +JEA,JEA,FMPP,Florida Municipal Power Pool,,Yes,, +JEA,JEA,FPL,Florida Power & Light Co.,,Yes,, +JEA,JEA,SEC,Seminole Electric Cooperative,,Yes,, +LDWP,Los Angeles Department of Water and Power,AZPS,Arizona Public Service Company,,Yes,, +LDWP,Los Angeles Department of Water and Power,BPAT,Bonneville Power Administration,,Yes,, +LDWP,Los Angeles Department of Water and Power,CISO,California Independent System Operator,,Yes,, +LDWP,Los Angeles Department of Water and Power,NEVP,Nevada Power Company,,Yes,, +LDWP,Los Angeles Department of Water and Power,PACE,PacifiCorp East,,Yes,, +LDWP,Los Angeles Department of Water and Power,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,EEI,"Electric Energy, Inc.",,No,,2/29/2020 +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,GLHB,GridLiance,,No,2/29/2020,9/1/2022 +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,OVEC,Ohio Valley Electric Corporation,,No,,12/1/2018 +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,PJM,"PJM Interconnection, LLC",,Yes,, +LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,TVA,Tennessee Valley Authority,,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",AEC,PowerSouth Energy Cooperative,,No,,9/1/2021 +MISO,"Midcontinent Independent System 
Operator, Inc.",AECI,"Associated Electric Cooperative, Inc.",,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",EEI,"Electric Energy, Inc.",,No,,2/29/2020 +MISO,"Midcontinent Independent System Operator, Inc.",GLHB,GridLiance,,No,2/29/2020,9/1/2022 +MISO,"Midcontinent Independent System Operator, Inc.",IESO,Ontario IESO,Canada,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",MHEB,Manitoba Hydro,Canada,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",PJM,"PJM Interconnection, LLC",,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",SPA,Southwestern Power Administration,,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",SWPP,Southwest Power Pool,,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",TVA,Tennessee Valley Authority,,Yes,, +MISO,"Midcontinent Independent System Operator, Inc.",WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 +NEVP,Nevada Power Company,BPAT,Bonneville Power Administration,,Yes,, +NEVP,Nevada Power Company,CISO,California Independent System Operator,,Yes,, +NEVP,Nevada Power Company,IPCO,Idaho Power Company,,Yes,, +NEVP,Nevada Power Company,LDWP,Los Angeles Department of Water and Power,,Yes,, +NEVP,Nevada Power Company,PACE,PacifiCorp East,,Yes,, +NEVP,Nevada Power Company,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +NSB,Utilities Commission of New Smyrna Beach,FPC,"Duke Energy Florida, Inc.",,No,,1/8/2020 +NSB,Utilities Commission of New Smyrna Beach,FPL,Florida Power & Light Co.,,No,,1/8/2020 +NWMT,NorthWestern Corporation,AESO,Alberta Electric System Operator,Canada,Yes,, +NWMT,NorthWestern Corporation,AVA,Avista Corporation,,Yes,, +NWMT,NorthWestern Corporation,BPAT,Bonneville Power Administration,,Yes,, +NWMT,NorthWestern Corporation,GWA,"NaturEner Power Watch, LLC",,Yes,, +NWMT,NorthWestern Corporation,IPCO,Idaho Power Company,,Yes,, +NWMT,NorthWestern Corporation,PACE,PacifiCorp East,,Yes,, +NWMT,NorthWestern Corporation,WAUW,Western Area Power Administration - Upper Great Plains West,,Yes,, +NWMT,NorthWestern Corporation,WWA,"NaturEner Wind Watch, LLC",,Yes,, +NYIS,New York Independent System Operator,HQT,Hydro-Quebec TransEnergie,Canada,Yes,, +NYIS,New York Independent System Operator,IESO,Ontario IESO,Canada,Yes,, +NYIS,New York Independent System Operator,ISNE,ISO New England,,Yes,, +NYIS,New York Independent System Operator,PJM,"PJM Interconnection, LLC",,Yes,, +OVEC,Ohio Valley Electric Corporation,LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,No,,12/1/2018 +OVEC,Ohio Valley Electric Corporation,PJM,"PJM Interconnection, LLC",,No,,12/1/2018 +PACE,PacifiCorp East,AZPS,Arizona Public Service Company,,Yes,, +PACE,PacifiCorp East,IPCO,Idaho Power Company,,Yes,, +PACE,PacifiCorp East,LDWP,Los Angeles Department of Water and Power,,Yes,, +PACE,PacifiCorp East,NEVP,Nevada Power Company,,Yes,, +PACE,PacifiCorp East,NWMT,NorthWestern Corporation,,Yes,, +PACE,PacifiCorp East,PACW,PacifiCorp West,,No,,7/6/2022 +PACE,PacifiCorp East,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, +PACW,PacifiCorp West,AVA,Avista Corporation,,Yes,, +PACW,PacifiCorp West,AVRN,"Avangrid Renewables, LLC",,Yes,4/2/2019, +PACW,PacifiCorp West,BPAT,Bonneville Power 
Administration,,Yes,, +PACW,PacifiCorp West,CISO,California Independent System Operator,,Yes,, +PACW,PacifiCorp West,GCPD,"Public Utility District No. 2 of Grant County, Washington",,Yes,, +PACW,PacifiCorp West,IPCO,Idaho Power Company,,Yes,, +PACW,PacifiCorp West,PACE,PacifiCorp East,,No,,7/6/2022 +PACW,PacifiCorp West,PGE,Portland General Electric Company,,Yes,, +PGE,Portland General Electric Company,BPAT,Bonneville Power Administration,,Yes,, +PGE,Portland General Electric Company,PACW,PacifiCorp West,,Yes,, +PJM,"PJM Interconnection, LLC",CPLE,Duke Energy Progress East,,Yes,, +PJM,"PJM Interconnection, LLC",CPLW,Duke Energy Progress West,,Yes,, +PJM,"PJM Interconnection, LLC",DUK,Duke Energy Carolinas,,Yes,, +PJM,"PJM Interconnection, LLC",LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,Yes,, +PJM,"PJM Interconnection, LLC",MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +PJM,"PJM Interconnection, LLC",NYIS,New York Independent System Operator,,Yes,, +PJM,"PJM Interconnection, LLC",OVEC,Ohio Valley Electric Corporation,,No,,12/1/2018 +PJM,"PJM Interconnection, LLC",TVA,Tennessee Valley Authority,,Yes,, +PNM,Public Service Company of New Mexico,AZPS,Arizona Public Service Company,,Yes,, +PNM,Public Service Company of New Mexico,EPE,El Paso Electric Company,,Yes,, +PNM,Public Service Company of New Mexico,GRID,"Gridforce Energy Management, LLC",,Yes,1/15/2017, +PNM,Public Service Company of New Mexico,PSCO,Public Service Company of Colorado,,Yes,, +PNM,Public Service Company of New Mexico,SRP,Salt River Project Agricultural Improvement and Power District,,No,,1/1/2017 +PNM,Public Service Company of New Mexico,SWPP,Southwest Power Pool,,Yes,, +PNM,Public Service Company of New Mexico,TEPC,Tucson Electric Power,,Yes,, +PNM,Public Service Company of New Mexico,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, +PSCO,Public Service Company of Colorado,PNM,Public Service Company of New Mexico,,Yes,, +PSCO,Public Service Company of Colorado,SWPP,Southwest Power Pool,,Yes,, +PSCO,Public Service Company of Colorado,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, +PSEI,"Puget Sound Energy, Inc.",BPAT,Bonneville Power Administration,,Yes,, +PSEI,"Puget Sound Energy, Inc.",CHPD,Public Utility District No. 1 of Chelan County,,Yes,, +PSEI,"Puget Sound Energy, Inc.",GCPD,"Public Utility District No. 2 of Grant County, Washington",,Yes,, +PSEI,"Puget Sound Energy, Inc.",SCL,Seattle City Light,,Yes,, +PSEI,"Puget Sound Energy, Inc.",TPWR,"City of Tacoma, Department of Public Utilities, Light Division",,Yes,, +SC,South Carolina Public Service Authority,CPLE,Duke Energy Progress East,,Yes,, +SC,South Carolina Public Service Authority,DUK,Duke Energy Carolinas,,Yes,, +SC,South Carolina Public Service Authority,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, +SC,South Carolina Public Service Authority,SEPA,Southeastern Power Administration,,Yes,, +SC,South Carolina Public Service Authority,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +SCEG,"Dominion Energy South Carolina, Inc.",CPLE,Duke Energy Progress East,,Yes,, +SCEG,"Dominion Energy South Carolina, Inc.",DUK,Duke Energy Carolinas,,Yes,, +SCEG,"Dominion Energy South Carolina, Inc.",SC,South Carolina Public Service Authority,,Yes,, +SCEG,"Dominion Energy South Carolina, Inc.",SEPA,Southeastern Power Administration,,Yes,, +SCEG,"Dominion Energy South Carolina, Inc.",SOCO,"Southern Company Services, Inc. 
- Trans",,Yes,, +SCL,Seattle City Light,BPAT,Bonneville Power Administration,,Yes,, +SCL,Seattle City Light,PSEI,"Puget Sound Energy, Inc.",,Yes,, +SEC,Seminole Electric Cooperative,FPC,"Duke Energy Florida, Inc.",,Yes,, +SEC,Seminole Electric Cooperative,FPL,Florida Power & Light Co.,,Yes,, +SEC,Seminole Electric Cooperative,JEA,JEA,,Yes,, +SEC,Seminole Electric Cooperative,TEC,Tampa Electric Company,,Yes,, +SEPA,Southeastern Power Administration,DUK,Duke Energy Carolinas,,Yes,, +SEPA,Southeastern Power Administration,SC,South Carolina Public Service Authority,,Yes,, +SEPA,Southeastern Power Administration,SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, +SEPA,Southeastern Power Administration,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",AEC,PowerSouth Energy Cooperative,,No,,9/1/2021 +SOCO,"Southern Company Services, Inc. - Trans",DUK,Duke Energy Carolinas,,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",FPC,"Duke Energy Florida, Inc.",,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",FPL,Florida Power & Light Co.,,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",SC,South Carolina Public Service Authority,,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",SCEG,"Dominion Energy South Carolina, Inc.",,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",SEPA,Southeastern Power Administration,,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",TAL,City of Tallahassee,,Yes,, +SOCO,"Southern Company Services, Inc. - Trans",TVA,Tennessee Valley Authority,,Yes,, +SPA,Southwestern Power Administration,AECI,"Associated Electric Cooperative, Inc.",,Yes,, +SPA,Southwestern Power Administration,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +SPA,Southwestern Power Administration,SWPP,Southwest Power Pool,,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,AZPS,Arizona Public Service Company,,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,CISO,California Independent System Operator,,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,DEAA,"Arlington Valley, LLC",,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,GRID,"Gridforce Energy Management, LLC",,Yes,12/14/2020, +SRP,Salt River Project Agricultural Improvement and Power District,HGMA,"New Harquahala Generating Company, LLC",,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,PNM,Public Service Company of New Mexico,,No,,1/1/2017 +SRP,Salt River Project Agricultural Improvement and Power District,TEPC,Tucson Electric Power,,Yes,, +SRP,Salt River Project Agricultural Improvement and Power District,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +SWPP,Southwest Power Pool,AECI,"Associated Electric Cooperative, Inc.",,Yes,, +SWPP,Southwest Power Pool,EPE,El Paso Electric Company,,Yes,, +SWPP,Southwest Power Pool,ERCO,"Electric Reliability Council of Texas, Inc.",,Yes,, +SWPP,Southwest Power Pool,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +SWPP,Southwest Power Pool,PNM,Public Service Company of New Mexico,,Yes,, +SWPP,Southwest Power Pool,PSCO,Public Service Company of Colorado,,Yes,, +SWPP,Southwest Power Pool,SPA,Southwestern Power Administration,,Yes,, +SWPP,Southwest Power Pool,SPC,Saskatchewan Power Corporation,Canada,Yes,10/1/2016, +SWPP,Southwest Power Pool,WACM,Western Area Power 
Administration - Rocky Mountain Region,,Yes,, +SWPP,Southwest Power Pool,WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 +SWPP,Southwest Power Pool,WAUW,Western Area Power Administration - Upper Great Plains West,,Yes,, +TAL,City of Tallahassee,FPC,"Duke Energy Florida, Inc.",,Yes,, +TAL,City of Tallahassee,SOCO,"Southern Company Services, Inc. - Trans",,Yes,, +TEC,Tampa Electric Company,FMPP,Florida Municipal Power Pool,,Yes,, +TEC,Tampa Electric Company,FPC,"Duke Energy Florida, Inc.",,Yes,, +TEC,Tampa Electric Company,FPL,Florida Power & Light Co.,,Yes,, +TEC,Tampa Electric Company,SEC,Seminole Electric Cooperative,,Yes,, +TEPC,Tucson Electric Power,AZPS,Arizona Public Service Company,,Yes,, +TEPC,Tucson Electric Power,EPE,El Paso Electric Company,,Yes,, +TEPC,Tucson Electric Power,PNM,Public Service Company of New Mexico,,Yes,, +TEPC,Tucson Electric Power,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, +TEPC,Tucson Electric Power,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +TIDC,Turlock Irrigation District,BANC,Balancing Authority of Northern California,,Yes,, +TIDC,Turlock Irrigation District,CISO,California Independent System Operator,,Yes,, +TPWR,"City of Tacoma, Department of Public Utilities, Light Division",BPAT,Bonneville Power Administration,,Yes,, +TPWR,"City of Tacoma, Department of Public Utilities, Light Division",PSEI,"Puget Sound Energy, Inc.",,Yes,, +TVA,Tennessee Valley Authority,AECI,"Associated Electric Cooperative, Inc.",,Yes,, +TVA,Tennessee Valley Authority,CPLW,Duke Energy Progress West,,Yes,, +TVA,Tennessee Valley Authority,DUK,Duke Energy Carolinas,,Yes,, +TVA,Tennessee Valley Authority,EEI,"Electric Energy, Inc.",,No,,2/29/2020 +TVA,Tennessee Valley Authority,LGEE,Louisville Gas and Electric Company and Kentucky Utilities Company,,Yes,, +TVA,Tennessee Valley Authority,MISO,"Midcontinent Independent System Operator, Inc.",,Yes,, +TVA,Tennessee Valley Authority,PJM,"PJM Interconnection, LLC",,Yes,, +TVA,Tennessee Valley Authority,SOCO,"Southern Company Services, Inc. 
- Trans",,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,AZPS,Arizona Public Service Company,,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,PACE,PacifiCorp East,,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,PNM,Public Service Company of New Mexico,,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,PSCO,Public Service Company of Colorado,,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,SWPP,Southwest Power Pool,,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,WALC,Western Area Power Administration - Desert Southwest Region,,Yes,, +WACM,Western Area Power Administration - Rocky Mountain Region,WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 +WACM,Western Area Power Administration - Rocky Mountain Region,WAUW,Western Area Power Administration - Upper Great Plains West,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,AZPS,Arizona Public Service Company,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,CISO,California Independent System Operator,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,GRIF,"Griffith Energy, LLC",,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,IID,Imperial Irrigation District,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,LDWP,Los Angeles Department of Water and Power,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,NEVP,Nevada Power Company,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,SRP,Salt River Project Agricultural Improvement and Power District,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,TEPC,Tucson Electric Power,,Yes,, +WALC,Western Area Power Administration - Desert Southwest Region,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, +WAUE,Western Area Power Administration - Upper Great Plains East,AECI,"Associated Electric Cooperative, Inc.",,No,,10/1/2015 +WAUE,Western Area Power Administration - Upper Great Plains East,MISO,"Midcontinent Independent System Operator, Inc.",,No,,10/1/2015 +WAUE,Western Area Power Administration - Upper Great Plains East,SWPP,Southwest Power Pool,,No,,10/1/2015 +WAUE,Western Area Power Administration - Upper Great Plains East,WACM,Western Area Power Administration - Rocky Mountain Region,,No,,10/1/2015 +WAUE,Western Area Power Administration - Upper Great Plains East,WAUW,Western Area Power Administration - Upper Great Plains West,,No,,10/1/2015 +WAUW,Western Area Power Administration - Upper Great Plains West,NWMT,NorthWestern Corporation,,Yes,, +WAUW,Western Area Power Administration - Upper Great Plains West,SWPP,Southwest Power Pool,,Yes,, +WAUW,Western Area Power Administration - Upper Great Plains West,WACM,Western Area Power Administration - Rocky Mountain Region,,Yes,, +WAUW,Western Area Power Administration - Upper Great Plains West,WAUE,Western Area Power Administration - Upper Great Plains East,,No,,10/1/2015 +WWA,"NaturEner Wind Watch, LLC",NWMT,NorthWestern Corporation,,Yes,, +YAD,"Alcoa Power Generating, Inc. - Yadkin Division",CPLE,Duke Energy Progress East,,Yes,, YAD,"Alcoa Power Generating, Inc. 
- Yadkin Division",DUK,Duke Energy Carolinas,,Yes,, \ No newline at end of file diff --git a/package.json b/package.json index d2c0b9fbb..e46fc7b65 100644 --- a/package.json +++ b/package.json @@ -22,13 +22,14 @@ "observable": "bin/observable-init.js" }, "scripts": { - "dev": "rm -f docs/themes.md docs/theme/*.md && (tsx watch docs/theme/generate-themes.ts & tsx watch --no-warnings=ExperimentalWarning ./bin/observable.ts preview --no-open)", - "build": "yarn rebuild-themes && rm -rf dist && tsx --no-warnings=ExperimentalWarning ./bin/observable.ts build", + "dev": "rimraf --glob docs/themes.md docs/theme/*.md && (tsx watch docs/theme/generate-themes.ts & tsx watch --no-warnings=ExperimentalWarning ./bin/observable.ts preview --no-open)", + "build": "yarn rebuild-themes && rimraf dist && tsx --no-warnings=ExperimentalWarning ./bin/observable.ts build", "deploy": "yarn rebuild-themes && tsx --no-warnings=ExperimentalWarning ./bin/observable.ts deploy", - "rebuild-themes": "rm -f docs/themes.md docs/theme/*.md && tsx docs/theme/generate-themes.ts", + "rebuild-themes": "rimraf --glob docs/themes.md docs/theme/*.md && tsx docs/theme/generate-themes.ts", "test": "yarn test:mocha && yarn test:tsc && yarn test:lint && yarn test:prettier", "test:coverage": "c8 yarn test:mocha", - "test:mocha": "rm -rf test/.observablehq/cache test/input/build/*/.observablehq/cache && OBSERVABLE_TELEMETRY_DISABLE=1 TZ=America/Los_Angeles tsx --no-warnings=ExperimentalWarning ./node_modules/.bin/mocha 'test/**/*-test.*'", + "test:mocha": "rimraf --glob test/.observablehq/cache test/input/build/*/.observablehq/cache && cross-env OBSERVABLE_TELEMETRY_DISABLE=1 TZ=America/Los_Angeles tsx --no-warnings=ExperimentalWarning ./node_modules/mocha/bin/mocha.js 'test/**/*-test.*'", + "test:mocha-debug": "rimraf --glob test/.observablehq/cache test/input/build/*/.observablehq/cache && cross-env OBSERVABLE_TELEMETRY_DISABLE=1 TZ=America/Los_Angeles tsx --no-warnings=ExperimentalWarning --inspect-brk ./node_modules/mocha/bin/mocha.js --timeout 3600000 'test/**/*-test.*'", "test:lint": "eslint src test --max-warnings=0", "test:prettier": "prettier --check src test", "test:tsc": "tsc --noEmit", @@ -53,6 +54,7 @@ "acorn": "^8.11.2", "acorn-walk": "^8.3.0", "ci-info": "^4.0.0", + "cross-spawn": "^7.0.3", "esbuild": "^0.19.8", "fast-array-diff": "^1.1.0", "fast-deep-equal": "^3.1.3", @@ -92,6 +94,7 @@ "c8": "^8.0.1", "chai": "^4.3.10", "chai-http": "^4.4.0", + "cross-env": "^7.0.3", "d3-array": "^3.2.4", "d3-dsv": "^3.0.1", "eslint": "^8.50.0", @@ -100,6 +103,7 @@ "eslint-plugin-import": "^2.29.0", "mocha": "^10.2.0", "prettier": "^3.0.3 <3.1", + "rimraf": "^5.0.5", "typescript": "^5.2.2", "undici": "^5.27.2" }, diff --git a/src/brandedFs.ts b/src/brandedFs.ts new file mode 100644 index 000000000..ff2d81399 --- /dev/null +++ b/src/brandedFs.ts @@ -0,0 +1,100 @@ +import fs, {type MakeDirectoryOptions, type Stats, type WatchListener, type WriteFileOptions} from "node:fs"; +import fsp, {type FileHandle} from "node:fs/promises"; +import {FilePath, unFilePath} from "./brandedPath.js"; + +export const constants = fsp.constants; + +export function access(path: FilePath, mode?: number): Promise { + return fsp.access(unFilePath(path), mode); +} + +export function accessSync(path: FilePath, mode?: number): void { + return fs.accessSync(unFilePath(path), mode); +} + +export function copyFile(source: FilePath, destination: FilePath, flags?: number): Promise { + return fsp.copyFile(unFilePath(source), unFilePath(destination), flags); +} + +export 
function readFile(path: FilePath, encoding?: undefined): Promise<Buffer>; +export function readFile(path: FilePath, encoding: BufferEncoding): Promise<string>; +export function readFile(path: FilePath, encoding?: BufferEncoding | undefined): Promise<string | Buffer> { + return fsp.readFile(unFilePath(path), encoding); +} + +export function readFileSync(path: FilePath, encoding: BufferEncoding): string { + return fs.readFileSync(unFilePath(path), encoding); +} + +export function writeFile(path: FilePath, data: string | Buffer, options?: WriteFileOptions): Promise<void> { + return fsp.writeFile(unFilePath(path), data, options); +} + +export function writeFileSync(path: FilePath, data: string | Buffer, options?: WriteFileOptions): void { + return fs.writeFileSync(unFilePath(path), data, options); +} + +export async function mkdir(path: FilePath, options?: MakeDirectoryOptions): Promise<FilePath | undefined> { + const rv = await fsp.mkdir(unFilePath(path), options); + return rv ? FilePath(rv) : undefined; +} + +export function readdir(path: FilePath): Promise<string[]> { + return fsp.readdir(unFilePath(path)); +} + +export function stat(path: FilePath): Promise<Stats> { + return fsp.stat(unFilePath(path)); +} + +export function open(path: FilePath, flags: string | number, mode?: number): Promise<FileHandle> { + return fsp.open(unFilePath(path), flags, mode); +} + +export function rename(oldPath: FilePath, newPath: FilePath): Promise<void> { + return fsp.rename(unFilePath(oldPath), unFilePath(newPath)); +} + +export function renameSync(oldPath: FilePath, newPath: FilePath): void { + return fs.renameSync(unFilePath(oldPath), unFilePath(newPath)); +} + +export function unlink(path: FilePath): Promise<void> { + return fsp.unlink(unFilePath(path)); +} + +export function unlinkSync(path: FilePath): void { + return fs.unlinkSync(unFilePath(path)); +} + +export function existsSync(path: FilePath): boolean { + return fs.existsSync(unFilePath(path)); +} + +export function readdirSync(path: FilePath): FilePath[] { + return fs.readdirSync(unFilePath(path)) as unknown as FilePath[]; +} + +export function statSync(path: FilePath): Stats { + return fs.statSync(unFilePath(path)); +} + +export function watch(path: FilePath, listener?: WatchListener<string>): fs.FSWatcher { + return fs.watch(unFilePath(path), listener); +} + +export function utimes(path: FilePath, atime: Date, mtime: Date): Promise<void> { + return fsp.utimes(unFilePath(path), atime, mtime); +} + +export function utimesSync(path: FilePath, atime: Date, mtime: Date): void { + return fs.utimesSync(unFilePath(path), atime, mtime); +} + +export function createReadStream(path: FilePath): fs.ReadStream { + return fs.createReadStream(unFilePath(path)); +} + +export function rm(path: FilePath, options?: {force?: boolean; recursive?: boolean}): Promise<void> { + return fsp.rm(unFilePath(path), options); +} diff --git a/src/brandedPath.ts b/src/brandedPath.ts new file mode 100644 index 000000000..286de86f8 --- /dev/null +++ b/src/brandedPath.ts @@ -0,0 +1,101 @@ +import osPath from "node:path"; +import posixPath from "node:path/posix"; + +// including "unknown &" here improves type error message +type BrandedString<T> = unknown & + Omit<string, "replace" | "slice"> & {__type: T} & { + replace: (search: string | RegExp, replace: string) => BrandedString<T>; + slice: (start?: number, end?: number) => BrandedString<T>; + }; + +export type FilePath = BrandedString<"FilePath">; +export type UrlPath = BrandedString<"UrlPath">; + +export function FilePath(path: string | FilePath): FilePath { + if (osPath.sep === "\\") { + path = path.replaceAll("/", osPath.sep); + } else if (osPath.sep === "/") { + path = 
path.replaceAll("\\", osPath.sep); + } + return path as unknown as FilePath; +} + +export function UrlPath(path: string | UrlPath): UrlPath { + path = path.replaceAll("\\", posixPath.sep); + return path as unknown as UrlPath; +} + +export function unFilePath(path: FilePath | string): string { + return path as unknown as string; +} + +export function unUrlPath(path: UrlPath | string): string { + return path as unknown as string; +} + +export function filePathToUrlPath(path: FilePath): UrlPath { + if (osPath.sep === "/") return path as unknown as UrlPath; + return urlJoin(...(path.split(osPath.sep) as string[])); +} + +export function urlPathToFilePath(path: UrlPath): FilePath { + if (osPath.sep === "/") return path as unknown as FilePath; + return fileJoin(...(path.split(posixPath.sep) as string[])); +} + +// Implemenations of node:path functions: + +export function urlJoin(...paths: (string | UrlPath)[]): UrlPath { + return posixPath.join(...(paths as string[])) as unknown as UrlPath; +} + +export function fileJoin(...paths: (string | FilePath)[]): FilePath { + return osPath.join(...(paths as string[])) as unknown as FilePath; +} + +export function fileRelative(from: string | FilePath, to: string | FilePath): FilePath { + return FilePath(osPath.relative(unFilePath(from), unFilePath(to))); +} + +export function fileDirname(path: string | FilePath): FilePath { + return FilePath(osPath.dirname(unFilePath(path))); +} + +export function urlDirname(path: string | UrlPath): UrlPath { + return UrlPath(posixPath.dirname(unUrlPath(path))); +} + +export function fileNormalize(path: string | FilePath): FilePath { + return FilePath(osPath.normalize(unFilePath(path))); +} + +export function urlNormalize(path: string | UrlPath): UrlPath { + return UrlPath(osPath.normalize(unUrlPath(path))); +} + +export function fileBasename(path: string | FilePath, suffix?: string): string { + return osPath.basename(unFilePath(path), suffix); +} + +export function urlBasename(path: string | UrlPath, suffix?: string): string { + return osPath.basename(unUrlPath(path), suffix); +} + +export function fileExtname(path: string | FilePath | string): string { + return osPath.extname(unFilePath(path)); +} + +export function urlExtname(path: string | UrlPath | string): string { + return osPath.extname(unUrlPath(path)); +} + +export function fileResolve(...paths: (string | FilePath)[]): FilePath { + return FilePath(osPath.resolve(...(paths as string[]))); +} + +export function urlResolve(...paths: (string | UrlPath)[]): UrlPath { + return UrlPath(osPath.resolve(...(paths as string[]))); +} + +export const fileSep = osPath.sep as unknown as FilePath; +export const urlSep = posixPath.sep as unknown as UrlPath; diff --git a/src/build.ts b/src/build.ts index 539776d3f..1d344374d 100644 --- a/src/build.ts +++ b/src/build.ts @@ -1,7 +1,18 @@ -import {existsSync} from "node:fs"; -import {access, constants, copyFile, readFile, writeFile} from "node:fs/promises"; -import {basename, dirname, join} from "node:path"; import {fileURLToPath} from "node:url"; +import {existsSync} from "./brandedFs.js"; +import {access, constants, copyFile, readFile, writeFile} from "./brandedFs.js"; +import { + FilePath, + fileBasename, + fileDirname, + fileJoin, + filePathToUrlPath, + unUrlPath, + urlBasename, + urlDirname, + urlJoin, + urlPathToFilePath +} from "./brandedPath.js"; import type {Config, Style} from "./config.js"; import {mergeStyle} from "./config.js"; import {Loader} from "./dataloader.js"; @@ -16,10 +27,10 @@ import {Telemetry} from 
"./telemetry.js"; import {faint} from "./tty.js"; import {resolvePath} from "./url.js"; -const EXTRA_FILES = new Map([ +const EXTRA_FILES: Map = new Map([ [ - join(fileURLToPath(import.meta.resolve("@observablehq/runtime")), "../../dist/runtime.js"), - "_observablehq/runtime.js" + fileJoin(fileURLToPath(import.meta.resolve("@observablehq/runtime")), "..", "..", "dist", "runtime.js"), + fileJoin("_observablehq", "runtime.js") ] ]); @@ -32,18 +43,20 @@ export interface BuildOptions { export interface BuildEffects { logger: Logger; output: Writer; + existsSync: (path: FilePath) => boolean; + readFile(path: FilePath, encoding: "utf-8"): Promise; /** * @param outputPath The path of this file relative to the outputRoot. For * example, in a local build this should be relative to the dist directory. */ - copyFile(sourcePath: string, outputPath: string): Promise; + copyFile(sourcePath: FilePath, outputPath: FilePath): Promise; /** * @param outputPath The path of this file relative to the outputRoot. For * example, in a local build this should be relative to the dist directory. */ - writeFile(outputPath: string, contents: Buffer | string): Promise; + writeFile(outputPath: FilePath, contents: Buffer | string): Promise; } export async function build( @@ -56,34 +69,36 @@ export async function build( // Make sure all files are readable before starting to write output files. let pageCount = 0; for await (const sourceFile of visitMarkdownFiles(root)) { - await access(join(root, sourceFile), constants.R_OK); + await access(fileJoin(root, sourceFile), constants.R_OK); pageCount++; } if (!pageCount) throw new CliError(`Nothing to build: no page files found in your ${root} directory.`); effects.logger.log(`${faint("found")} ${pageCount} ${faint(`page${pageCount === 1 ? "" : "s"} in`)} ${root}`); // Render .md files, building a list of file attachments as we go. - const files: string[] = []; - const imports: string[] = []; + const files: FilePath[] = []; + const imports: FilePath[] = []; const styles: Style[] = []; for await (const sourceFile of visitMarkdownFiles(root)) { - const sourcePath = join(root, sourceFile); - const outputPath = join(dirname(sourceFile), basename(sourceFile, ".md") + ".html"); + const sourcePath = fileJoin(root, sourceFile); + const outputPath = fileJoin(fileDirname(sourceFile), fileBasename(sourceFile, ".md") + ".html"); effects.output.write(`${faint("render")} ${sourcePath} ${faint("→")} `); - const path = join("/", dirname(sourceFile), basename(sourceFile, ".md")); - const render = await renderServerless(sourcePath, {path, ...config}); + const urlSourceFile = filePathToUrlPath(sourceFile); + const urlPath = urlJoin("/", urlDirname(urlSourceFile), urlBasename(urlSourceFile, ".md")); + const filePath = fileJoin(fileDirname(sourceFile), fileBasename(sourceFile, ".md")); + const render = await renderServerless(sourcePath, {path: urlPath, ...config}); const resolveFile = ({name}) => resolvePath(sourceFile, name); files.push(...render.files.map(resolveFile)); imports.push(...render.imports.filter((i) => i.type === "local").map(resolveFile)); await effects.writeFile(outputPath, render.html); - const style = mergeStyle(path, render.data?.style, render.data?.theme, config.style); + const style = mergeStyle(filePath, render.data?.style, render.data?.theme, config.style); if (style && !styles.some((s) => styleEquals(s, style))) styles.push(style); } // Add imported local scripts. 
for (const script of config.scripts) { - if (!/^\w+:/.test(script.src)) { - imports.push(script.src); + if (!/^\w+:/.test(unUrlPath(script.src))) { + imports.push(urlPathToFilePath(script.src)); } } @@ -106,8 +121,8 @@ export async function build( ["./src/client/stdlib/zip.js", "stdlib/zip.js"], ...(config.search ? [["./src/client/search.js", "search.js"]] : []) ]) { - const clientPath = getClientPath(entry); - const outputPath = join("_observablehq", name); + const clientPath = getClientPath(FilePath(entry)); + const outputPath = fileJoin("_observablehq", name); effects.output.write(`${faint("bundle")} ${clientPath} ${faint("→")} `); const code = await (entry.endsWith(".css") ? bundleStyles({path: clientPath}) @@ -115,20 +130,20 @@ export async function build( await effects.writeFile(outputPath, code); } if (config.search) { - const outputPath = join("_observablehq", "minisearch.json"); + const outputPath = fileJoin("_observablehq", "minisearch.json"); const code = await searchIndex(config, effects); effects.output.write(`${faint("search")} ${faint("→")} `); await effects.writeFile(outputPath, code); } for (const style of styles) { if ("path" in style) { - const outputPath = join("_import", style.path); - const sourcePath = join(root, style.path); + const outputPath = fileJoin("_import", urlPathToFilePath(style.path)); + const sourcePath = fileJoin(root, urlPathToFilePath(style.path)); effects.output.write(`${faint("style")} ${sourcePath} ${faint("→")} `); const code = await bundleStyles({path: sourcePath}); await effects.writeFile(outputPath, code); } else { - const outputPath = join("_observablehq", `theme-${style.theme}.css`); + const outputPath = fileJoin("_observablehq", `theme-${style.theme}.css`); effects.output.write(`${faint("bundle")} theme-${style.theme}.css ${faint("→")} `); const code = await bundleStyles({theme: style.theme}); await effects.writeFile(outputPath, code); @@ -138,16 +153,16 @@ export async function build( // Copy over the referenced files. for (const file of files) { - let sourcePath = join(root, file); - const outputPath = join("_file", file); - if (!existsSync(sourcePath)) { - const loader = Loader.find(root, join("/", file), {useStale: true}); + let sourcePath = fileJoin(root, file); + const outputPath = fileJoin("_file", file); + if (!effects.existsSync(sourcePath)) { + const loader = Loader.find(root, file, {useStale: true}); if (!loader) { effects.logger.error("missing referenced file", sourcePath); continue; } try { - sourcePath = join(root, await loader.load(effects)); + sourcePath = fileJoin(root, await loader.load(effects)); } catch (error) { if (!isEnoent(error)) throw error; continue; @@ -160,14 +175,14 @@ export async function build( // Copy over the imported modules. 
const importResolver = createImportResolver(root); for (const file of imports) { - const sourcePath = join(root, file); - const outputPath = join("_import", file); - if (!existsSync(sourcePath)) { + const sourcePath = fileJoin(root, file); + const outputPath = fileJoin("_import", file); + if (!effects.existsSync(sourcePath)) { effects.logger.error("missing referenced file", sourcePath); continue; } effects.output.write(`${faint("copy")} ${sourcePath} ${faint("→")} `); - const contents = await rewriteModule(await readFile(sourcePath, "utf-8"), file, importResolver); + const contents = await rewriteModule(await effects.readFile(sourcePath, "utf-8"), file, importResolver); await effects.writeFile(outputPath, contents); } @@ -182,11 +197,11 @@ export async function build( } export class FileBuildEffects implements BuildEffects { - private readonly outputRoot: string; + private readonly outputRoot: FilePath; readonly logger: Logger; readonly output: Writer; constructor( - outputRoot: string, + outputRoot: FilePath, {logger = console, output = process.stdout}: {logger?: Logger; output?: Writer} = {} ) { if (!outputRoot) throw new Error("missing outputRoot"); @@ -194,14 +209,20 @@ export class FileBuildEffects implements BuildEffects { this.output = output; this.outputRoot = outputRoot; } - async copyFile(sourcePath: string, outputPath: string): Promise { - const destination = join(this.outputRoot, outputPath); + existsSync(path: FilePath) { + return existsSync(path); + } + readFile(path: FilePath, encoding: "utf-8"): Promise { + return readFile(path, encoding); + } + async copyFile(sourcePath: FilePath, outputPath: FilePath): Promise { + const destination = fileJoin(this.outputRoot, outputPath); this.logger.log(destination); await prepareOutput(destination); await copyFile(sourcePath, destination); } - async writeFile(outputPath: string, contents: string | Buffer): Promise { - const destination = join(this.outputRoot, outputPath); + async writeFile(outputPath: FilePath, contents: string | Buffer): Promise { + const destination = fileJoin(this.outputRoot, outputPath); this.logger.log(destination); await prepareOutput(destination); await writeFile(destination, contents); diff --git a/src/commandInstruction.ts b/src/commandInstruction.ts index 19eadf7c2..b41aed2a8 100644 --- a/src/commandInstruction.ts +++ b/src/commandInstruction.ts @@ -9,7 +9,7 @@ export function commandInstruction( env = process.env }: {color?: TtyColor | null; env?: Record} = {} ): string { - if (!color) color = (s) => s; + if (!color) color = (s) => `${s}`; const prefix = env["npm_config_user_agent"]?.includes("yarn/") ? 
"yarn observable" diff --git a/src/config.ts b/src/config.ts index a1cf671a9..55e6d4114 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,4 +1,5 @@ -import {basename, dirname, join} from "node:path"; +import type {UrlPath} from "./brandedPath.js"; +import {FilePath, fileJoin, filePathToUrlPath, urlBasename, urlDirname, urlJoin} from "./brandedPath.js"; import {visitMarkdownFiles} from "./files.js"; import {formatIsoDate, formatLocaleDate} from "./format.js"; import {parseMarkdown} from "./markdown.js"; @@ -7,7 +8,7 @@ import {resolvePath} from "./url.js"; export interface Page { name: string; - path: string; + path: UrlPath; } export interface Section { @@ -22,19 +23,19 @@ export interface TableOfContents { } export type Style = - | {path: string} // custom stylesheet + | {path: UrlPath} // custom stylesheet | {theme: string[]}; // zero or more named theme export interface Script { - src: string; + src: UrlPath; async: boolean; type: string | null; } export interface Config { - root: string; // defaults to docs - output: string; // defaults to dist - base: string; // defaults to "/" + root: FilePath; // defaults to docs + output: FilePath; // defaults to dist + base: UrlPath; // defaults to "/" title?: string; sidebar: boolean; // defaults to true if pages isn’t empty pages: (Page | Section)[]; @@ -49,16 +50,16 @@ export interface Config { search: boolean; // default to false } -export async function readConfig(configPath?: string, root?: string): Promise { +export async function readConfig(configPath?: FilePath, root?: FilePath): Promise { if (configPath === undefined) return readDefaultConfig(root); - const importPath = join(process.cwd(), root ?? ".", configPath); + const importPath = "file://" + filePathToUrlPath(fileJoin(process.cwd(), root ?? ".", configPath)); return normalizeConfig((await import(importPath)).default, root); } -export async function readDefaultConfig(root?: string): Promise { +export async function readDefaultConfig(root?: FilePath): Promise { for (const ext of [".js", ".ts"]) { try { - return await readConfig("observablehq.config" + ext, root); + return await readConfig(FilePath("observablehq.config" + ext), root); } catch (error: any) { if (error.code !== "ERR_MODULE_NOT_FOUND") throw error; continue; @@ -67,13 +68,14 @@ export async function readDefaultConfig(root?: string): Promise { return normalizeConfig(undefined, root); } -async function readPages(root: string): Promise { +async function readPages(root: FilePath): Promise { const pages: Page[] = []; for await (const file of visitMarkdownFiles(root)) { - if (file === "index.md" || file === "404.md") continue; - const parsed = await parseMarkdown(join(root, file), {root, path: file}); - const name = basename(file, ".md"); - const page = {path: join("/", dirname(file), name), name: parsed.title ?? "Untitled"}; + const urlFile = filePathToUrlPath(file); + if (file === FilePath("index.md") || file === FilePath("404.md")) continue; + const parsed = await parseMarkdown(fileJoin(root, file), {root, path: urlFile}); + const name = urlBasename(urlFile, ".md"); + const page = {path: urlJoin("/", urlDirname(urlFile), name), name: parsed.title ?? 
"Untitled"}; if (name === "index") pages.unshift(page); else pages.push(page); } @@ -86,7 +88,7 @@ export function setCurrentDate(date = new Date()): void { currentDate = date; } -export async function normalizeConfig(spec: any = {}, defaultRoot = "docs"): Promise { +export async function normalizeConfig(spec: any = {}, defaultRoot = FilePath("docs")): Promise { let { root = defaultRoot, output = "dist", @@ -178,12 +180,12 @@ export function mergeToc(spec: any, toc: TableOfContents): TableOfContents { return {label, show}; } -export function mergeStyle(path: string, style: any, theme: any, defaultStyle: null | Style): null | Style { +export function mergeStyle(path: FilePath, style: any, theme: any, defaultStyle: null | Style): null | Style { return style === undefined && theme === undefined ? defaultStyle : style === null ? null // disable : style !== undefined - ? {path: resolvePath(path, style)} + ? {path: filePathToUrlPath(resolvePath(path, style))} : {theme: normalizeTheme(theme)}; } diff --git a/src/convert.ts b/src/convert.ts index 372d3ec3b..d10cd973e 100644 --- a/src/convert.ts +++ b/src/convert.ts @@ -1,8 +1,8 @@ -import {existsSync} from "node:fs"; -import {utimes, writeFile} from "node:fs/promises"; -import {join} from "node:path"; import * as clack from "@clack/prompts"; import wrapAnsi from "wrap-ansi"; +import {utimes, writeFile} from "./brandedFs.js"; +import {existsSync} from "./brandedFs.js"; +import {type FilePath, fileJoin} from "./brandedPath.js"; import type {ClackEffects} from "./clack.js"; import {CliError} from "./error.js"; import {prepareOutput} from "./files.js"; @@ -11,25 +11,25 @@ import {type TtyEffects, bold, cyan, faint, inverse, link, reset, defaultEffects export interface ConvertEffects extends TtyEffects { clack: ClackEffects; - prepareOutput(outputPath: string): Promise; - existsSync(outputPath: string): boolean; - writeFile(outputPath: string, contents: Buffer | string): Promise; - touch(outputPath: string, date: Date | string | number): Promise; + prepareOutput(outputPath: FilePath): Promise; + existsSync(outputPath: FilePath): boolean; + writeFile(outputPath: FilePath, contents: Buffer | string): Promise; + touch(outputPath: FilePath, date: Date | string | number): Promise; } const defaultEffects: ConvertEffects = { ...ttyEffects, clack, - async prepareOutput(outputPath: string): Promise { + async prepareOutput(outputPath: FilePath): Promise { await prepareOutput(outputPath); }, - existsSync(outputPath: string): boolean { + existsSync(outputPath: FilePath): boolean { return existsSync(outputPath); }, - async writeFile(outputPath: string, contents: Buffer | string): Promise { + async writeFile(outputPath: FilePath, contents: Buffer | string): Promise { await writeFile(outputPath, contents); }, - async touch(outputPath: string, date: Date | string | number): Promise { + async touch(outputPath: FilePath, date: Date | string | number): Promise { await utimes(outputPath, (date = new Date(date)), date); } }; @@ -47,7 +47,7 @@ export async function convert( let s = clack.spinner(); const url = resolveInput(input); const name = inferFileName(url); - const path = join(output, name); + const path = fileJoin(output, name); if (await maybeFetch(path, force, effects)) { s.start(`Downloading ${bold(path)}`); const response = await fetch(url); @@ -60,7 +60,7 @@ export async function convert( n++; if (includeFiles) { for (const file of files) { - const path = join(output, file.name); + const path = fileJoin(output, file.name); if (await maybeFetch(path, force, 
effects)) { start = Date.now(); s = clack.spinner(); @@ -94,7 +94,7 @@ export async function convert( ); } -async function maybeFetch(path: string, force: boolean, effects: ConvertEffects): Promise { +async function maybeFetch(path: FilePath, force: boolean, effects: ConvertEffects): Promise { const {clack} = effects; if (effects.existsSync(path) && !force) { const choice = await clack.confirm({message: `${bold(path)} already exists; replace?`, initialValue: false}); diff --git a/src/create.ts b/src/create.ts index 9de792bb6..56368035d 100644 --- a/src/create.ts +++ b/src/create.ts @@ -1,13 +1,13 @@ import {exec} from "node:child_process"; -import {accessSync, existsSync, readdirSync, statSync} from "node:fs"; -import {constants, copyFile, mkdir, readFile, readdir, stat, writeFile} from "node:fs/promises"; -import {basename, dirname, join, normalize, resolve} from "node:path"; import {setTimeout as sleep} from "node:timers/promises"; import {fileURLToPath} from "node:url"; import {promisify} from "node:util"; import * as clack from "@clack/prompts"; import untildify from "untildify"; import {version} from "../package.json"; +import {accessSync, existsSync, readdirSync, statSync} from "./brandedFs.js"; +import {constants, copyFile, mkdir, readFile, readdir, stat, writeFile} from "./brandedFs.js"; +import {FilePath, fileBasename, fileDirname, fileJoin, fileNormalize, fileResolve, unFilePath} from "./brandedPath.js"; import type {ClackEffects} from "./clack.js"; import {cyan, faint, inverse, link, reset} from "./tty.js"; @@ -15,9 +15,9 @@ export interface CreateEffects { clack: ClackEffects; sleep: (delay?: number) => Promise; log(output: string): void; - mkdir(outputPath: string, options?: {recursive?: boolean}): Promise; - copyFile(sourcePath: string, outputPath: string): Promise; - writeFile(outputPath: string, contents: string): Promise; + mkdir(outputPath: FilePath, options?: {recursive?: boolean}): Promise; + copyFile(sourcePath: FilePath, outputPath: FilePath): Promise; + writeFile(outputPath: FilePath, contents: string): Promise; } const defaultEffects: CreateEffects = { @@ -26,13 +26,13 @@ const defaultEffects: CreateEffects = { log(output: string): void { console.log(output); }, - async mkdir(outputPath: string, options): Promise { + async mkdir(outputPath: FilePath, options): Promise { await mkdir(outputPath, options); }, - async copyFile(sourcePath: string, outputPath: string): Promise { + async copyFile(sourcePath: FilePath, outputPath: FilePath): Promise { await copyFile(sourcePath, outputPath); }, - async writeFile(outputPath: string, contents: string): Promise { + async writeFile(outputPath: FilePath, contents: string): Promise { await writeFile(outputPath, contents); } }; @@ -46,23 +46,25 @@ const defaultEffects: CreateEffects = { export async function create(options = {}, effects: CreateEffects = defaultEffects): Promise { const {clack} = effects; clack.intro(`${inverse(" observable create ")} ${faint(`v${version}`)}`); - const defaultRootPath = "./hello-framework"; + const defaultRootPath = FilePath("./hello-framework"); const defaultRootPathError = validateRootPath(defaultRootPath); await clack.group( { rootPath: () => clack.text({ message: "Where to create your project?", - placeholder: defaultRootPath, - defaultValue: defaultRootPathError ? undefined : defaultRootPath, - validate: (input) => validateRootPath(input, defaultRootPathError) + placeholder: unFilePath(defaultRootPath), + defaultValue: defaultRootPathError ? 
undefined : unFilePath(defaultRootPath), + validate: (input) => validateRootPath(FilePath(input), defaultRootPathError) }), - projectTitle: ({results: {rootPath}}) => - clack.text({ + projectTitle: ({results: {rootPath: rootPathStr}}) => { + const rootPath = FilePath(rootPathStr!); + return clack.text({ message: "What to title your project?", placeholder: inferTitle(rootPath!), defaultValue: inferTitle(rootPath!) - }), + }); + }, includeSampleFiles: () => clack.select({ message: "Include sample files to help you get started?", @@ -87,17 +89,16 @@ export async function create(options = {}, effects: CreateEffects = defaultEffec message: "Initialize git repository?" }), installing: async ({results: {rootPath, projectTitle, includeSampleFiles, packageManager, initializeGit}}) => { - rootPath = untildify(rootPath!); const s = clack.spinner(); s.start("Copying template files"); const template = includeSampleFiles ? "default" : "empty"; - const templateDir = resolve(fileURLToPath(import.meta.url), "..", "..", "templates", template); + const templateDir = fileResolve(fileURLToPath(import.meta.url), "..", "..", "templates", template); const runCommand = packageManager === "yarn" ? "yarn" : `${packageManager ?? "npm"} run`; const installCommand = `${packageManager ?? "npm"} install`; await effects.sleep(1000); await recursiveCopyTemplate( templateDir, - rootPath!, + FilePath(untildify(rootPath!)), { runCommand, installCommand, @@ -133,9 +134,9 @@ export async function create(options = {}, effects: CreateEffects = defaultEffec ); } -function validateRootPath(rootPath: string, defaultError?: string): string | undefined { - if (rootPath === "") return defaultError; // accept default value - rootPath = normalize(rootPath); +function validateRootPath(rootPath: FilePath, defaultError?: string): string | undefined { + if (rootPath === FilePath("")) return defaultError; // accept default value + rootPath = fileNormalize(rootPath); if (!canWriteRecursive(rootPath)) return "Path is not writable."; if (!existsSync(rootPath)) return; if (!statSync(rootPath).isDirectory()) return "File already exists."; @@ -143,14 +144,14 @@ function validateRootPath(rootPath: string, defaultError?: string): string | und if (readdirSync(rootPath).length !== 0) return "Directory is not empty."; } -function inferTitle(rootPath: string): string { - return basename(rootPath!) +function inferTitle(rootPath: FilePath): string { + return fileBasename(rootPath) .split(/[-_\s]/) .map(([c, ...rest]) => c.toUpperCase() + rest.join("")) .join(" "); } -function canWrite(path: string): boolean { +function canWrite(path: FilePath): boolean { try { accessSync(path, constants.W_OK); return true; @@ -159,9 +160,9 @@ function canWrite(path: string): boolean { } } -function canWriteRecursive(path: string): boolean { +function canWriteRecursive(path: FilePath): boolean { while (true) { - const dir = dirname(path); + const dir = fileDirname(path); if (canWrite(dir)) return true; if (dir === path) break; path = dir; @@ -170,15 +171,15 @@ function canWriteRecursive(path: string): boolean { } async function recursiveCopyTemplate( - inputRoot: string, - outputRoot: string, + inputRoot: FilePath, + outputRoot: FilePath, context: Record, effects: CreateEffects, - stepPath: string = "." 
+ stepPath: FilePath = FilePath(".") ) { - const templatePath = join(inputRoot, stepPath); + const templatePath = fileJoin(inputRoot, stepPath); const templateStat = await stat(templatePath); - let outputPath = join(outputRoot, stepPath); + let outputPath = fileJoin(outputRoot, stepPath); if (templateStat.isDirectory()) { try { await effects.mkdir(outputPath, {recursive: true}); @@ -186,7 +187,7 @@ async function recursiveCopyTemplate( // that's ok } for (const entry of await readdir(templatePath)) { - await recursiveCopyTemplate(inputRoot, outputRoot, context, effects, join(stepPath, entry)); + await recursiveCopyTemplate(inputRoot, outputRoot, context, effects, fileJoin(stepPath, entry)); } } else { if (templatePath.endsWith(".DS_Store")) return; @@ -196,7 +197,11 @@ async function recursiveCopyTemplate( contents = contents.replaceAll(/\{\{\s*(\w+)\s*\}\}/g, (_, key) => { const val = context[key]; if (val) return val; - throw new Error(`no template variable ${key}`); + throw new Error( + `no template variable ${key} (expected one of ${Object.entries(context) + .map(([k, v]) => `${k}=${v}`) + .join(", ")})` + ); }); await effects.writeFile(outputPath, contents); } else { diff --git a/src/dataloader.ts b/src/dataloader.ts index 6c80a7fc9..6724ddf9e 100644 --- a/src/dataloader.ts +++ b/src/dataloader.ts @@ -1,15 +1,17 @@ -import {spawn} from "node:child_process"; -import {type WriteStream, createReadStream, existsSync, statSync} from "node:fs"; -import {mkdir, open, readFile, rename, unlink} from "node:fs/promises"; -import {dirname, extname, join} from "node:path"; +import {type WriteStream} from "node:fs"; import {createGunzip} from "node:zlib"; +import {spawn} from "cross-spawn"; import JSZip from "jszip"; import {extract} from "tar-stream"; +import {createReadStream, existsSync, statSync} from "./brandedFs.js"; +import {mkdir, open, readFile, rename, unlink} from "./brandedFs.js"; +import {FilePath, fileExtname} from "./brandedPath.js"; +import {fileDirname, fileJoin, unFilePath} from "./brandedPath.js"; import {maybeStat, prepareOutput} from "./files.js"; import type {Logger, Writer} from "./logger.js"; import {cyan, faint, green, red, yellow} from "./tty.js"; -const runningCommands = new Map>(); +const runningCommands = new Map>(); const languages = { ".js": ["node", "--no-warnings=ExperimentalWarning"], @@ -34,9 +36,9 @@ const defaultEffects: LoadEffects = { }; export interface LoaderOptions { - path: string; - sourceRoot: string; - targetPath: string; + path: FilePath; + sourceRoot: FilePath; + targetPath: FilePath; useStale: boolean; } @@ -46,19 +48,19 @@ export abstract class Loader { * directory. This is exposed so that clients can check which file to watch to * see if the loader is edited (and in which case it needs to be re-run). */ - readonly path: string; + readonly path: FilePath; /** * The source root relative to the current working directory, such as docs. */ - readonly sourceRoot: string; + readonly sourceRoot: FilePath; /** * The path to the loader script’s output relative to the destination root. * This is where the loader’s output is served, but the loader generates the * file in the .observablehq/cache directory within the source root. */ - readonly targetPath: string; + readonly targetPath: FilePath; /** * Should the loader use a stale cache. true when building. @@ -79,23 +81,23 @@ export abstract class Loader { * abort if we find a matching folder or reach the source root; for example, * if docs/data exists, we won’t look for a docs/data.zip. 
*/ - static find(sourceRoot: string, targetPath: string, {useStale = false} = {}): Loader | undefined { + static find(sourceRoot: FilePath, targetPath: FilePath, {useStale = false} = {}): Loader | undefined { const exact = this.findExact(sourceRoot, targetPath, {useStale}); if (exact) return exact; - let dir = dirname(targetPath); - for (let parent: string; true; dir = parent) { - parent = dirname(dir); + let dir = fileDirname(targetPath); + for (let parent: FilePath; true; dir = parent) { + parent = fileDirname(dir); if (parent === dir) return; // reached source root - if (existsSync(join(sourceRoot, dir))) return; // found folder - if (existsSync(join(sourceRoot, parent))) break; // found parent + if (existsSync(fileJoin(sourceRoot, dir))) return; // found folder + if (existsSync(fileJoin(sourceRoot, parent))) break; // found parent } for (const [ext, Extractor] of extractors) { - const archive = dir + ext; - if (existsSync(join(sourceRoot, archive))) { + const archive = FilePath(dir + ext); + if (existsSync(fileJoin(sourceRoot, archive))) { return new Extractor({ preload: async () => archive, inflatePath: targetPath.slice(archive.length - ext.length + 1), - path: join(sourceRoot, archive), + path: fileJoin(sourceRoot, archive), sourceRoot, targetPath, useStale @@ -115,17 +117,17 @@ export abstract class Loader { } } - private static findExact(sourceRoot: string, targetPath: string, {useStale}): Loader | undefined { + private static findExact(sourceRoot: FilePath, targetPath: FilePath, {useStale}): Loader | undefined { for (const [ext, [command, ...args]] of Object.entries(languages)) { - if (!existsSync(join(sourceRoot, targetPath + ext))) continue; - if (extname(targetPath) === "") { + if (!existsSync(fileJoin(sourceRoot, targetPath + ext))) continue; + if (fileExtname(targetPath) === "") { console.warn(`invalid data loader path: ${targetPath + ext}`); return; } - const path = join(sourceRoot, targetPath + ext); + const path = fileJoin(sourceRoot, targetPath + ext); return new CommandLoader({ command: command ?? path, - args: command == null ? args : [...args, path], + args: command == null ? args : [...args, unFilePath(path)], path, sourceRoot, targetPath, @@ -139,13 +141,13 @@ export abstract class Loader { * to the source root; this is within the .observablehq/cache folder within * the source root. 
*/ - async load(effects = defaultEffects): Promise { - const key = join(this.sourceRoot, this.targetPath); + async load(effects = defaultEffects): Promise { + const key = fileJoin(this.sourceRoot, this.targetPath); let command = runningCommands.get(key); if (!command) { command = (async () => { - const outputPath = join(".observablehq", "cache", this.targetPath); - const cachePath = join(this.sourceRoot, outputPath); + const outputPath = fileJoin(".observablehq", "cache", this.targetPath); + const cachePath = fileJoin(this.sourceRoot, outputPath); const loaderStat = await maybeStat(this.path); const cacheStat = await maybeStat(cachePath); if (!cacheStat) effects.output.write(faint("[missing] ")); @@ -153,8 +155,8 @@ export abstract class Loader { if (this.useStale) return effects.output.write(faint("[using stale] ")), outputPath; else effects.output.write(faint("[stale] ")); } else return effects.output.write(faint("[fresh] ")), outputPath; - const tempPath = join(this.sourceRoot, ".observablehq", "cache", `${this.targetPath}.${process.pid}`); - const errorPath = tempPath + ".err"; + const tempPath = fileJoin(this.sourceRoot, ".observablehq", "cache", `${this.targetPath}.${process.pid}`); + const errorPath = FilePath(tempPath + ".err"); const errorStat = await maybeStat(errorPath); if (errorStat) { if (errorStat.mtimeMs > loaderStat!.mtimeMs && errorStat.mtimeMs > -1000 + Date.now()) @@ -165,7 +167,7 @@ export abstract class Loader { const tempFd = await open(tempPath, "w"); try { await this.exec(tempFd.createWriteStream({highWaterMark: 1024 * 1024}), effects); - await mkdir(dirname(cachePath), {recursive: true}); + await mkdir(fileDirname(cachePath), {recursive: true}); await rename(tempPath, cachePath); } catch (error) { await rename(tempPath, errorPath); @@ -183,7 +185,7 @@ export abstract class Loader { command.then( (path) => { effects.logger.log( - `${green("success")} ${cyan(formatSize(statSync(join(this.sourceRoot, path)).size))} ${faint( + `${green("success")} ${cyan(formatSize(statSync(fileJoin(this.sourceRoot, path)).size))} ${faint( `in ${formatElapsed(start)}` )}` ); @@ -239,12 +241,12 @@ class CommandLoader extends Loader { interface ZipExtractorOptions extends LoaderOptions { preload: Loader["load"]; - inflatePath: string; + inflatePath: FilePath; } class ZipExtractor extends Loader { private readonly preload: Loader["load"]; - private readonly inflatePath: string; + private readonly inflatePath: FilePath; constructor({preload, inflatePath, ...options}: ZipExtractorOptions) { super(options); @@ -253,8 +255,8 @@ class ZipExtractor extends Loader { } async exec(output: WriteStream, effects?: LoadEffects): Promise { - const archivePath = join(this.sourceRoot, await this.preload(effects)); - const file = (await JSZip.loadAsync(await readFile(archivePath))).file(this.inflatePath); + const archivePath = fileJoin(this.sourceRoot, await this.preload(effects)); + const file = (await JSZip.loadAsync(await readFile(archivePath))).file(unFilePath(this.inflatePath)); if (!file) throw Object.assign(new Error("file not found"), {code: "ENOENT"}); const pipe = file.nodeStream().pipe(output); await new Promise((resolve, reject) => pipe.on("error", reject).on("finish", resolve)); @@ -263,13 +265,13 @@ class ZipExtractor extends Loader { interface TarExtractorOptions extends LoaderOptions { preload: Loader["load"]; - inflatePath: string; + inflatePath: FilePath; gunzip?: boolean; } class TarExtractor extends Loader { private readonly preload: Loader["load"]; - private readonly inflatePath: 
string; + private readonly inflatePath: FilePath; private readonly gunzip: boolean; constructor({preload, inflatePath, gunzip = false, ...options}: TarExtractorOptions) { @@ -280,12 +282,12 @@ class TarExtractor extends Loader { } async exec(output: WriteStream, effects?: LoadEffects): Promise { - const archivePath = join(this.sourceRoot, await this.preload(effects)); + const archivePath = fileJoin(this.sourceRoot, await this.preload(effects)); const tar = extract(); const input = createReadStream(archivePath); (this.gunzip ? input.pipe(createGunzip()) : input).pipe(tar); for await (const entry of tar) { - if (entry.header.name === this.inflatePath) { + if (FilePath(entry.header.name) === this.inflatePath) { const pipe = entry.pipe(output); await new Promise((resolve, reject) => pipe.on("error", reject).on("finish", resolve)); return; diff --git a/src/deploy.ts b/src/deploy.ts index 57acc1c8c..c72ba54da 100644 --- a/src/deploy.ts +++ b/src/deploy.ts @@ -1,6 +1,7 @@ -import {join} from "node:path"; import * as clack from "@clack/prompts"; import wrapAnsi from "wrap-ansi"; +import {existsSync, readFile} from "./brandedFs.js"; +import {type FilePath, fileJoin, filePathToUrlPath} from "./brandedPath.js"; import type {BuildEffects} from "./build.js"; import {build} from "./build.js"; import type {ClackEffects} from "./clack.js"; @@ -39,8 +40,8 @@ export interface DeployOptions { } export interface DeployEffects extends ConfigEffects, TtyEffects, AuthEffects { - getDeployConfig: (sourceRoot: string) => Promise; - setDeployConfig: (sourceRoot: string, config: DeployConfig) => Promise; + getDeployConfig: (sourceRoot: FilePath) => Promise; + setDeployConfig: (sourceRoot: FilePath, config: DeployConfig) => Promise; clack: ClackEffects; logger: Logger; input: NodeJS.ReadableStream; @@ -78,14 +79,16 @@ export async function deploy( if (deployConfig.workspaceLogin && !deployConfig.workspaceLogin.match(/^@?[a-z0-9-]+$/)) { throw new CliError( - `Found invalid workspace login in ${join(config.root, ".observablehq", "deploy.json")}: ${ + `Found invalid workspace login in ${fileJoin(config.root, ".observablehq", "deploy.json")}: ${ deployConfig.workspaceLogin }.` ); } if (deployConfig.projectSlug && !deployConfig.projectSlug.match(/^[a-z0-9-]+$/)) { throw new CliError( - `Found invalid project slug in ${join(config.root, ".observablehq", "deploy.json")}: ${deployConfig.projectSlug}.` + `Found invalid project slug in ${fileJoin(config.root, ".observablehq", "deploy.json")}: ${ + deployConfig.projectSlug + }.` ); } @@ -348,10 +351,10 @@ class DeployBuildEffects implements BuildEffects { this.logger = effects.logger; this.output = effects.output; } - async copyFile(sourcePath: string, outputPath: string) { + async copyFile(sourcePath: FilePath, outputPath: FilePath) { this.logger.log(outputPath); try { - await this.apiClient.postDeployFile(this.deployId, sourcePath, outputPath); + await this.apiClient.postDeployFile(this.deployId, sourcePath, filePathToUrlPath(outputPath)); } catch (error) { if (isApiError(error) && error.details.errors.some((e) => e.code === "FILE_QUOTA_EXCEEDED")) { throw new CliError("You have reached the total file size limit.", {cause: error}); @@ -364,10 +367,10 @@ class DeployBuildEffects implements BuildEffects { throw error; } } - async writeFile(outputPath: string, content: Buffer | string) { + async writeFile(outputPath: FilePath, content: Buffer | string) { this.logger.log(outputPath); try { - await this.apiClient.postDeployFileContents(this.deployId, content, outputPath); + await 
this.apiClient.postDeployFileContents(this.deployId, content, filePathToUrlPath(outputPath)); } catch (error) { if (isApiError(error) && error.details.errors.some((e) => e.code === "FILE_QUOTA_EXCEEDED")) { throw new CliError("You have reached the total file size limit.", {cause: error}); @@ -375,6 +378,14 @@ class DeployBuildEffects implements BuildEffects { throw error; } } + existsSync(path: FilePath) { + return existsSync(path); + } + readFile(path: FilePath): Promise; + readFile(path: FilePath, encoding: BufferEncoding): Promise; + readFile(path: FilePath, encoding?: BufferEncoding): Promise { + return encoding ? readFile(path, encoding) : readFile(path); + } } // export for testing diff --git a/src/fileWatchers.ts b/src/fileWatchers.ts index 96fd1a564..fb13263fe 100644 --- a/src/fileWatchers.ts +++ b/src/fileWatchers.ts @@ -1,4 +1,6 @@ -import {type FSWatcher, existsSync, watch} from "node:fs"; +import {type FSWatcher} from "node:fs"; +import {existsSync, watch} from "./brandedFs.js"; +import type {FilePath, UrlPath} from "./brandedPath.js"; import {Loader} from "./dataloader.js"; import {isEnoent} from "./error.js"; import {maybeStat} from "./files.js"; @@ -7,7 +9,7 @@ import {resolvePath} from "./url.js"; export class FileWatchers { private readonly watchers: FSWatcher[] = []; - static async of(root: string, path: string, names: string[], callback: (name: string) => void) { + static async of(root: FilePath, path: FilePath, names: UrlPath[], callback: (name: UrlPath) => void) { const that = new FileWatchers(); const {watchers} = that; for (const name of new Set(names)) { diff --git a/src/files.ts b/src/files.ts index 2da58e669..00ca3011c 100644 --- a/src/files.ts +++ b/src/files.ts @@ -1,65 +1,70 @@ -import {type Stats, existsSync} from "node:fs"; -import {mkdir, readdir, stat} from "node:fs/promises"; -import {dirname, extname, join, normalize, relative} from "node:path"; +import {type Stats} from "node:fs"; import {cwd} from "node:process"; import {fileURLToPath} from "node:url"; import mime from "mime"; +import {existsSync, mkdir, readdir, stat} from "./brandedFs.js"; +import {UrlPath, fileSep, urlJoin, urlPathToFilePath} from "./brandedPath.js"; +import {FilePath, fileDirname, fileExtname, fileJoin, fileNormalize, fileRelative, unUrlPath} from "./brandedPath.js"; import {isEnoent} from "./error.js"; import type {FileReference} from "./javascript.js"; import {relativeUrl, resolvePath} from "./url.js"; // A path is local if it doesn’t go outside the the root. 
-export function getLocalPath(sourcePath: string, name: string): string | null { - if (/^\w+:/.test(name)) return null; // URL +export function getLocalPath(sourcePath: UrlPath, name: UrlPath): FilePath | null { + if (/^\w+:/.test(unUrlPath(name))) return null; // URL if (name.startsWith("#")) return null; // anchor tag - const path = resolvePath(sourcePath, name); - if (path.startsWith("../")) return null; // goes above root + const path = resolvePath(urlPathToFilePath(sourcePath), name); + if (path.startsWith(`..${fileSep}`)) return null; // goes above root return path; } -export function getClientPath(entry: string): string { - const path = relative(cwd(), join(dirname(fileURLToPath(import.meta.url)), "..", entry)); +export function getClientPath(entry: FilePath): FilePath { + const path = fileRelative( + FilePath(cwd()), + fileJoin(fileDirname(FilePath(fileURLToPath(import.meta.url))), "..", entry) + ); + // TODO this should use the effect version of existsSync to be more type safe if (path.endsWith(".js") && !existsSync(path)) { - const tspath = path.slice(0, -".js".length) + ".ts"; + const tspath = FilePath(path.slice(0, -".js".length) + ".ts"); if (existsSync(tspath)) return tspath; } return path; } -export function fileReference(name: string, sourcePath: string): FileReference { +export function fileReference(name: UrlPath, sourcePath: UrlPath): FileReference { return { - name: relativeUrl(sourcePath, name), - mimeType: mime.getType(name), - path: relativeUrl(sourcePath, join("_file", name)) + name: relativeUrl(sourcePath, UrlPath(name)), + mimeType: mime.getType(unUrlPath(name)), + path: relativeUrl(sourcePath, urlJoin("_file", name)) }; } -export async function* visitMarkdownFiles(root: string): AsyncGenerator { +export async function* visitMarkdownFiles(root: FilePath): AsyncGenerator { for await (const file of visitFiles(root)) { - if (extname(file) !== ".md") continue; + if (fileExtname(file) !== ".md") continue; yield file; } } -export async function* visitFiles(root: string): AsyncGenerator { +export async function* visitFiles(root: FilePath): AsyncGenerator { const visited = new Set(); - const queue: string[] = [(root = normalize(root))]; + const queue: FilePath[] = [(root = fileNormalize(root))]; for (const path of queue) { const status = await stat(path); if (status.isDirectory()) { if (visited.has(status.ino)) continue; // circular symlink visited.add(status.ino); for (const entry of await readdir(path)) { - queue.push(join(path, entry)); + queue.push(fileJoin(path, entry)); } } else { - yield relative(root, path); + yield fileRelative(root, path); } } } // Like fs.stat, but returns undefined instead of throwing ENOENT if not found. 
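As a usage sketch of the traversal helpers above, enumerating the Markdown pages of a project now goes through the branded types end to end. This assumes only the visitMarkdownFiles, FilePath, and unFilePath exports visible in this diff; the "docs" default root is purely illustrative.

import {visitMarkdownFiles} from "./files.js";
import {FilePath, unFilePath} from "./brandedPath.js";

// Collect every Markdown page under the source root as plain relative strings.
export async function listPages(root: FilePath = FilePath("docs")): Promise<string[]> {
  const pages: string[] = [];
  for await (const file of visitMarkdownFiles(root)) {
    pages.push(unFilePath(file)); // e.g. "index.md", "sub/page.md"
  }
  return pages;
}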
-export async function maybeStat(path: string): Promise { +export async function maybeStat(path: FilePath): Promise { try { return await stat(path); } catch (error) { @@ -67,8 +72,8 @@ export async function maybeStat(path: string): Promise { } } -export async function prepareOutput(outputPath: string): Promise { - const outputDir = dirname(outputPath); - if (outputDir === ".") return; +export async function prepareOutput(outputPath: FilePath): Promise { + const outputDir = fileDirname(outputPath); + if (outputDir === FilePath(".")) return; await mkdir(outputDir, {recursive: true}); } diff --git a/src/javascript.ts b/src/javascript.ts index 3e4192f70..eace7add0 100644 --- a/src/javascript.ts +++ b/src/javascript.ts @@ -1,5 +1,6 @@ import {Parser, tokTypes} from "acorn"; import type {Expression, Identifier, Node, Options, Program} from "acorn"; +import {type FilePath, type UrlPath, filePathToUrlPath} from "./brandedPath.js"; import {fileReference} from "./files.js"; import {findAssignments} from "./javascript/assignments.js"; import {findAwaits} from "./javascript/awaits.js"; @@ -14,21 +15,21 @@ import {red} from "./tty.js"; export interface FileReference { /** The relative path from the page to the original file (e.g., "./test.txt"). */ - name: string; + name: UrlPath; /** The MIME type, if known; derived from the file extension. */ mimeType: string | null; /** The relative path from the page to the file in _file (e.g., "../_file/sub/test.txt"). */ - path: string; + path: UrlPath; } export interface ImportReference { - name: string; + name: UrlPath; type: "global" | "local"; } export interface Feature { type: "FileAttachment"; - name: string; + name: UrlPath; } export interface BaseTranspile { @@ -51,8 +52,8 @@ export interface Transpile extends BaseTranspile { export interface ParseOptions { id: string; - root: string; - sourcePath: string; + root: FilePath; + sourcePath: FilePath; inline?: boolean; sourceLine?: number; globals?: Set; @@ -65,7 +66,7 @@ export function transpileJavaScript(input: string, options: ParseOptions): Pendi const node = parseJavaScript(input, options); const files = node.features .filter((f) => f.type === "FileAttachment") - .map(({name}) => fileReference(name, sourcePath)); + .map(({name}) => fileReference(name, filePathToUrlPath(sourcePath))); const inputs = Array.from(new Set(node.references.map((r) => r.name))); const implicitDisplay = node.expression && !inputs.includes("display") && !inputs.includes("view"); if (implicitDisplay) inputs.push("display"), (node.async = true); diff --git a/src/javascript/features.ts b/src/javascript/features.ts index 5e969567c..dec24de6f 100644 --- a/src/javascript/features.ts +++ b/src/javascript/features.ts @@ -1,12 +1,13 @@ import type {CallExpression, Identifier, Literal, Node, TemplateLiteral} from "acorn"; import {simple} from "acorn-walk"; +import {type FilePath, UrlPath, filePathToUrlPath} from "../brandedPath.js"; import {getLocalPath} from "../files.js"; import type {Feature} from "../javascript.js"; import {defaultGlobals} from "./globals.js"; import {findReferences} from "./references.js"; import {syntaxError} from "./syntaxError.js"; -export function findFeatures(node: Node, path: string, references: Identifier[], input: string): Feature[] { +export function findFeatures(node: Node, path: FilePath, references: Identifier[], input: string): Feature[] { const featureMap = getFeatureReferenceMap(node); const features: Feature[] = []; @@ -25,7 +26,7 @@ export function findFeatures(node: Node, path: string, references: 
Identifier[], if (name !== "FileAttachment") return; type = name; } - features.push(getFeature(type, node, path, input)); + features.push(getFeature(type, node, filePathToUrlPath(path), input)); } }); @@ -82,7 +83,7 @@ export function getFeatureReferenceMap(node: Node): Map(urlPaths.map((urlPath) => [urlPath, urlPathToFilePath(urlPath)])); - for (const path of set) { - imports.push({type: "local", name: path}); + for (const [urlPath, filePath] of map.entries()) { + imports.push({type: "local", name: urlPath}); try { - const input = readFileSync(join(root, path), "utf-8"); + const input = readFileSync(fileJoin(root, filePath), "utf-8"); const body = Parser.parse(input, parseOptions); simple( @@ -117,22 +126,23 @@ export function parseLocalImports(root: string, paths: string[]): ImportsAndFeat ExportNamedDeclaration: findImport }, undefined, - path + filePath ); - features.push(...findImportFeatures(body, path, input)); + features.push(...findImportFeatures(body, urlPath, input)); } catch (error) { if (!isEnoent(error) && !(error instanceof SyntaxError)) throw error; } } - function findImport(node: ImportNode | ExportNode, path: string) { + function findImport(node: ImportNode | ExportNode, path: FilePath) { if (isStringLiteral(node.source)) { const value = getStringLiteralValue(node.source); if (isLocalImport(value, path)) { - set.add(resolvePath(path, value)); + const filePath = resolvePath(path, UrlPath(value)); + map.set(filePathToUrlPath(filePath), filePath); } else { - imports.push({name: value, type: "global"}); + imports.push({name: UrlPath(value), type: "global"}); // non-local imports don't need to be traversed } } @@ -141,7 +151,7 @@ export function parseLocalImports(root: string, paths: string[]): ImportsAndFeat return {imports, features}; } -export function findImportFeatures(node: Node, path: string, input: string): Feature[] { +export function findImportFeatures(node: Node, path: UrlPath, input: string): Feature[] { const featureMap = getFeatureReferenceMap(node); const features: Feature[] = []; @@ -156,7 +166,7 @@ export function findImportFeatures(node: Node, path: string, input: string): Fea } /** Rewrites import specifiers and FileAttachment calls in the specified ES module source. 
*/ -export async function rewriteModule(input: string, path: string, resolver: ImportResolver): Promise { +export async function rewriteModule(input: string, path: FilePath, resolver: ImportResolver): Promise { const body = Parser.parse(input, parseOptions); const featureMap = getFeatureReferenceMap(body); const output = new Sourcemap(input); @@ -170,10 +180,12 @@ export async function rewriteModule(input: string, path: string, resolver: Impor CallExpression(node) { const type = featureMap.get(node.callee as Identifier); if (type) { - const feature = getFeature(type, node, path, input); // validate syntax + const feature = getFeature(type, node, filePathToUrlPath(path), input); // validate syntax if (feature.type === "FileAttachment") { const arg = node.arguments[0]; - const result = JSON.stringify(relativeUrl(join("_import", path), feature.name)); + const result = JSON.stringify( + relativeUrl(urlJoin("_import", filePathToUrlPath(path)), UrlPath(feature.name)) + ); output.replaceLeft(arg.start, arg.end, `${result}, import.meta.url`); } } @@ -218,7 +230,7 @@ export function findImportDeclarations(cell: JavaScriptNode): ImportDeclaration[ export async function rewriteImports( output: Sourcemap, cell: JavaScriptNode, - sourcePath: string, + sourcePath: FilePath, resolver: ImportResolver ): Promise { const expressions: ImportExpression[] = []; @@ -264,35 +276,39 @@ export async function rewriteImports( } } -export type ImportResolver = (path: string, specifier: string) => Promise; +export type ImportResolver = (path: FilePath, specifier: string) => Promise; -export function createImportResolver(root: string, base: "." | "_import" = "."): ImportResolver { +export function createImportResolver(root: FilePath, base: "." | "_import" = "."): ImportResolver { return async (path, specifier) => { + const urlPath = filePathToUrlPath(path); return isLocalImport(specifier, path) - ? relativeUrl(path, resolvePath(base, path, resolveImportHash(root, path, specifier))) + ? relativeUrl( + urlPath, + filePathToUrlPath(resolvePath(FilePath(base), path, resolveImportHash(root, urlPath, specifier))) + ) : specifier === "npm:@observablehq/runtime" - ? resolveBuiltin(base, path, "runtime.js") + ? resolveBuiltin(base, urlPath, "runtime.js") : specifier === "npm:@observablehq/stdlib" - ? resolveBuiltin(base, path, "stdlib.js") + ? resolveBuiltin(base, urlPath, "stdlib.js") : specifier === "npm:@observablehq/dot" - ? resolveBuiltin(base, path, "stdlib/dot.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/dot.js") // TODO publish to npm : specifier === "npm:@observablehq/duckdb" - ? resolveBuiltin(base, path, "stdlib/duckdb.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/duckdb.js") // TODO publish to npm : specifier === "npm:@observablehq/inputs" - ? resolveBuiltin(base, path, "stdlib/inputs.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/inputs.js") // TODO publish to npm : specifier === "npm:@observablehq/mermaid" - ? resolveBuiltin(base, path, "stdlib/mermaid.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/mermaid.js") // TODO publish to npm : specifier === "npm:@observablehq/tex" - ? resolveBuiltin(base, path, "stdlib/tex.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/tex.js") // TODO publish to npm : specifier === "npm:@observablehq/sqlite" - ? resolveBuiltin(base, path, "stdlib/sqlite.js") // TODO publish to npm + ? 
resolveBuiltin(base, urlPath, "stdlib/sqlite.js") // TODO publish to npm : specifier === "npm:@observablehq/xlsx" - ? resolveBuiltin(base, path, "stdlib/xlsx.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/xlsx.js") // TODO publish to npm : specifier === "npm:@observablehq/zip" - ? resolveBuiltin(base, path, "stdlib/zip.js") // TODO publish to npm + ? resolveBuiltin(base, urlPath, "stdlib/zip.js") // TODO publish to npm : specifier.startsWith("npm:") ? await resolveNpmImport(specifier.slice("npm:".length)) - : specifier; + : UrlPath(specifier); }; } @@ -313,14 +329,14 @@ function formatNpmSpecifier({name, range, path}: {name: string; range?: string; // Like import, don’t fetch the same package more than once to ensure // consistency; restart the server if you want to clear the cache. -const fetchCache = new Map>(); +const fetchCache = new Map>(); -async function cachedFetch(href: string): Promise<{headers: Headers; body: any}> { +async function cachedFetch(href: UrlPath): Promise<{headers: Headers; body: any}> { if (!remoteModulePreloadEnabled) throw new Error("remote module preload is not enabled"); let promise = fetchCache.get(href); if (promise) return promise; promise = (async () => { - const response = await fetch(href); + const response = await fetch(unUrlPath(href)); if (!response.ok) throw new Error(`unable to fetch: ${href}`); const json = /^application\/json(;|$)/.test(response.headers.get("content-type")!); const body = await (json ? response.json() : response.text()); @@ -336,25 +352,26 @@ async function resolveNpmVersion({name, range}: {name: string; range?: string}): if (range && /^\d+\.\d+\.\d+([-+].*)?$/.test(range)) return range; // exact version specified const specifier = formatNpmSpecifier({name, range}); const search = range ? `?specifier=${range}` : ""; - const {version} = (await cachedFetch(`https://data.jsdelivr.com/v1/packages/npm/${name}/resolved${search}`)).body; + const {version} = (await cachedFetch(UrlPath(`https://data.jsdelivr.com/v1/packages/npm/${name}/resolved${search}`))) + .body; if (!version) throw new Error(`unable to resolve version: ${specifier}`); return version; } -export async function resolveNpmImport(specifier: string): Promise { +export async function resolveNpmImport(specifier: string): Promise { let {name, range, path = "+esm"} = parseNpmSpecifier(specifier); // eslint-disable-line prefer-const if (name === "@duckdb/duckdb-wasm" && !range) range = "1.28.0"; // https://github.com/duckdb/duckdb-wasm/issues/1561 if (name === "apache-arrow" && !range) range = "13.0.0"; // https://github.com/observablehq/framework/issues/750 if (name === "parquet-wasm" && !range) range = "0.5.0"; // https://github.com/observablehq/framework/issues/733 if (name === "echarts" && !range) range = "5.4.3"; // https://github.com/observablehq/framework/pull/811 try { - return `https://cdn.jsdelivr.net/npm/${name}@${await resolveNpmVersion({name, range})}/${path}`; + return UrlPath(`https://cdn.jsdelivr.net/npm/${name}@${await resolveNpmVersion({name, range})}/${path}`); } catch { - return `https://cdn.jsdelivr.net/npm/${name}${range ? `@${range}` : ""}/${path}`; + return UrlPath(`https://cdn.jsdelivr.net/npm/${name}${range ? 
`@${range}` : ""}/${path}`); } } -const preloadCache = new Map | undefined>>(); +const preloadCache = new Map | undefined>>(); /** * Fetches the module at the specified URL and returns a promise to any @@ -363,7 +380,7 @@ const preloadCache = new Map | undefined>>(); * are considered, and the fetched module must be have immutable public caching; * dynamic imports may not be used and hence are not preloaded. */ -async function fetchModulePreloads(href: string): Promise | undefined> { +async function fetchModulePreloads(href: UrlPath): Promise | undefined> { let promise = preloadCache.get(href); if (promise) return promise; promise = (async () => { @@ -376,7 +393,7 @@ async function fetchModulePreloads(href: string): Promise | undefine const {headers, body} = response; const cache = headers.get("cache-control")?.split(/\s*,\s*/); if (!cache?.some((c) => c === "immutable") || !cache?.some((c) => c === "public")) return; - const imports = new Set(); + const imports = new Set(); let program: Program; try { program = Parser.parse(body, parseOptions); @@ -392,7 +409,7 @@ async function fetchModulePreloads(href: string): Promise | undefine function findImport(node: ImportNode | ExportNode) { if (isStringLiteral(node.source)) { const value = getStringLiteralValue(node.source); - if (isPathImport(value)) imports.add(String(new URL(value, href))); + if (isPathImport(value)) imports.add(UrlPath(String(new URL(value, unUrlPath(href))))); } } // TODO integrityCache.set(href, `sha384-${createHash("sha384").update(body).digest("base64")}`); @@ -410,10 +427,10 @@ const integrityCache = new Map(); * externally-hosted modules to compute the transitively-imported modules; also * precomputes the subresource integrity hash for each fetched module. */ -export async function resolveModulePreloads(hrefs: Set): Promise { +export async function resolveModulePreloads(hrefs: Set): Promise { if (!remoteModulePreloadEnabled) return; let resolve: () => void; - const visited = new Set(); + const visited = new Set(); const queue = new Set>(); for (const href of hrefs) { @@ -422,7 +439,7 @@ export async function resolveModulePreloads(hrefs: Set): Promise { } } - function enqueue(href: string) { + function enqueue(href: UrlPath) { if (visited.has(href)) return; visited.add(href); const promise = (async () => { @@ -451,16 +468,21 @@ export function resolveModuleIntegrity(href: string): string | undefined { return integrityCache.get(href); } -function resolveBuiltin(base: "." | "_import", path: string, specifier: string): string { - return relativeUrl(join(base === "." ? "_import" : ".", path), join("_observablehq", specifier)); +function resolveBuiltin(base: "." | "_import", path: UrlPath, specifier: string): UrlPath { + return relativeUrl(urlJoin(base === "." ? "_import" : ".", path), urlJoin("_observablehq", specifier)); } /** * Given the specified local import, applies the ?sha query string based on the * content hash of the imported module and its transitively imported modules. */ -function resolveImportHash(root: string, path: string, specifier: string): string { - return `${specifier}?sha=${getModuleHash(root, resolvePath(path, specifier))}`; +function resolveImportHash(root: FilePath, path: UrlPath, specifier: string): UrlPath { + return UrlPath( + `${specifier}?sha=${getModuleHash( + root, + filePathToUrlPath(resolvePath(urlPathToFilePath(path), UrlPath(specifier))) + )}` + ); } /** @@ -468,10 +490,11 @@ function resolveImportHash(root: string, path: string, specifier: string): strin * given source root. 
This involves parsing the specified module to process * transitive imports. */ -function getModuleHash(root: string, path: string): string { +function getModuleHash(root: FilePath, path: UrlPath): string { const hash = createHash("sha256"); + const filePath = urlPathToFilePath(path); try { - hash.update(readFileSync(join(root, path), "utf-8")); + hash.update(readFileSync(fileJoin(root, filePath), "utf-8")); } catch (error) { if (!isEnoent(error)) throw error; } @@ -480,7 +503,7 @@ function getModuleHash(root: string, path: string): string { for (const i of [...imports, ...features]) { if (i.type === "local" || i.type === "FileAttachment") { try { - hash.update(readFileSync(join(root, i.name), "utf-8")); + hash.update(readFileSync(fileJoin(root, urlPathToFilePath(UrlPath(i.name))), "utf-8")); } catch (error) { if (!isEnoent(error)) throw error; continue; @@ -502,8 +525,8 @@ export function isPathImport(specifier: string): boolean { return ["./", "../", "/"].some((prefix) => specifier.startsWith(prefix)); } -export function isLocalImport(specifier: string, path: string): boolean { - return isPathImport(specifier) && !resolvePath(path, specifier).startsWith("../"); +export function isLocalImport(specifier: string, path: FilePath): boolean { + return isPathImport(specifier) && !resolvePath(path, UrlPath(specifier)).startsWith(`..${fileSep}`); } function isNamespaceSpecifier(node) { diff --git a/src/libraries.ts b/src/libraries.ts index 54da0f3c7..29b007c24 100644 --- a/src/libraries.ts +++ b/src/libraries.ts @@ -1,3 +1,4 @@ +import {UrlPath} from "./brandedPath.js"; import {resolveNpmImport} from "./javascript/imports.js"; export function getImplicitSpecifiers(inputs: Set): Set { @@ -25,12 +26,15 @@ export function addImplicitSpecifiers(specifiers: Set, inputs: Set): Promise> { +export async function getImplicitStylesheets(specifiers: Set): Promise> { return addImplicitStylesheets(new Set(), specifiers); } -export async function addImplicitStylesheets(stylesheets: Set, specifiers: Set): Promise> { - if (specifiers.has("npm:@observablehq/inputs")) stylesheets.add("observablehq:stdlib/inputs.css"); +export async function addImplicitStylesheets( + stylesheets: Set, + specifiers: Set +): Promise> { + if (specifiers.has("npm:@observablehq/inputs")) stylesheets.add(UrlPath("observablehq:stdlib/inputs.css")); if (specifiers.has("npm:katex")) stylesheets.add(await resolveNpmImport("katex/dist/katex.min.css")); if (specifiers.has("npm:leaflet")) stylesheets.add(await resolveNpmImport("leaflet/dist/leaflet.css")); if (specifiers.has("npm:mapbox-gl")) stylesheets.add(await resolveNpmImport("mapbox-gl/dist/mapbox-gl.css")); diff --git a/src/markdown.ts b/src/markdown.ts index 570acb452..6fa453474 100644 --- a/src/markdown.ts +++ b/src/markdown.ts @@ -1,5 +1,4 @@ import {createHash} from "node:crypto"; -import {readFile} from "node:fs/promises"; import {type Patch, type PatchItem, getPatch} from "fast-array-diff"; import equal from "fast-deep-equal"; import matter from "gray-matter"; @@ -10,6 +9,8 @@ import {type RuleCore} from "markdown-it/lib/parser_core.js"; import {type RuleInline} from "markdown-it/lib/parser_inline.js"; import {type RenderRule, type default as Renderer} from "markdown-it/lib/renderer.js"; import MarkdownItAnchor from "markdown-it-anchor"; +import {readFile} from "./brandedFs.js"; +import {type FilePath, UrlPath, filePathToUrlPath, unUrlPath, urlPathToFilePath} from "./brandedPath.js"; import {isEnoent} from "./error.js"; import {fileReference, getLocalPath} from "./files.js"; import 
{computeHash} from "./hash.js"; @@ -98,7 +99,7 @@ function getLiveSource(content: string, tag: string): string | undefined { : undefined; } -function makeFenceRenderer(root: string, baseRenderer: RenderRule, sourcePath: string): RenderRule { +function makeFenceRenderer(root: FilePath, baseRenderer: RenderRule, sourcePath: FilePath): RenderRule { return (tokens, idx, options, context: ParseContext, self) => { const token = tokens[idx]; const {tag, attributes} = parseInfo(token.info); @@ -262,7 +263,7 @@ const transformPlaceholderCore: RuleCore = (state) => { state.tokens = output; }; -function makePlaceholderRenderer(root: string, sourcePath: string): RenderRule { +function makePlaceholderRenderer(root: FilePath, sourcePath: FilePath): RenderRule { return (tokens, idx, options, context: ParseContext) => { const id = uniqueCodeId(context, tokens[idx].content); const token = tokens[idx]; @@ -296,7 +297,7 @@ function extendPiece(context: ParseContext, extend: Partial) { }; } -function renderIntoPieces(renderer: Renderer, root: string, sourcePath: string): Renderer["render"] { +function renderIntoPieces(renderer: Renderer, root: FilePath, sourcePath: FilePath): Renderer["render"] { return (tokens, options, context: ParseContext) => { const rules = renderer.rules; for (let i = 0, len = tokens.length; i < len; i++) { @@ -334,15 +335,15 @@ const SUPPORTED_PROPERTIES: readonly {query: string; src: "href" | "src" | "srcs {query: "video source[src]", src: "src"} ]); -export function normalizePieceHtml(html: string, sourcePath: string, context: ParseContext): string { +export function normalizePieceHtml(html: string, sourcePath: FilePath, context: ParseContext): string { const {document} = parseHTML(html); // Extracting references to files (such as from linked stylesheets). const filePaths = new Set(); - const resolvePath = (source: string): FileReference | undefined => { - const path = getLocalPath(sourcePath, source); + const resolvePath = (source: UrlPath): FileReference | undefined => { + const path = getLocalPath(filePathToUrlPath(sourcePath), source); if (!path) return; - const file = fileReference(path, sourcePath); + const file = fileReference(filePathToUrlPath(path), filePathToUrlPath(sourcePath)); if (!filePaths.has(file.path)) { filePaths.add(file.path); context.files.push(file); @@ -357,16 +358,16 @@ export function normalizePieceHtml(html: string, sourcePath: string, context: Pa .split(",") .map((p) => { const parts = p.trim().split(/\s+/); - const source = decodeURIComponent(parts[0]); + const source = UrlPath(decodeURIComponent(parts[0])); const file = resolvePath(source); return file ? 
`${file.path} ${parts.slice(1).join(" ")}`.trim() : parts.join(" "); }) .filter((p) => !!p); if (paths && paths.length > 0) element.setAttribute(src, paths.join(", ")); } else { - const source = decodeURIComponent(element.getAttribute(src)!); + const source = UrlPath(decodeURIComponent(element.getAttribute(src)!)); const file = resolvePath(source); - if (file) element.setAttribute(src, file.path); + if (file) element.setAttribute(src, unUrlPath(file.path)); } } } @@ -413,21 +414,21 @@ async function toParseCells(pieces: RenderPiece[]): Promise { } export interface ParseOptions { - root: string; - path: string; + root: FilePath; + path: UrlPath; } -export async function parseMarkdown(sourcePath: string, {root, path}: ParseOptions): Promise { +export async function parseMarkdown(sourcePath: FilePath, {root, path}: ParseOptions): Promise { const source = await readFile(sourcePath, "utf-8"); const parts = matter(source, {}); const md = MarkdownIt({html: true}); md.use(MarkdownItAnchor, {permalink: MarkdownItAnchor.permalink.headerLink({class: "observablehq-header-anchor"})}); md.inline.ruler.push("placeholder", transformPlaceholderInline); md.core.ruler.before("linkify", "placeholder", transformPlaceholderCore); - md.renderer.rules.placeholder = makePlaceholderRenderer(root, path); - md.renderer.rules.fence = makeFenceRenderer(root, md.renderer.rules.fence!, path); + md.renderer.rules.placeholder = makePlaceholderRenderer(root, urlPathToFilePath(path)); + md.renderer.rules.fence = makeFenceRenderer(root, md.renderer.rules.fence!, urlPathToFilePath(path)); md.renderer.rules.softbreak = makeSoftbreakRenderer(md.renderer.rules.softbreak!); - md.renderer.render = renderIntoPieces(md.renderer, root, path); + md.renderer.render = renderIntoPieces(md.renderer, root, urlPathToFilePath(path)); const context: ParseContext = {files: [], imports: [], pieces: [], startLine: 0, currentLine: 0}; const tokens = md.parse(parts.content, context); const html = md.renderer.render(tokens, md.options, context); // Note: mutates context.pieces, context.files! 
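Because parseMarkdown now returns a content hash alongside the rendered pieces, a caller can check whether a page actually changed before pushing an update to the client, which is what the preview server below does. A small sketch of that pattern, assuming only the parseMarkdown signature and brandedPath helpers shown in this diff, with the page given as a UrlPath that already carries its .md extension:

import {parseMarkdown} from "./markdown.js";
import {FilePath, UrlPath, fileJoin, urlPathToFilePath} from "./brandedPath.js";

// Re-parse a page and report whether its content hash moved since last time.
export async function hasPageChanged(root: FilePath, page: UrlPath, previousHash: string): Promise<boolean> {
  const sourcePath = fileJoin(root, urlPathToFilePath(page)); // e.g. docs + /index.md
  const {hash} = await parseMarkdown(sourcePath, {root, path: page});
  return hash !== previousHash;
}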
@@ -439,14 +440,14 @@ export async function parseMarkdown(sourcePath: string, {root, path}: ParseOptio imports: context.imports, pieces: toParsePieces(context.pieces), cells: await toParseCells(context.pieces), - hash: await computeMarkdownHash(source, root, path, context.imports) + hash: await computeMarkdownHash(source, root, urlPathToFilePath(path), context.imports) }; } async function computeMarkdownHash( contents: string, - root: string, - path: string, + root: FilePath, + path: FilePath, imports: ImportReference[] ): Promise { const hash = createHash("sha256").update(contents); diff --git a/src/observableApiClient.ts b/src/observableApiClient.ts index 0ba23f62c..19aff8ef1 100644 --- a/src/observableApiClient.ts +++ b/src/observableApiClient.ts @@ -1,5 +1,6 @@ -import fs from "node:fs/promises"; import packageJson from "../package.json"; +import {readFile} from "./brandedFs.js"; +import {type FilePath, type UrlPath, unUrlPath} from "./brandedPath.js"; import {CliError, HttpError, isApiError} from "./error.js"; import type {ApiKey} from "./observableApiConfig.js"; import {faint, red} from "./tty.js"; @@ -145,18 +146,18 @@ export class ObservableApiClient { return data.id; } - async postDeployFile(deployId: string, filePath: string, relativePath: string): Promise { - const buffer = await fs.readFile(filePath); + async postDeployFile(deployId: string, filePath: FilePath, relativePath: UrlPath): Promise { + const buffer = await readFile(filePath); return await this.postDeployFileContents(deployId, buffer, relativePath); } - async postDeployFileContents(deployId: string, contents: Buffer | string, relativePath: string): Promise { + async postDeployFileContents(deployId: string, contents: Buffer | string, relativePath: UrlPath): Promise { if (typeof contents === "string") contents = Buffer.from(contents); const url = new URL(`/cli/deploy/${deployId}/file`, this._apiOrigin); const body = new FormData(); const blob = new Blob([contents]); body.append("file", blob); - body.append("client_name", relativePath); + body.append("client_name", unUrlPath(relativePath)); await this._fetch(url, {method: "POST", body}); } diff --git a/src/observableApiConfig.ts b/src/observableApiConfig.ts index 54738bd0d..48285a41c 100644 --- a/src/observableApiConfig.ts +++ b/src/observableApiConfig.ts @@ -1,26 +1,26 @@ -import fs from "node:fs/promises"; import os from "node:os"; -import path from "node:path"; +import {mkdir, readFile, writeFile} from "./brandedFs.js"; +import {FilePath, fileDirname, fileJoin, fileResolve} from "./brandedPath.js"; import {CliError, isEnoent} from "./error.js"; export interface ConfigEffects { - readFile: (path: string, encoding: "utf8") => Promise; - writeFile: (path: string, contents: string) => Promise; + readFile: (path: FilePath, encoding: "utf8") => Promise; + writeFile: (path: FilePath, contents: string) => Promise; env: typeof process.env; - cwd: typeof process.cwd; - mkdir: (path: string, options?: {recursive?: boolean}) => Promise; - homedir: typeof os.homedir; + cwd: () => FilePath; + mkdir: (path: FilePath, options?: {recursive?: boolean}) => Promise; + homedir: () => FilePath; } export const defaultEffects: ConfigEffects = { - readFile: (path, encoding) => fs.readFile(path, encoding), - writeFile: fs.writeFile, + readFile: (path, encoding) => readFile(path, encoding), + writeFile, mkdir: async (path, options) => { - await fs.mkdir(path, options); + await mkdir(path, options); }, env: process.env, - cwd: process.cwd, - homedir: os.homedir + cwd: () => 
FilePath(process.cwd()), + homedir: () => FilePath(os.homedir()) }; const userConfigName = ".observablehq"; @@ -38,7 +38,7 @@ export interface DeployConfig { } export type ApiKey = - | {source: "file"; filePath: string; key: string} + | {source: "file"; filePath: FilePath; key: string} | {source: "env"; envVar: string; key: string} | {source: "test"; key: string} | {source: "login"; key: string}; @@ -66,10 +66,10 @@ export async function setObservableApiKey(info: null | {id: string; key: string} } export async function getDeployConfig( - sourceRoot: string, + sourceRoot: FilePath, effects: ConfigEffects = defaultEffects ): Promise { - const deployConfigPath = path.join(effects.cwd(), sourceRoot, ".observablehq", "deploy.json"); + const deployConfigPath = fileJoin(effects.cwd(), sourceRoot, ".observablehq", "deploy.json"); let config: object | null = null; try { const content = await effects.readFile(deployConfigPath, "utf8"); @@ -89,12 +89,12 @@ export async function getDeployConfig( } export async function setDeployConfig( - sourceRoot: string, + sourceRoot: FilePath, newConfig: DeployConfig, effects: ConfigEffects = defaultEffects ): Promise { - const dir = path.join(effects.cwd(), sourceRoot, ".observablehq"); - const deployConfigPath = path.join(dir, "deploy.json"); + const dir = fileJoin(effects.cwd(), sourceRoot, ".observablehq"); + const deployConfigPath = fileJoin(dir, "deploy.json"); const oldConfig = (await getDeployConfig(sourceRoot)) || {}; const merged = {...oldConfig, ...newConfig}; await effects.mkdir(dir, {recursive: true}); @@ -103,14 +103,14 @@ export async function setDeployConfig( export async function loadUserConfig( effects: ConfigEffects = defaultEffects -): Promise<{configPath: string; config: UserConfig}> { - const homeConfigPath = path.join(effects.homedir(), userConfigName); +): Promise<{configPath: FilePath; config: UserConfig}> { + const homeConfigPath = fileJoin(effects.homedir(), userConfigName); - function* pathsToTry(): Generator { - let cursor = path.resolve(effects.cwd()); + function* pathsToTry(): Generator { + let cursor = fileResolve(effects.cwd()); while (true) { - yield path.join(cursor, userConfigName); - const nextCursor = path.dirname(cursor); + yield fileJoin(cursor, userConfigName); + const nextCursor = fileDirname(cursor); if (nextCursor === cursor) break; cursor = nextCursor; } @@ -138,7 +138,7 @@ export async function loadUserConfig( } async function writeUserConfig( - {configPath, config}: {configPath: string; config: UserConfig}, + {configPath, config}: {configPath: FilePath; config: UserConfig}, effects: ConfigEffects = defaultEffects ): Promise { await effects.writeFile(configPath, JSON.stringify(config, null, 2)); diff --git a/src/pager.ts b/src/pager.ts index 6a82c5860..3b9dc780b 100644 --- a/src/pager.ts +++ b/src/pager.ts @@ -1,3 +1,4 @@ +import {UrlPath} from "./brandedPath.js"; import type {Config, Page} from "./config.js"; export type PageLink = @@ -6,17 +7,20 @@ export type PageLink = | {prev: Page; next: undefined}; // last page // Pager links in the footer are computed once for a given navigation. -const linkCache = new WeakMap>(); +const linkCache = new WeakMap>(); -export function normalizePath(path: string): string { +/** + * Strip URL query string and hash fragment. 
+ */ +export function normalizePath(path: UrlPath): UrlPath { return path.replace(/[?#].*$/, ""); } -export function findLink(path: string, options: Pick = {pages: []}): PageLink | undefined { +export function findLink(path: UrlPath, options: Pick = {pages: []}): PageLink | undefined { const {pages, title} = options; let links = linkCache.get(pages); if (!links) { - links = new Map(); + links = new Map(); let prev: Page | undefined; for (const page of walk(pages, title)) { const path = normalizePath(page.path); @@ -40,8 +44,8 @@ export function findLink(path: string, options: Pick // Walks the unique pages in the site so as to avoid creating cycles. Implicitly // adds a link at the beginning to the home page (/index). -function* walk(pages: Config["pages"], title = "Home", visited = new Set()): Generator { - if (!visited.has("/index")) yield (visited.add("/index"), {name: title, path: "/index"}); +function* walk(pages: Config["pages"], title = "Home", visited = new Set()): Generator { + if (!visited.has(UrlPath("/index"))) yield (visited.add(UrlPath("/index")), {name: title, path: UrlPath("/index")}); for (const page of pages) { if ("pages" in page) yield* walk(page.pages, title, visited); else if (!visited.has(page.path)) yield (visited.add(page.path), page); diff --git a/src/preview.ts b/src/preview.ts index 93185dedf..e6ac1a9d2 100644 --- a/src/preview.ts +++ b/src/preview.ts @@ -1,16 +1,30 @@ import {createHash} from "node:crypto"; -import {watch} from "node:fs"; import type {FSWatcher, WatchEventType} from "node:fs"; -import {access, constants, readFile, stat} from "node:fs/promises"; import {createServer} from "node:http"; import type {IncomingMessage, RequestListener, Server, ServerResponse} from "node:http"; -import {basename, dirname, extname, join, normalize} from "node:path"; import {fileURLToPath} from "node:url"; import {difference} from "d3-array"; import openBrowser from "open"; import send from "send"; import {type WebSocket, WebSocketServer} from "ws"; import {version} from "../package.json"; +import {access, constants, readFile, stat, watch} from "./brandedFs.js"; +import { + FilePath, + UrlPath, + fileBasename, + fileDirname, + fileExtname, + fileJoin, + filePathToUrlPath, + unFilePath, + unUrlPath, + urlBasename, + urlDirname, + urlJoin, + urlNormalize, + urlPathToFilePath +} from "./brandedPath.js"; import type {Config} from "./config.js"; import {mergeStyle} from "./config.js"; import {Loader} from "./dataloader.js"; @@ -28,7 +42,7 @@ import {Telemetry} from "./telemetry.js"; import {bold, faint, green, link, red} from "./tty.js"; import {relativeUrl} from "./url.js"; -const publicRoot = join(dirname(fileURLToPath(import.meta.url)), "..", "public"); +const publicRoot = fileJoin(fileDirname(FilePath(fileURLToPath(import.meta.url))), "..", "public"); export interface PreviewOptions { config: Config; @@ -91,15 +105,15 @@ export class PreviewServer { if (this._verbose) console.log(faint(req.method!), req.url); try { const url = new URL(req.url!, "http://localhost"); - let pathname = decodeURIComponent(url.pathname); + let pathname = UrlPath(decodeURIComponent(url.pathname)); let match: RegExpExecArray | null; - if (pathname === "/_observablehq/runtime.js") { - const root = join(fileURLToPath(import.meta.resolve("@observablehq/runtime")), "../../"); - send(req, "/dist/runtime.js", {root}).pipe(res); + if (pathname === UrlPath("/_observablehq/runtime.js")) { + const root = fileJoin(fileURLToPath(import.meta.resolve("@observablehq/runtime")), "../../"); + send(req, 
"/dist/runtime.js", {root: unFilePath(root)}).pipe(res); } else if (pathname.startsWith("/_observablehq/stdlib.js")) { - end(req, res, await rollupClient(getClientPath("./src/client/stdlib.js")), "text/javascript"); + end(req, res, await rollupClient(getClientPath(FilePath("./src/client/stdlib.js"))), "text/javascript"); } else if (pathname.startsWith("/_observablehq/stdlib/")) { - const path = getClientPath("./src/client/" + pathname.slice("/_observablehq/".length)); + const path = getClientPath(FilePath("./src/client/" + pathname.slice("/_observablehq/".length))); if (pathname.endsWith(".js")) { end(req, res, await rollupClient(path), "text/javascript"); } else if (pathname.endsWith(".css")) { @@ -107,19 +121,19 @@ export class PreviewServer { } else { throw new HttpError(`Not found: ${pathname}`, 404); } - } else if (pathname === "/_observablehq/client.js") { - end(req, res, await rollupClient(getClientPath("./src/client/preview.js")), "text/javascript"); - } else if (pathname === "/_observablehq/search.js") { - end(req, res, await rollupClient(getClientPath("./src/client/search.js")), "text/javascript"); - } else if (pathname === "/_observablehq/minisearch.json") { + } else if (pathname === UrlPath("/_observablehq/client.js")) { + end(req, res, await rollupClient(getClientPath(FilePath("./src/client/preview.js"))), "text/javascript"); + } else if (pathname === UrlPath("/_observablehq/search.js")) { + end(req, res, await rollupClient(getClientPath(FilePath("./src/client/search.js"))), "text/javascript"); + } else if (pathname === UrlPath("/_observablehq/minisearch.json")) { end(req, res, await searchIndex(config), "application/json"); - } else if ((match = /^\/_observablehq\/theme-(?[\w-]+(,[\w-]+)*)?\.css$/.exec(pathname))) { + } else if ((match = /^\/_observablehq\/theme-(?[\w-]+(,[\w-]+)*)?\.css$/.exec(unUrlPath(pathname)))) { end(req, res, await bundleStyles({theme: match.groups!.theme?.split(",") ?? 
[]}), "text/css"); } else if (pathname.startsWith("/_observablehq/")) { - send(req, pathname.slice("/_observablehq".length), {root: publicRoot}).pipe(res); + send(req, unUrlPath(pathname).slice("/_observablehq".length), {root: unFilePath(publicRoot)}).pipe(res); } else if (pathname.startsWith("/_import/")) { const path = pathname.slice("/_import".length); - const filepath = join(root, path); + const filepath = fileJoin(root, urlPathToFilePath(path)); try { if (pathname.endsWith(".css")) { await access(filepath, constants.R_OK); @@ -127,7 +141,7 @@ export class PreviewServer { return; } else if (pathname.endsWith(".js")) { const input = await readFile(filepath, "utf-8"); - const output = await rewriteModule(input, path, createImportResolver(root)); + const output = await rewriteModule(input, filepath, createImportResolver(root)); end(req, res, output, "text/javascript"); return; } @@ -136,11 +150,11 @@ export class PreviewServer { } throw new HttpError(`Not found: ${pathname}`, 404); } else if (pathname.startsWith("/_file/")) { - const path = pathname.slice("/_file".length); - const filepath = join(root, path); + const path = urlPathToFilePath(pathname.slice("/_file".length)); + const filepath = fileJoin(root, path); try { await access(filepath, constants.R_OK); - send(req, pathname.slice("/_file".length), {root}).pipe(res); + send(req, unUrlPath(pathname.slice("/_file".length)), {root: unFilePath(root)}).pipe(res); return; } catch (error) { if (!isEnoent(error)) throw error; @@ -150,7 +164,7 @@ export class PreviewServer { const loader = Loader.find(root, path); if (loader) { try { - send(req, await loader.load(), {root}).pipe(res); + send(req, unFilePath(await loader.load()), {root: unFilePath(root)}).pipe(res); return; } catch (error) { if (!isEnoent(error)) throw error; @@ -158,22 +172,22 @@ export class PreviewServer { } throw new HttpError(`Not found: ${pathname}`, 404); } else { - if ((pathname = normalize(pathname)).startsWith("..")) throw new Error("Invalid path: " + pathname); - let path = join(root, pathname); + if ((pathname = urlNormalize(pathname)).startsWith("..")) throw new Error("Invalid path: " + pathname); + let path = fileJoin(root, urlPathToFilePath(pathname)); // If this path is for /index, redirect to the parent directory for a // tidy path. (This must be done before implicitly adding /index below!) // Respect precedence of dir/index.md over dir.md in choosing between // dir/ and dir! - if (basename(path, ".html") === "index") { + if (fileBasename(path, ".html") === "index") { try { - await stat(join(dirname(path), "index.md")); - res.writeHead(302, {Location: join(dirname(pathname), "/") + url.search}); + await stat(fileJoin(fileDirname(path), "index.md")); + res.writeHead(302, {Location: urlJoin(urlDirname(pathname), "/") + url.search}); res.end(); return; } catch (error) { if (!isEnoent(error)) throw error; - res.writeHead(302, {Location: dirname(pathname) + url.search}); + res.writeHead(302, {Location: urlDirname(pathname) + url.search}); res.end(); return; } @@ -182,14 +196,14 @@ export class PreviewServer { // If this path resolves to a directory, then add an implicit /index to // the end of the path, assuming that the corresponding index.md exists. 
try { - if ((await stat(path)).isDirectory() && (await stat(join(path, "index.md"))).isFile()) { + if ((await stat(path)).isDirectory() && (await stat(fileJoin(path, "index.md"))).isFile()) { if (!pathname.endsWith("/")) { res.writeHead(302, {Location: pathname + "/" + url.search}); res.end(); return; } - pathname = join(pathname, "index"); - path = join(path, "index"); + pathname = urlJoin(pathname, "index"); + path = fileJoin(path, "index"); } } catch (error) { if (!isEnoent(error)) throw error; // internal error @@ -197,8 +211,8 @@ export class PreviewServer { // If this path ends with .html, then redirect to drop the .html. TODO: // Check for the existence of the .md file first. - if (extname(path) === ".html") { - res.writeHead(302, {Location: join(dirname(pathname), basename(pathname, ".html")) + url.search}); + if (fileExtname(path) === ".html") { + res.writeHead(302, {Location: urlJoin(urlDirname(pathname), urlBasename(pathname, ".html")) + url.search}); res.end(); return; } @@ -206,7 +220,7 @@ export class PreviewServer { // Otherwise, serve the corresponding Markdown file, if it exists. // Anything else should 404; static files should be matched above. try { - const {html} = await renderPreview(path + ".md", {path: pathname, ...config}); + const {html} = await renderPreview(FilePath(path + ".md"), {path: pathname, ...config}); end(req, res, html, "text/html"); } catch (error) { if (!isEnoent(error)) throw error; // internal error @@ -222,7 +236,7 @@ export class PreviewServer { } if (req.method === "GET" && res.statusCode === 404) { try { - const {html} = await renderPreview(join(root, "404.md"), {path: "/404", ...config}); + const {html} = await renderPreview(fileJoin(root, "404.md"), {path: UrlPath("/404"), ...config}); end(req, res, html, "text/html"); return; } catch { @@ -265,48 +279,50 @@ function end(req: IncomingMessage, res: ServerResponse, content: string, type: s } } -function getWatchPaths(parseResult: ParseResult): string[] { - const paths: string[] = []; +function getWatchPaths(parseResult: ParseResult): UrlPath[] { + const paths: UrlPath[] = []; const {files, imports} = parseResult; for (const f of files) paths.push(f.name); for (const i of imports) paths.push(i.name); return paths; } -export function getPreviewStylesheet(path: string, data: ParseResult["data"], style: Config["style"]): string | null { +export function getPreviewStylesheet(path: UrlPath, data: ParseResult["data"], style: Config["style"]): UrlPath | null { + const filePath = urlPathToFilePath(path); try { - style = mergeStyle(path, data?.style, data?.theme, style); + style = mergeStyle(filePath, data?.style, data?.theme, style); } catch (error) { console.error(red(String(error))); - return relativeUrl(path, "/_observablehq/theme-.css"); + return relativeUrl(path, UrlPath("/_observablehq/theme-.css")); } return !style ? null : "path" in style - ? relativeUrl(path, `/_import/${style.path}`) - : relativeUrl(path, `/_observablehq/theme-${style.theme.join(",")}.css`); + ? 
relativeUrl(path, UrlPath(`/_import/${style.path}`)) + : relativeUrl(path, UrlPath(`/_observablehq/theme-${style.theme.join(",")}.css`)); } function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defaultStyle}: Config) { - let path: string | null = null; + let path: FilePath | null = null; let current: ParseResult | null = null; - let stylesheets: Set | null = null; + let stylesheets: Set | null = null; let markdownWatcher: FSWatcher | null = null; let attachmentWatcher: FileWatchers | null = null; let emptyTimeout: ReturnType | null = null; console.log(faint("socket open"), req.url); - async function getStylesheets({cells, data}: ParseResult): Promise> { + async function getStylesheets({cells, data}: ParseResult): Promise> { const inputs = new Set(); + const urlPath = filePathToUrlPath(path!); for (const cell of cells) cell.inputs?.forEach(inputs.add, inputs); const stylesheets = await getImplicitStylesheets(getImplicitSpecifiers(inputs)); - const style = getPreviewStylesheet(path!, data, defaultStyle); + const style = getPreviewStylesheet(urlPath, data, defaultStyle); if (style) stylesheets.add(style); - return new Set(Array.from(stylesheets, (href) => resolveStylesheet(path!, href))); + return new Set(Array.from(stylesheets, (href) => resolveStylesheet(urlPath, href))); } - function refreshAttachment(name: string) { + function refreshAttachment(name: UrlPath) { const {cells} = current!; if (cells.some((cell) => cell.imports?.some((i) => i.name === name))) { watcher("change"); // trigger re-compilation of JavaScript to get new import hashes @@ -324,7 +340,7 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa case "rename": { markdownWatcher?.close(); try { - markdownWatcher = watch(join(root, path), (event) => watcher(event)); + markdownWatcher = watch(fileJoin(root, path), (event) => watcher(event)); } catch (error) { if (!isEnoent(error)) throw error; console.error(`file no longer exists: ${path}`); @@ -335,7 +351,7 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa break; } case "change": { - const updated = await parseMarkdown(join(root, path), {root, path}); + const updated = await parseMarkdown(fileJoin(root, path), {root, path: filePathToUrlPath(path)}); // delay to avoid a possibly-empty file if (!force && updated.html === "") { if (!emptyTimeout) { @@ -364,17 +380,18 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa } } - async function hello({path: initialPath, hash: initialHash}: {path: string; hash: string}): Promise { - if (markdownWatcher || attachmentWatcher) throw new Error("already watching"); - path = initialPath; - if (!(path = normalize(path)).startsWith("/")) throw new Error("Invalid path: " + initialPath); - if (path.endsWith("/")) path += "index"; - path += ".md"; - current = await parseMarkdown(join(root, path), {root, path}); + async function hello({path: initialPath, hash: initialHash}: {path: UrlPath; hash: string}): Promise { + let urlPath = urlNormalize(initialPath); + if (!urlPath?.startsWith("/")) throw new Error(`Invalid path: ${initialPath}`); + if (urlPath.endsWith("/")) urlPath = UrlPath(urlPath + "index"); + urlPath = UrlPath(urlPath + ".md"); + path = urlPathToFilePath(urlPath); + + current = await parseMarkdown(fileJoin(root, path), {root, path: filePathToUrlPath(path)}); if (current.hash !== initialHash) return void send({type: "reload"}); stylesheets = await getStylesheets(current); attachmentWatcher = await FileWatchers.of(root, 
path, getWatchPaths(current), refreshAttachment); - markdownWatcher = watch(join(root, path), (event) => watcher(event)); + markdownWatcher = watch(fileJoin(root, path), (event) => watcher(event)); } socket.on("message", async (data) => { diff --git a/src/render.ts b/src/render.ts index 02e112a02..66881ca32 100644 --- a/src/render.ts +++ b/src/render.ts @@ -1,4 +1,5 @@ import {parseHTML} from "linkedom"; +import {FilePath, UrlPath, unUrlPath, urlPathToFilePath} from "./brandedPath.js"; import type {Config, Page, Script, Section} from "./config.js"; import {mergeToc} from "./config.js"; import {getClientPath} from "./files.js"; @@ -21,11 +22,11 @@ export interface Render { } export interface RenderOptions extends Config { - root: string; - path: string; + root: FilePath; + path: UrlPath; } -export async function renderPreview(sourcePath: string, options: RenderOptions): Promise { +export async function renderPreview(sourcePath: FilePath, options: RenderOptions): Promise { const parseResult = await parseMarkdown(sourcePath, options); return { html: await render(parseResult, {...options, preview: true}), @@ -35,7 +36,7 @@ export async function renderPreview(sourcePath: string, options: RenderOptions): }; } -export async function renderServerless(sourcePath: string, options: RenderOptions): Promise { +export async function renderServerless(sourcePath: FilePath, options: RenderOptions): Promise { const parseResult = await parseMarkdown(sourcePath, options); return { html: await render(parseResult, options), @@ -62,7 +63,7 @@ async function render(parseResult: ParseResult, options: RenderOptions & RenderI const sidebar = parseResult.data?.sidebar !== undefined ? Boolean(parseResult.data.sidebar) : options.sidebar; const toc = mergeToc(parseResult.data?.toc, options.toc); return String(html` -${path === "/404" ? html`\n` : ""} +${path === UrlPath("/404") ? html`\n` : ""} ${ parseResult.title || title @@ -71,7 +72,7 @@ ${ .join(" | ")}\n` : "" }${await renderHead(parseResult, options, path, createImportResolver(root, "_import"))}${ - path === "/404" + path === UrlPath("/404") ? html.unsafe(`\n` : "" } @@ -142,7 +143,7 @@ async function renderSidebar(title = "Home", pages: (Page | Section)[], path: st `; } @@ -177,34 +178,34 @@ function renderToc(headers: Header[], label: string): Html { `; } -function renderListItem(p: Page, path: string): Html { +function renderListItem(p: Page, path: UrlPath): Html { return html`\n
  • ${p.name}
  • `; } -function prettyPath(path: string): string { +function prettyPath(path: UrlPath): UrlPath { return path.replace(/\/index$/, "/") || "/"; } async function renderHead( parseResult: ParseResult, options: Pick, - path: string, + path: UrlPath, resolver: ImportResolver ): Promise { const scripts = options.scripts; const head = parseResult.data?.head !== undefined ? parseResult.data.head : options.head; - const stylesheets = new Set(["https://fonts.googleapis.com/css2?family=Source+Serif+Pro:ital,wght@0,400;0,600;0,700;1,400;1,600;1,700&display=swap"]); // prettier-ignore + const stylesheets = new Set([UrlPath("https://fonts.googleapis.com/css2?family=Source+Serif+Pro:ital,wght@0,400;0,600;0,700;1,400;1,600;1,700&display=swap")]); // prettier-ignore const style = getPreviewStylesheet(path, parseResult.data, options.style); if (style) stylesheets.add(style); const specifiers = new Set(["npm:@observablehq/runtime", "npm:@observablehq/stdlib"]); - for (const {name} of parseResult.imports) specifiers.add(name); + for (const {name} of parseResult.imports) specifiers.add(unUrlPath(name)); const inputs = new Set(parseResult.cells.flatMap((cell) => cell.inputs ?? [])); addImplicitSpecifiers(specifiers, inputs); await addImplicitStylesheets(stylesheets, specifiers); - const preloads = new Set([relativeUrl(path, "/_observablehq/client.js")]); - for (const specifier of specifiers) preloads.add(await resolver(path, specifier)); + const preloads = new Set([relativeUrl(path, UrlPath("/_observablehq/client.js"))]); + for (const specifier of specifiers) preloads.add(await resolver(urlPathToFilePath(path), specifier)); await resolveModulePreloads(preloads); return html`${ Array.from(stylesheets) @@ -221,28 +222,30 @@ async function renderHead( }${head ? html`\n${html.unsafe(head)}` : null}${html.unsafe(scripts.map((s) => renderScript(s, path)).join(""))}`; } -export function resolveStylesheet(path: string, href: string): string { +export function resolveStylesheet(path: UrlPath, href: UrlPath): UrlPath { return href.startsWith("observablehq:") - ? relativeUrl(path, `/_observablehq/${href.slice("observablehq:".length)}`) + ? 
relativeUrl(path, UrlPath(`/_observablehq/${href.slice("observablehq:".length)}`)) : href; } -function renderScript(script: Script, path: string): Html { +function renderScript(script: Script, path: UrlPath): Html { return html`\n`; } -function renderStylesheet(href: string): Html { - return html`\n`; +function renderStylesheet(href: UrlPath): Html { + return html`\n`; } -function renderStylesheetPreload(href: string): Html { - return html`\n`; +function renderStylesheetPreload(href: UrlPath): Html { + return html`\n`; } -function renderModulePreload(href: string): Html { - const integrity: string | undefined = resolveModuleIntegrity(href); +function renderModulePreload(href: UrlPath): Html { + const integrity: string | undefined = resolveModuleIntegrity(unUrlPath(href)); return html`\n`; } @@ -252,7 +255,7 @@ function renderHeader({header}: Pick, data: ParseResult["data" } function renderFooter( - path: string, + path: UrlPath, options: Pick, data: ParseResult["data"] ): Html | null { @@ -267,10 +270,10 @@ function renderFooter( : null; } -function renderPager(path: string, {prev, next}: PageLink): Html { +function renderPager(path: UrlPath, {prev, next}: PageLink): Html { return html`\n`; } -function renderRel(path: string, page: Page, rel: "prev" | "next"): Html { +function renderRel(path: UrlPath, page: Page, rel: "prev" | "next"): Html { return html`${page.name}`; } diff --git a/src/rollup.ts b/src/rollup.ts index 547a1126f..ef41af001 100644 --- a/src/rollup.ts +++ b/src/rollup.ts @@ -5,6 +5,7 @@ import {build} from "esbuild"; import type {AstNode, OutputChunk, Plugin, ResolveIdResult} from "rollup"; import {rollup} from "rollup"; import esbuild from "rollup-plugin-esbuild"; +import {FilePath, UrlPath, filePathToUrlPath, unFilePath, unUrlPath} from "./brandedPath.js"; import {getClientPath} from "./files.js"; import {getStringLiteralValue, isStringLiteral} from "./javascript/features.js"; import {isPathImport, resolveNpmImport} from "./javascript/imports.js"; @@ -13,8 +14,8 @@ import {Sourcemap} from "./sourcemap.js"; import {THEMES, renderTheme} from "./theme.js"; import {relativeUrl} from "./url.js"; -const STYLE_MODULES = { - "observablehq:default.css": getClientPath("./src/style/default.css"), +const STYLE_MODULES: Record = { + "observablehq:default.css": getClientPath(FilePath("./src/style/default.css")), ...Object.fromEntries(THEMES.map(({name, path}) => [`observablehq:theme-${name}.css`, path])) }; @@ -25,20 +26,21 @@ function rewriteInputsNamespace(code: string) { return code.replace(/\b__ns__\b/g, "inputs-3a86ea"); } -export async function bundleStyles({path, theme}: {path?: string; theme?: string[]}): Promise { +export async function bundleStyles({path, theme}: {path?: FilePath; theme?: string[]}): Promise { + const compatiblePath: string | undefined = path ? unFilePath(path) : undefined; const result = await build({ bundle: true, - ...(path ? {entryPoints: [path]} : {stdin: {contents: renderTheme(theme!), loader: "css"}}), + ...(compatiblePath ? 
{entryPoints: [compatiblePath]} : {stdin: {contents: renderTheme(theme!), loader: "css"}}), write: false, - alias: STYLE_MODULES + alias: STYLE_MODULES as unknown as Record }); const text = result.outputFiles[0].text; return rewriteInputsNamespace(text); // TODO only for inputs } -export async function rollupClient(clientPath: string, {minify = false} = {}): Promise { +export async function rollupClient(clientPath: FilePath, {minify = false} = {}): Promise { const bundle = await rollup({ - input: clientPath, + input: unFilePath(clientPath), external: [/^https:/], plugins: [ nodeResolve({resolveOnly: BUNDLED_MODULES}), @@ -76,44 +78,77 @@ function rewriteTypeScriptImports(code: string): string { return code.replace(/(?<=\bimport\(([`'"])[\w./]+)\.ts(?=\1\))/g, ".js"); } -function importResolve(clientPath: string): Plugin { +function importResolve(clientPath: FilePath): Plugin { + const urlClientPath = filePathToUrlPath(clientPath); return { name: "resolve-import", - resolveId: (specifier) => resolveImport(clientPath, specifier), - resolveDynamicImport: (specifier) => resolveImport(clientPath, specifier) + resolveId: (specifier) => resolveImport(urlClientPath, specifier), + resolveDynamicImport: (specifier) => resolveImport(urlClientPath, specifier) }; } // TODO Consolidate with createImportResolver. -async function resolveImport(source: string, specifier: string | AstNode): Promise { +async function resolveImport(source: UrlPath, specifier: string | AstNode): Promise { return typeof specifier !== "string" ? null : specifier.startsWith("observablehq:") - ? {id: relativeUrl(source, getClientPath(`./src/client/${specifier.slice("observablehq:".length)}.js`)), external: true} // prettier-ignore + ? {id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath(`./src/client/${specifier.slice("observablehq:".length)}.js`))))), external: true} // prettier-ignore : specifier === "npm:@observablehq/runtime" - ? {id: relativeUrl(source, getClientPath("./src/client/runtime.js")), external: true} + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/runtime.js"))))), + external: true + } : specifier === "npm:@observablehq/stdlib" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib.js")), external: true} + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib.js"))))), + external: true + } : specifier === "npm:@observablehq/dot" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/dot.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/dot.js"))))), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/duckdb" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/duckdb.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/duckdb.js"))))), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/inputs" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/inputs.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/inputs.js"))))), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/mermaid" - ? 
{id: relativeUrl(source, getClientPath("./src/client/stdlib/mermaid.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath( + relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/mermaid.js")))) + ), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/tex" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/tex.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/tex.js"))))), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/sqlite" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/sqlite.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/sqlite.js"))))), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/xlsx" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/xlsx.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/xlsx.js"))))), + external: true + } // TODO publish to npm : specifier === "npm:@observablehq/zip" - ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/zip.js")), external: true} // TODO publish to npm + ? { + id: unUrlPath(relativeUrl(source, filePathToUrlPath(getClientPath(FilePath("./src/client/stdlib/zip.js"))))), + external: true + } // TODO publish to npm : specifier.startsWith("npm:") - ? {id: await resolveNpmImport(specifier.slice("npm:".length))} - : source !== specifier && !isPathImport(specifier) && !BUNDLED_MODULES.includes(specifier) - ? {id: await resolveNpmImport(specifier), external: true} + ? {id: unUrlPath(await resolveNpmImport(specifier.slice("npm:".length)))} + : source !== UrlPath(specifier) && !isPathImport(specifier) && !BUNDLED_MODULES.includes(specifier) + ? 
{id: unUrlPath(await resolveNpmImport(specifier)), external: true} : null; } diff --git a/src/search.ts b/src/search.ts index e833fe9fb..4f91bd993 100644 --- a/src/search.ts +++ b/src/search.ts @@ -1,6 +1,6 @@ -import {basename, join} from "node:path"; import he from "he"; import MiniSearch from "minisearch"; +import {UrlPath, fileBasename, fileJoin, filePathToUrlPath} from "./brandedPath.js"; import type {Config} from "./config.js"; import {visitMarkdownFiles} from "./files.js"; import type {Logger} from "./logger.js"; @@ -31,7 +31,7 @@ export async function searchIndex(config: Config, effects = defaultEffects): Pro if (indexCache.has(config) && indexCache.get(config).freshUntil > +new Date()) return indexCache.get(config).json; // Get all the listed pages (which are indexed by default) - const pagePaths = new Set(["/index"]); + const pagePaths = new Set([UrlPath("/index")]); for (const p of pages) { if ("path" in p) pagePaths.add(p.path); else for (const {path} of p.pages) pagePaths.add(path); @@ -40,12 +40,12 @@ export async function searchIndex(config: Config, effects = defaultEffects): Pro // Index the pages const index = new MiniSearch(indexOptions); for await (const file of visitMarkdownFiles(root)) { - const path = join(root, file); - const {html, title, data} = await parseMarkdown(path, {root, path: "/" + file.slice(0, -3)}); + const path = fileJoin(root, file); + const {html, title, data} = await parseMarkdown(path, {root, path: UrlPath("/" + file.slice(0, -3))}); // Skip pages that opt-out of indexing, and skip unlisted pages unless // opted-in. We only log the first case. - const listed = pagePaths.has(`/${file.slice(0, -3)}`); + const listed = pagePaths.has(UrlPath(`/${file.slice(0, -3)}`)); const indexed = data?.index === undefined ? listed : Boolean(data.index); if (!indexed) { if (listed) effects.logger.log(`${faint("skip")} ${file}`); @@ -54,7 +54,7 @@ export async function searchIndex(config: Config, effects = defaultEffects): Pro // This is the (top-level) serving path to the indexed page. There’s // implicitly a leading slash here. - const id = file.slice(0, basename(file) === "index.md" ? -"index.md".length : -3); + const id = filePathToUrlPath(file.slice(0, fileBasename(file) === "index.md" ? 
-"index.md".length : -3)); // eslint-disable-next-line import/no-named-as-default-member const text = he diff --git a/src/telemetry.ts b/src/telemetry.ts index 588cd5cc2..321e1d730 100644 --- a/src/telemetry.ts +++ b/src/telemetry.ts @@ -1,8 +1,8 @@ import {exec} from "node:child_process"; import {createHash, randomUUID} from "node:crypto"; -import {readFile, writeFile} from "node:fs/promises"; -import {join} from "node:path"; import os from "os"; +import {readFile, writeFile} from "./brandedFs.js"; +import {fileJoin} from "./brandedPath.js"; import {CliError} from "./error.js"; import type {Logger} from "./logger.js"; import {getObservableUiOrigin} from "./observableApiClient.js"; @@ -137,7 +137,7 @@ export class Telemetry { private async getPersistentId(name: string, generator = randomUUID) { const {readFile, writeFile} = this.effects; - const file = join(os.homedir(), ".observablehq"); + const file = fileJoin(os.homedir(), ".observablehq"); if (!this._config) { this._config = readFile(file, "utf8") .then(JSON.parse) diff --git a/src/theme.ts b/src/theme.ts index 4ecb0f68e..56fd69c67 100644 --- a/src/theme.ts +++ b/src/theme.ts @@ -1,37 +1,38 @@ +import {FilePath} from "./brandedPath.js"; import {getClientPath} from "./files.js"; export interface Theme { name: string; - path: string; + path: FilePath; light?: boolean; dark?: boolean; index: number; } const LIGHT_THEMES: Omit[] = [ - {name: "air", path: getClientPath("./src/style/theme-air.css"), light: true}, - {name: "cotton", path: getClientPath("./src/style/theme-cotton.css"), light: true}, - {name: "glacier", path: getClientPath("./src/style/theme-glacier.css"), light: true}, - {name: "parchment", path: getClientPath("./src/style/theme-parchment.css"), light: true} + {name: "air", path: getClientPath(FilePath("./src/style/theme-air.css")), light: true}, + {name: "cotton", path: getClientPath(FilePath("./src/style/theme-cotton.css")), light: true}, + {name: "glacier", path: getClientPath(FilePath("./src/style/theme-glacier.css")), light: true}, + {name: "parchment", path: getClientPath(FilePath("./src/style/theme-parchment.css")), light: true} ]; const DARK_THEMES: Omit[] = [ - {name: "coffee", path: getClientPath("./src/style/theme-coffee.css"), dark: true}, - {name: "deep-space", path: getClientPath("./src/style/theme-deep-space.css"), dark: true}, - {name: "ink", path: getClientPath("./src/style/theme-ink.css"), dark: true}, - {name: "midnight", path: getClientPath("./src/style/theme-midnight.css"), dark: true}, - {name: "near-midnight", path: getClientPath("./src/style/theme-near-midnight.css"), dark: true}, - {name: "ocean-floor", path: getClientPath("./src/style/theme-ocean-floor.css"), dark: true}, - {name: "slate", path: getClientPath("./src/style/theme-slate.css"), dark: true}, - {name: "stark", path: getClientPath("./src/style/theme-stark.css"), dark: true}, - {name: "sun-faded", path: getClientPath("./src/style/theme-sun-faded.css"), dark: true} + {name: "coffee", path: getClientPath(FilePath("./src/style/theme-coffee.css")), dark: true}, + {name: "deep-space", path: getClientPath(FilePath("./src/style/theme-deep-space.css")), dark: true}, + {name: "ink", path: getClientPath(FilePath("./src/style/theme-ink.css")), dark: true}, + {name: "midnight", path: getClientPath(FilePath("./src/style/theme-midnight.css")), dark: true}, + {name: "near-midnight", path: getClientPath(FilePath("./src/style/theme-near-midnight.css")), dark: true}, + {name: "ocean-floor", path: getClientPath(FilePath("./src/style/theme-ocean-floor.css")), 
dark: true}, + {name: "slate", path: getClientPath(FilePath("./src/style/theme-slate.css")), dark: true}, + {name: "stark", path: getClientPath(FilePath("./src/style/theme-stark.css")), dark: true}, + {name: "sun-faded", path: getClientPath(FilePath("./src/style/theme-sun-faded.css")), dark: true} ]; export const THEMES: Theme[] = [ ...LIGHT_THEMES, ...DARK_THEMES, - {name: "alt", path: getClientPath("./src/style/theme-alt.css")}, - {name: "wide", path: getClientPath("./src/style/theme-wide.css")} + {name: "alt", path: getClientPath(FilePath("./src/style/theme-alt.css"))}, + {name: "wide", path: getClientPath(FilePath("./src/style/theme-wide.css"))} ].map((theme, i) => ({ ...theme, index: i diff --git a/src/tty.ts b/src/tty.ts index 1b09aba11..d5e56ef84 100644 --- a/src/tty.ts +++ b/src/tty.ts @@ -14,10 +14,10 @@ export const blue = color(34, 39); export const magenta = color(35, 39); export const cyan = color(36, 39); -export type TtyColor = (text: string) => string; +export type TtyColor = (text: string | {toString(): string}) => string; function color(code: number, reset: number): TtyColor { - return process.stdout.isTTY ? (text: string) => `\x1b[${code}m${text}\x1b[${reset}m` : String; + return process.stdout.isTTY ? (text) => `\x1b[${code}m${text}\x1b[${reset}m` : String; } export interface TtyEffects { diff --git a/src/url.ts b/src/url.ts index cb554555a..3b47d4262 100644 --- a/src/url.ts +++ b/src/url.ts @@ -1,4 +1,4 @@ -import {dirname, join} from "node:path"; +import {FilePath, UrlPath, fileDirname, fileJoin, unUrlPath, urlJoin, urlPathToFilePath} from "./brandedPath.js"; /** * Returns the normalized relative path from "/file/path/to/a" to @@ -6,17 +6,17 @@ import {dirname, join} from "node:path"; * are prefixed with "./", and paths that start without a slash are considered * from the root. */ -export function relativeUrl(source: string, target: string): string { - if (/^\w+:/.test(target)) return target; - const from = join("/", source).split(/[/]+/g).slice(0, -1); - const to = join("/", target).split(/[/]+/g); +export function relativeUrl(source: UrlPath, target: UrlPath): UrlPath { + if (/^\w+:/.test(unUrlPath(target))) return target; + const from = urlJoin("/", source).split(/[/]+/g).slice(0, -1); + const to = urlJoin("/", target).split(/[/]+/g); const f = to.pop()!; const m = from.length; const n = Math.min(m, to.length); let i = 0; while (i < n && from[i] === to[i]) ++i; const k = m - i; - return (k ? "../".repeat(k) : "./") + to.slice(i).concat(f).join("/"); + return UrlPath((k ? "../".repeat(k) : "./") + to.slice(i).concat(f).join("/")); } /** @@ -24,9 +24,18 @@ export function relativeUrl(source: string, target: string): string { * defaults to ".", assuming that the target is a relative path such as an * import or fetch from the specified source. */ -export function resolvePath(source: string, target: string): string; -export function resolvePath(root: string, source: string, target: string): string; -export function resolvePath(root: string, source: string, target?: string): string { - if (target === undefined) (target = source), (source = root), (root = "."); - return join(root, target.startsWith("/") ? "." 
: dirname(source), target); +export function resolvePath(source: FilePath, target: UrlPath): FilePath; +export function resolvePath(root: FilePath, source: FilePath, target: UrlPath): FilePath; +export function resolvePath(arg1: FilePath, arg2: FilePath | UrlPath, arg3?: UrlPath): FilePath { + let root: FilePath, source: FilePath, target: UrlPath; + if (arg3 === undefined) { + root = FilePath("."); + source = arg1; + target = arg2 as UrlPath; + } else { + root = arg1; + source = arg2 as FilePath; + target = arg3; + } + return fileJoin(root, target.startsWith("/") ? "." : fileDirname(source), urlPathToFilePath(target)); } diff --git a/templates/default/package.json b/templates/default/package.json index eb47bc57b..6b5665504 100644 --- a/templates/default/package.json +++ b/templates/default/package.json @@ -2,8 +2,8 @@ "type": "module", "private": true, "scripts": { - "clean": "rm -rf docs/.observablehq/cache", - "build": "rm -rf dist && observable build", + "clean": "rimraf docs/.observablehq/cache", + "build": "rimraf dist && observable build", "dev": "observable preview", "deploy": "observable deploy", "observable": "observable" @@ -13,6 +13,9 @@ "d3-dsv": "^3.0.1", "d3-time-format": "^4.1.0" }, + "devDependencies": { + "rimraf": "^5.0.5" + }, "engines": { "node": ">=20.6" } diff --git a/templates/empty/package.json b/templates/empty/package.json index eb47bc57b..6b5665504 100644 --- a/templates/empty/package.json +++ b/templates/empty/package.json @@ -2,8 +2,8 @@ "type": "module", "private": true, "scripts": { - "clean": "rm -rf docs/.observablehq/cache", - "build": "rm -rf dist && observable build", + "clean": "rimraf docs/.observablehq/cache", + "build": "rimraf dist && observable build", "dev": "observable preview", "deploy": "observable deploy", "observable": "observable" @@ -13,6 +13,9 @@ "d3-dsv": "^3.0.1", "d3-time-format": "^4.1.0" }, + "devDependencies": { + "rimraf": "^5.0.5" + }, "engines": { "node": ">=20.6" } diff --git a/test/build-test.ts b/test/build-test.ts index a9c59508f..e4152e04d 100644 --- a/test/build-test.ts +++ b/test/build-test.ts @@ -1,8 +1,9 @@ import assert from "node:assert"; -import {existsSync, readdirSync, statSync} from "node:fs"; -import {open, readFile, rm} from "node:fs/promises"; -import {join, normalize, relative} from "node:path"; +import os from "node:os"; import {difference} from "d3-array"; +import {existsSync, open, readFile, readdirSync, rm, statSync} from "../src/brandedFs.js"; +import {fileJoin, fileNormalize, fileRelative, unFilePath} from "../src/brandedPath.js"; +import type {FilePath} from "../src/brandedPath.js"; import {FileBuildEffects, build} from "../src/build.js"; import {readConfig, setCurrentDate} from "../src/config.js"; import {mockJsDelivr} from "./mocks/jsdelivr.js"; @@ -17,17 +18,20 @@ describe("build", async () => { mockJsDelivr(); // Each sub-directory of test/input/build is a test case. - const inputRoot = "test/input/build"; - const outputRoot = "test/output/build"; + const inputRoot = fileJoin("test", "input", "build"); + const outputRoot = fileJoin("test", "output", "build"); for (const name of readdirSync(inputRoot)) { - const path = join(inputRoot, name); + const path = fileJoin(inputRoot, name); if (!statSync(path).isDirectory()) continue; const only = name.startsWith("only."); - const skip = name.startsWith("skip."); + const skip = + name.startsWith("skip.") || + (name.endsWith(".posix") && os.platform() === "win32") || + (name.endsWith(".win32") && os.platform() !== "win32"); const outname = only || skip ? 
name.slice(5) : name; - (only ? it.only : skip ? it.skip : it)(`${inputRoot}/${name}`, async () => { - const actualDir = join(outputRoot, `${outname}-changed`); - const expectedDir = join(outputRoot, outname); + (only ? it.only : skip ? it.skip : it)(unFilePath(fileJoin(inputRoot, name)), async () => { + const actualDir = fileJoin(outputRoot, `${outname}-changed`); + const expectedDir = fileJoin(outputRoot, outname); const generate = !existsSync(expectedDir) && process.env.CI !== "true"; const outputDir = generate ? expectedDir : actualDir; const addPublic = name.endsWith("-public"); @@ -41,10 +45,10 @@ describe("build", async () => { // files because they change often; replace them with empty files so we // can at least check that the expected files exist. if (addPublic) { - const publicDir = join(outputDir, "_observablehq"); + const publicDir = fileJoin(outputDir, "_observablehq"); for (const file of findFiles(publicDir)) { if (file.endsWith(".json")) continue; // e.g., minisearch.json - await (await open(join(publicDir, file), "w")).close(); + await (await open(fileJoin(publicDir, file), "w")).close(); } } @@ -58,8 +62,8 @@ describe("build", async () => { if (unexpectedFiles.size > 0) assert.fail(`Unexpected output files: ${Array.from(unexpectedFiles).join(", ")}`); for (const path of expectedFiles) { - const actual = await readFile(join(actualDir, path), "utf8"); - const expected = await readFile(join(expectedDir, path), "utf8"); + const actual = (await readFile(fileJoin(actualDir, path), "utf8")).replaceAll("\r\n", "\n"); + const expected = (await readFile(fileJoin(expectedDir, path), "utf8")).replaceAll("\r\n", "\n"); assert.ok(actual === expected, `${path} must match snapshot`); } @@ -68,29 +72,29 @@ } }); -function* findFiles(root: string): Iterable { +function* findFiles(root: FilePath): Iterable { const visited = new Set(); - const queue: string[] = [(root = normalize(root))]; + const queue: FilePath[] = [(root = fileNormalize(root))]; for (const path of queue) { const status = statSync(path); if (status.isDirectory()) { if (visited.has(status.ino)) throw new Error(`Circular directory: ${path}`); visited.add(status.ino); for (const entry of readdirSync(path)) { - if (entry === ".DS_Store") continue; // macOS - queue.push(join(path, entry)); + if (unFilePath(entry) === ".DS_Store") continue; // macOS + queue.push(fileJoin(path, entry)); } } else { - yield relative(root, path); + yield fileRelative(root, path); } } } class TestEffects extends FileBuildEffects { - constructor(outputRoot: string) { + constructor(outputRoot: FilePath) { super(outputRoot, silentEffects); } - async writeFile(outputPath: string, contents: string | Buffer): Promise { + async writeFile(outputPath: FilePath, contents: string | Buffer): Promise { if (typeof contents === "string" && outputPath.endsWith(".html")) { contents = contents.replace(/^(\s*[…]
[new file test/output/build/archives.win32/tar.html: HTML build snapshot titled "Tar"; its diff header and markup were stripped in this copy and are omitted]
diff --git a/test/output/build/archives.win32/zip.html b/test/output/build/archives.win32/zip.html new file mode 100644 index 000000000..70741d696 --- /dev/null +++ b/test/output/build/archives.win32/zip.html @@ -0,0 +1,57 @@
[new HTML build snapshot titled "Zip"; the 57 added lines of markup were stripped in this copy and are omitted]
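The hunks above import FilePath, UrlPath, fileJoin, urlJoin, fileDirname, unFilePath, unUrlPath, filePathToUrlPath and urlPathToFilePath from a new src/brandedPath.js, and the test and telemetry changes import readFile, writeFile, existsSync, open, readdirSync, rm and statSync from a new src/brandedFs.js; neither module appears in this part of the diff. For orientation, here is a minimal sketch of the branded-string pattern those imports suggest. The exported names are taken from the diff; the bodies, signatures and normalization rules are assumptions, not the project's actual implementation.

// brandedPath.ts (sketch only: names from the diff, bodies assumed)
import {dirname, join, posix, sep} from "node:path";

declare const FilePathBrand: unique symbol;
declare const UrlPathBrand: unique symbol;

// A FilePath uses the OS-native separator; a UrlPath is always "/"-separated.
export type FilePath = string & {readonly [FilePathBrand]: true};
export type UrlPath = string & {readonly [UrlPathBrand]: true};

// Constructor-style casts, as used throughout the hunks: FilePath("./src/x.css").
export function FilePath(path: string): FilePath {
  return path as FilePath;
}
export function UrlPath(path: string): UrlPath {
  return path as UrlPath;
}

// Unwrap back to plain strings for APIs that want them (esbuild, rollup, node:fs).
export const unFilePath = (path: FilePath): string => path;
export const unUrlPath = (path: UrlPath): string => path;

// Branded wrappers over node:path keep joins in the right "world"; fileBasename,
// fileRelative, fileNormalize and urlNormalize would follow the same shape.
export function fileJoin(...paths: (FilePath | string)[]): FilePath {
  return FilePath(join(...paths));
}
export function fileDirname(path: FilePath): FilePath {
  return FilePath(dirname(path));
}
export function urlJoin(...paths: (UrlPath | string)[]): UrlPath {
  return UrlPath(posix.join(...paths));
}

// Converting between the two worlds is where Windows back-slashes get normalized.
export function filePathToUrlPath(path: FilePath): UrlPath {
  return UrlPath(path.split(sep).join("/"));
}
export function urlPathToFilePath(path: UrlPath): FilePath {
  return FilePath(path.split("/").join(sep));
}

A brandedFs.js companion would then simply forward to node:fs with FilePath-typed signatures, for example:

// brandedFs.ts (sketch only: a plausible shape, not the diff's implementation)
import {existsSync as fsExistsSync, readdirSync as fsReaddirSync, statSync as fsStatSync} from "node:fs";
import {readFile as fsReadFile, writeFile as fsWriteFile} from "node:fs/promises";
import {FilePath, unFilePath} from "./brandedPath.js";

export function existsSync(path: FilePath): boolean {
  return fsExistsSync(unFilePath(path));
}
// Returning FilePath[] is what lets the build test compare unFilePath(entry) === ".DS_Store".
export function readdirSync(path: FilePath): FilePath[] {
  return fsReaddirSync(unFilePath(path)).map(FilePath);
}
export function statSync(path: FilePath) {
  return fsStatSync(unFilePath(path));
}
export function readFile(path: FilePath, encoding: BufferEncoding): Promise<string> {
  return fsReadFile(unFilePath(path), encoding);
}
export function writeFile(path: FilePath, data: string | Buffer): Promise<void> {
  return fsWriteFile(unFilePath(path), data);
}

Because the brands exist only at the type level, unFilePath and unUrlPath compile away to identity casts; relativeUrl(UrlPath("/foo/bar"), UrlPath("/baz")) in the url-test.ts hunk further down still returns "../baz", and the only change is that a UrlPath can no longer be handed to the file system (or a FilePath to a URL helper) without an explicit conversion.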
    diff --git a/test/pager-test.ts b/test/pager-test.ts index 4c88f7fc9..77da99703 100644 --- a/test/pager-test.ts +++ b/test/pager-test.ts @@ -1,19 +1,26 @@ import assert from "node:assert"; +import {UrlPath} from "../src/brandedPath.js"; import {findLink as pager} from "../src/pager.js"; describe("findLink(path, options)", () => { it("returns the previous and next links for three pages", () => { const config = { pages: [ - {name: "a", path: "/a"}, - {name: "b", path: "/b"}, - {name: "c", path: "/c"} + {name: "a", path: UrlPath("/a")}, + {name: "b", path: UrlPath("/b")}, + {name: "c", path: UrlPath("/c")} ] }; - assert.deepStrictEqual(pager("/index", config), {prev: undefined, next: {name: "a", path: "/a"}}); - assert.deepStrictEqual(pager("/a", config), {prev: {name: "Home", path: "/index"}, next: {name: "b", path: "/b"}}); - assert.deepStrictEqual(pager("/b", config), {prev: {name: "a", path: "/a"}, next: {name: "c", path: "/c"}}); - assert.deepStrictEqual(pager("/c", config), {prev: {name: "b", path: "/b"}, next: undefined}); + assert.deepStrictEqual(pager(UrlPath("/index"), config), {prev: undefined, next: {name: "a", path: "/a"}}); + assert.deepStrictEqual(pager(UrlPath("/a"), config), { + prev: {name: "Home", path: "/index"}, + next: {name: "b", path: "/b"} + }); + assert.deepStrictEqual(pager(UrlPath("/b"), config), { + prev: {name: "a", path: "/a"}, + next: {name: "c", path: "/c"} + }); + assert.deepStrictEqual(pager(UrlPath("/c"), config), {prev: {name: "b", path: "/b"}, next: undefined}); }); it("returns the previous and next links for three pages with sections", () => { const config = { @@ -22,74 +29,95 @@ describe("findLink(path, options)", () => { name: "section", open: true, pages: [ - {name: "a", path: "/a"}, - {name: "b", path: "/b"}, - {name: "c", path: "/c"} + {name: "a", path: UrlPath("/a")}, + {name: "b", path: UrlPath("/b")}, + {name: "c", path: UrlPath("/c")} ] } ] }; - assert.deepStrictEqual(pager("/index", config), {prev: undefined, next: {name: "a", path: "/a"}}); - assert.deepStrictEqual(pager("/a", config), {prev: {name: "Home", path: "/index"}, next: {name: "b", path: "/b"}}); - assert.deepStrictEqual(pager("/b", config), {prev: {name: "a", path: "/a"}, next: {name: "c", path: "/c"}}); - assert.deepStrictEqual(pager("/c", config), {prev: {name: "b", path: "/b"}, next: undefined}); + assert.deepStrictEqual(pager(UrlPath("/index"), config), {prev: undefined, next: {name: "a", path: "/a"}}); + assert.deepStrictEqual(pager(UrlPath("/a"), config), { + prev: {name: "Home", path: "/index"}, + next: {name: "b", path: "/b"} + }); + assert.deepStrictEqual(pager(UrlPath("/b"), config), { + prev: {name: "a", path: "/a"}, + next: {name: "c", path: "/c"} + }); + assert.deepStrictEqual(pager(UrlPath("/c"), config), {prev: {name: "b", path: "/b"}, next: undefined}); }); it("returns the previous and next links for two pages", () => { const config = { pages: [ - {name: "a", path: "/a"}, - {name: "b", path: "/b"} + {name: "a", path: UrlPath("/a")}, + {name: "b", path: UrlPath("/b")} ] }; - assert.deepStrictEqual(pager("/index", config), {prev: undefined, next: {name: "a", path: "/a"}}); - assert.deepStrictEqual(pager("/a", config), {prev: {name: "Home", path: "/index"}, next: {name: "b", path: "/b"}}); - assert.deepStrictEqual(pager("/b", config), {prev: {name: "a", path: "/a"}, next: undefined}); + assert.deepStrictEqual(pager(UrlPath("/index"), config), {prev: undefined, next: {name: "a", path: "/a"}}); + assert.deepStrictEqual(pager(UrlPath("/a"), config), { + prev: {name: 
"Home", path: "/index"}, + next: {name: "b", path: "/b"} + }); + assert.deepStrictEqual(pager(UrlPath("/b"), config), {prev: {name: "a", path: "/a"}, next: undefined}); }); it("returns the previous and next links for one pages", () => { - const config = {pages: [{name: "a", path: "/a"}]}; - assert.deepStrictEqual(pager("/index", config), {prev: undefined, next: {name: "a", path: "/a"}}); - assert.deepStrictEqual(pager("/a", config), {prev: {name: "Home", path: "/index"}, next: undefined}); + const config = {pages: [{name: "a", path: UrlPath("/a")}]}; + assert.deepStrictEqual(pager(UrlPath("/index"), config), {prev: undefined, next: {name: "a", path: "/a"}}); + assert.deepStrictEqual(pager(UrlPath("/a"), config), {prev: {name: "Home", path: "/index"}, next: undefined}); }); it("returns undefined for zero pages", () => { const config = {pages: []}; - assert.deepStrictEqual(pager("/index", config), undefined); + assert.deepStrictEqual(pager(UrlPath("/index"), config), undefined); }); it("returns undefined for non-referenced pages", () => { const config = { pages: [ - {name: "a", path: "/a"}, - {name: "b", path: "/b"}, - {name: "c", path: "/c"} + {name: "a", path: UrlPath("/a")}, + {name: "b", path: UrlPath("/b")}, + {name: "c", path: UrlPath("/c")} ] }; - assert.deepStrictEqual(pager("/d", config), undefined); + assert.deepStrictEqual(pager(UrlPath("/d"), config), undefined); }); it("avoids cycles when a path is listed multiple times", () => { const config = { pages: [ - {name: "a", path: "/a"}, - {name: "b", path: "/b"}, - {name: "a", path: "/a"}, - {name: "c", path: "/c"} + {name: "a", path: UrlPath("/a")}, + {name: "b", path: UrlPath("/b")}, + {name: "a", path: UrlPath("/a")}, + {name: "c", path: UrlPath("/c")} ] }; - assert.deepStrictEqual(pager("/index", config), {prev: undefined, next: {name: "a", path: "/a"}}); - assert.deepStrictEqual(pager("/a", config), {prev: {name: "Home", path: "/index"}, next: {name: "b", path: "/b"}}); - assert.deepStrictEqual(pager("/b", config), {prev: {name: "a", path: "/a"}, next: {name: "c", path: "/c"}}); - assert.deepStrictEqual(pager("/c", config), {prev: {name: "b", path: "/b"}, next: undefined}); + assert.deepStrictEqual(pager(UrlPath("/index"), config), {prev: undefined, next: {name: "a", path: "/a"}}); + assert.deepStrictEqual(pager(UrlPath("/a"), config), { + prev: {name: "Home", path: "/index"}, + next: {name: "b", path: "/b"} + }); + assert.deepStrictEqual(pager(UrlPath("/b"), config), { + prev: {name: "a", path: "/a"}, + next: {name: "c", path: "/c"} + }); + assert.deepStrictEqual(pager(UrlPath("/c"), config), {prev: {name: "b", path: "/b"}, next: undefined}); }); it("implicitly includes the index page if there is a title", () => { const config = { title: "Test", pages: [ - {name: "a", path: "/a"}, - {name: "b", path: "/b"}, - {name: "c", path: "/c"} + {name: "a", path: UrlPath("/a")}, + {name: "b", path: UrlPath("/b")}, + {name: "c", path: UrlPath("/c")} ] }; - assert.deepStrictEqual(pager("/index", config), {prev: undefined, next: {name: "a", path: "/a"}}); - assert.deepStrictEqual(pager("/a", config), {prev: {name: "Test", path: "/index"}, next: {name: "b", path: "/b"}}); - assert.deepStrictEqual(pager("/b", config), {prev: {name: "a", path: "/a"}, next: {name: "c", path: "/c"}}); - assert.deepStrictEqual(pager("/c", config), {prev: {name: "b", path: "/b"}, next: undefined}); + assert.deepStrictEqual(pager(UrlPath("/index"), config), {prev: undefined, next: {name: "a", path: "/a"}}); + assert.deepStrictEqual(pager(UrlPath("/a"), config), { + 
prev: {name: "Test", path: "/index"}, + next: {name: "b", path: "/b"} + }); + assert.deepStrictEqual(pager(UrlPath("/b"), config), { + prev: {name: "a", path: "/a"}, + next: {name: "c", path: "/c"} + }); + assert.deepStrictEqual(pager(UrlPath("/c"), config), {prev: {name: "b", path: "/b"}, next: undefined}); }); }); diff --git a/test/preview/preview-test.ts b/test/preview/preview-test.ts index 4812262e8..98363ad48 100644 --- a/test/preview/preview-test.ts +++ b/test/preview/preview-test.ts @@ -1,11 +1,12 @@ import chai, {assert, expect} from "chai"; import chaiHttp from "chai-http"; +import {FilePath} from "../../src/brandedPath.js"; import {normalizeConfig} from "../../src/config.js"; import {preview} from "../../src/preview.js"; import type {PreviewOptions, PreviewServer} from "../../src/preview.js"; import {mockJsDelivr} from "../mocks/jsdelivr.js"; -const testHostRoot = "test/preview/dashboard"; +const testHostRoot = FilePath("test/preview/dashboard"); const testHostName = "127.0.0.1"; const testPort = 3000; diff --git a/test/telemetry-test.ts b/test/telemetry-test.ts index ad33536f2..6d1d778bc 100644 --- a/test/telemetry-test.ts +++ b/test/telemetry-test.ts @@ -1,6 +1,6 @@ import assert from "assert"; -import type {readFile} from "fs/promises"; import {MockAgent, getGlobalDispatcher, setGlobalDispatcher} from "undici"; +import type {readFile} from "../src/brandedFs.js"; import {Telemetry} from "../src/telemetry.js"; import {MockLogger} from "./mocks/logger.js"; diff --git a/test/url-test.ts b/test/url-test.ts index 2f1177430..71164c3bc 100644 --- a/test/url-test.ts +++ b/test/url-test.ts @@ -1,40 +1,41 @@ import assert from "node:assert"; +import {UrlPath} from "../src/brandedPath.js"; import {relativeUrl} from "../src/url.js"; describe("relativeUrls", () => { it("respects absolute links", () => { - assert.strictEqual(relativeUrl("/", "https://whatever"), "https://whatever"); - assert.strictEqual(relativeUrl("/", "http://example.org"), "http://example.org"); - assert.strictEqual(relativeUrl("/", "https://example.org/"), "https://example.org/"); - assert.strictEqual(relativeUrl("/", "mailto:hello@example.org"), "mailto:hello@example.org"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("https://whatever")), "https://whatever"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("http://example.org")), "http://example.org"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("https://example.org/")), "https://example.org/"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("mailto:hello@example.org")), "mailto:hello@example.org"); }); it("return the expected result", () => { - assert.strictEqual(relativeUrl("/", "/"), "./"); - assert.strictEqual(relativeUrl("/foo", "/"), "./"); - assert.strictEqual(relativeUrl("/foo.html", "/"), "./"); - assert.strictEqual(relativeUrl("/", "/foo"), "./foo"); - assert.strictEqual(relativeUrl("/", "/foo.html"), "./foo.html"); - assert.strictEqual(relativeUrl("/", "/foo/bar/baz"), "./foo/bar/baz"); - assert.strictEqual(relativeUrl("/foo", "/foo"), "./foo"); - assert.strictEqual(relativeUrl("/foo/", "/foo/"), "./"); - assert.strictEqual(relativeUrl("/foo", "/foo/"), "./foo/"); - assert.strictEqual(relativeUrl("/foo/", "/foo"), "../foo"); - assert.strictEqual(relativeUrl("/foo/bar", "/foo/bar"), "./bar"); - assert.strictEqual(relativeUrl("/foo/bar/", "/foo/bar/"), "./"); - assert.strictEqual(relativeUrl("/foo/bar", "/foo/bar/"), "./bar/"); - assert.strictEqual(relativeUrl("/foo/bar/", "/foo/bar"), "../bar"); - 
assert.strictEqual(relativeUrl("/foo", "/bar"), "./bar"); - assert.strictEqual(relativeUrl("/foo/bar", "/baz"), "../baz"); - assert.strictEqual(relativeUrl("/foo/bar", "/foo"), "../foo"); - assert.strictEqual(relativeUrl("/foo/bar", "/foo.csv"), "../foo.csv"); - assert.strictEqual(relativeUrl("/foo/bar", "/foo/"), "./"); - assert.strictEqual(relativeUrl("/foo/bar", "/baz/bar"), "../baz/bar"); - assert.strictEqual(relativeUrl("foo", "bar"), "./bar"); - assert.strictEqual(relativeUrl("foo/bar", "baz"), "../baz"); - assert.strictEqual(relativeUrl("foo/bar", "foo"), "../foo"); - assert.strictEqual(relativeUrl("foo/bar", "foo.csv"), "../foo.csv"); - assert.strictEqual(relativeUrl("foo/bar", "foo/"), "./"); - assert.strictEqual(relativeUrl("foo/bar", "baz/bar"), "../baz/bar"); - assert.strictEqual(relativeUrl("foo////baz", "baz//bar"), "../baz/bar"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("/foo"), UrlPath("/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("/foo.html"), UrlPath("/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("/foo")), "./foo"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("/foo.html")), "./foo.html"); + assert.strictEqual(relativeUrl(UrlPath("/"), UrlPath("/foo/bar/baz")), "./foo/bar/baz"); + assert.strictEqual(relativeUrl(UrlPath("/foo"), UrlPath("/foo")), "./foo"); + assert.strictEqual(relativeUrl(UrlPath("/foo/"), UrlPath("/foo/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("/foo"), UrlPath("/foo/")), "./foo/"); + assert.strictEqual(relativeUrl(UrlPath("/foo/"), UrlPath("/foo")), "../foo"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/foo/bar")), "./bar"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar/"), UrlPath("/foo/bar/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/foo/bar/")), "./bar/"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar/"), UrlPath("/foo/bar")), "../bar"); + assert.strictEqual(relativeUrl(UrlPath("/foo"), UrlPath("/bar")), "./bar"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/baz")), "../baz"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/foo")), "../foo"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/foo.csv")), "../foo.csv"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/foo/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("/foo/bar"), UrlPath("/baz/bar")), "../baz/bar"); + assert.strictEqual(relativeUrl(UrlPath("foo"), UrlPath("bar")), "./bar"); + assert.strictEqual(relativeUrl(UrlPath("foo/bar"), UrlPath("baz")), "../baz"); + assert.strictEqual(relativeUrl(UrlPath("foo/bar"), UrlPath("foo")), "../foo"); + assert.strictEqual(relativeUrl(UrlPath("foo/bar"), UrlPath("foo.csv")), "../foo.csv"); + assert.strictEqual(relativeUrl(UrlPath("foo/bar"), UrlPath("foo/")), "./"); + assert.strictEqual(relativeUrl(UrlPath("foo/bar"), UrlPath("baz/bar")), "../baz/bar"); + assert.strictEqual(relativeUrl(UrlPath("foo////baz"), UrlPath("baz//bar")), "../baz/bar"); }); }); diff --git a/yarn.lock b/yarn.lock index a25753710..fde520c3a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -310,6 +310,18 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz#e5211452df060fa8522b55c7b3c0c4d1981cb044" integrity sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw== +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved 
"https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + "@istanbuljs/schema@^0.1.2", "@istanbuljs/schema@^0.1.3": version "0.1.3" resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" @@ -386,6 +398,11 @@ d3-dsv "^3.0.1" d3-require "^1.3.0" +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + "@rollup/plugin-node-resolve@^15.2.3": version "15.2.3" resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.2.3.tgz#e5e0b059bd85ca57489492f295ce88c2d4b0daf9" @@ -732,7 +749,7 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" -ansi-styles@^6.2.1: +ansi-styles@^6.1.0, ansi-styles@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== @@ -1092,7 +1109,14 @@ core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== -cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-env@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf" + integrity sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw== + dependencies: + cross-spawn "^7.0.1" + +cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -1306,6 +1330,11 @@ domutils@^3.0.1: domelementtype "^2.3.0" domhandler "^5.0.3" +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -1321,6 +1350,11 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + 
encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" @@ -1794,6 +1828,14 @@ foreground-child@^2.0.0: cross-spawn "^7.0.0" signal-exit "^3.0.2" +foreground-child@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" + integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" @@ -1919,6 +1961,17 @@ glob@7.2.0: once "^1.3.0" path-is-absolute "^1.0.0" +glob@^10.3.7: + version "10.3.10" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" + integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== + dependencies: + foreground-child "^3.1.0" + jackspeak "^2.3.5" + minimatch "^9.0.1" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-scurry "^1.10.1" + glob@^7.1.3, glob@^7.1.4: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" @@ -2397,6 +2450,15 @@ istanbul-reports@^3.1.6: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" +jackspeak@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" + integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + js-yaml@4.1.0, js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" @@ -2528,6 +2590,11 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" +"lru-cache@^9.1.1 || ^10.0.0": + version "10.2.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3" + integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q== + make-dir@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" @@ -2630,11 +2697,23 @@ minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: dependencies: brace-expansion "^1.1.7" +minimatch@^9.0.1: + version "9.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== + dependencies: + brace-expansion "^2.0.1" + minimist@^1.2.0, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": + version "7.0.4" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c" + integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== + minisearch@^6.3.0: version "6.3.0" resolved 
"https://registry.yarnpkg.com/minisearch/-/minisearch-6.3.0.tgz#985a2f1ca3c73c2d65af94f0616bfe57164b0b6b" @@ -2867,6 +2946,14 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-scurry@^1.10.1: + version "1.10.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698" + integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ== + dependencies: + lru-cache "^9.1.1 || ^10.0.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-type@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" @@ -3001,6 +3088,13 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" +rimraf@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.5.tgz#9be65d2d6e683447d2e9013da2bf451139a61ccf" + integrity sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A== + dependencies: + glob "^10.3.7" + rollup-plugin-esbuild@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/rollup-plugin-esbuild/-/rollup-plugin-esbuild-6.1.0.tgz#966d297fe9edea3e6ba5dfd8ca3208825c82d7ce" @@ -3185,6 +3279,11 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + sisteransi@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" @@ -3226,7 +3325,7 @@ streamx@^2.15.0: fast-fifo "^1.1.0" queue-tick "^1.0.1" -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -3235,6 +3334,15 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + string-width@^7.0.0: version "7.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-7.1.0.tgz#d994252935224729ea3719c49f7206dc9c46550a" @@ -3278,14 +3386,14 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.1.0: +strip-ansi@^7.0.1, strip-ansi@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== @@ -3581,7 +3689,7 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== -wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -3590,6 +3698,15 @@ wrap-ansi@^7.0.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + wrap-ansi@^9.0.0: version "9.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-9.0.0.tgz#1a3dc8b70d85eeb8398ddfb1e4a02cd186e58b3e"