From 8bef33179026a562d1143f7fb252d383d47856d0 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Wed, 13 Mar 2024 10:11:11 +0100 Subject: [PATCH 01/27] test: add unit test for produced handler --- .../__snapshots__/lambda-handler.ts.snap | 277 ++++++++++++++++++ .../fixtures/lambda-handler/entry.js | 0 .../fixtures/lambda-handler/included.js | 0 .../src/__tests__/lambda-handler.ts | 42 +++ .../src/lambda-handler.ts | 2 +- 5 files changed, 320 insertions(+), 1 deletion(-) create mode 100644 packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap create mode 100644 packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js create mode 100644 packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/included.js create mode 100644 packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts diff --git a/packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap b/packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap new file mode 100644 index 0000000000000..afadf3bc08b1b --- /dev/null +++ b/packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap @@ -0,0 +1,277 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`produced handler is correct 1`] = ` +[MockFunction] { + "calls": Array [ + Array [ + "/.netlify/functions-internal/test/test.js", + " +const Stream = require(\\"stream\\") +const http = require(\\"http\\") +const { Buffer } = require(\\"buffer\\") +const cookie = require(\\"./../../../packages/gatsby-adapter-netlify/node_modules/cookie/index.js\\") + + +const preferDefault = m => (m && m.default) || m + +const functionModule = require(\\"./../../../packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js\\") + +const functionHandler = preferDefault(functionModule) + +const statuses = { + \\"100\\": \\"Continue\\", + \\"101\\": \\"Switching Protocols\\", + \\"102\\": \\"Processing\\", + \\"103\\": \\"Early Hints\\", + \\"200\\": \\"OK\\", + \\"201\\": \\"Created\\", + \\"202\\": \\"Accepted\\", + \\"203\\": \\"Non-Authoritative Information\\", + \\"204\\": \\"No Content\\", + \\"205\\": \\"Reset Content\\", + \\"206\\": \\"Partial Content\\", + \\"207\\": \\"Multi-Status\\", + \\"208\\": \\"Already Reported\\", + \\"226\\": \\"IM Used\\", + \\"300\\": \\"Multiple Choices\\", + \\"301\\": \\"Moved Permanently\\", + \\"302\\": \\"Found\\", + \\"303\\": \\"See Other\\", + \\"304\\": \\"Not Modified\\", + \\"305\\": \\"Use Proxy\\", + \\"307\\": \\"Temporary Redirect\\", + \\"308\\": \\"Permanent Redirect\\", + \\"400\\": \\"Bad Request\\", + \\"401\\": \\"Unauthorized\\", + \\"402\\": \\"Payment Required\\", + \\"403\\": \\"Forbidden\\", + \\"404\\": \\"Not Found\\", + \\"405\\": \\"Method Not Allowed\\", + \\"406\\": \\"Not Acceptable\\", + \\"407\\": \\"Proxy Authentication Required\\", + \\"408\\": \\"Request Timeout\\", + \\"409\\": \\"Conflict\\", + \\"410\\": \\"Gone\\", + \\"411\\": \\"Length Required\\", + \\"412\\": \\"Precondition Failed\\", + \\"413\\": \\"Payload Too Large\\", + \\"414\\": \\"URI Too Long\\", + \\"415\\": \\"Unsupported Media Type\\", + \\"416\\": \\"Range Not Satisfiable\\", + \\"417\\": \\"Expectation Failed\\", + \\"418\\": \\"I'm a Teapot\\", + \\"421\\": \\"Misdirected Request\\", + \\"422\\": \\"Unprocessable Entity\\", + \\"423\\": \\"Locked\\", + \\"424\\": \\"Failed Dependency\\", + \\"425\\": \\"Too Early\\", + \\"426\\": \\"Upgrade Required\\", + \\"428\\": \\"Precondition 
Required\\", + \\"429\\": \\"Too Many Requests\\", + \\"431\\": \\"Request Header Fields Too Large\\", + \\"451\\": \\"Unavailable For Legal Reasons\\", + \\"500\\": \\"Internal Server Error\\", + \\"501\\": \\"Not Implemented\\", + \\"502\\": \\"Bad Gateway\\", + \\"503\\": \\"Service Unavailable\\", + \\"504\\": \\"Gateway Timeout\\", + \\"505\\": \\"HTTP Version Not Supported\\", + \\"506\\": \\"Variant Also Negotiates\\", + \\"507\\": \\"Insufficient Storage\\", + \\"508\\": \\"Loop Detected\\", + \\"509\\": \\"Bandwidth Limit Exceeded\\", + \\"510\\": \\"Not Extended\\", + \\"511\\": \\"Network Authentication Required\\" +} + +const createRequestObject = ({ event, context }) => { + const { + path = \\"\\", + multiValueQueryStringParameters, + queryStringParameters, + httpMethod, + multiValueHeaders = {}, + body, + isBase64Encoded, + } = event + const newStream = new Stream.Readable() + const req = Object.assign(newStream, http.IncomingMessage.prototype) + req.url = path + req.originalUrl = req.url + req.query = queryStringParameters + req.multiValueQuery = multiValueQueryStringParameters + req.method = httpMethod + req.rawHeaders = [] + req.headers = {} + // Expose Netlify Function event and context on request object. + req.netlifyFunctionParams = { event, context } + for (const key of Object.keys(multiValueHeaders)) { + for (const value of multiValueHeaders[key]) { + req.rawHeaders.push(key, value) + } + req.headers[key.toLowerCase()] = multiValueHeaders[key].toString() + } + req.getHeader = name => req.headers[name.toLowerCase()] + req.getHeaders = () => req.headers + // Gatsby includes cookie middleware + const cookies = req.headers.cookie + if (cookies) { + req.cookies = cookie.parse(cookies) + } + // req.connection = {} + if (body) { + req.push(body, isBase64Encoded ? \\"base64\\" : undefined) + } + req.push(null) + return req +} + +const createResponseObject = ({ onResEnd }) => { + const response = { + isBase64Encoded: true, + multiValueHeaders: {}, + }; + const res = new Stream(); + Object.defineProperty(res, 'statusCode', { + get() { + return response.statusCode; + }, + set(statusCode) { + response.statusCode = statusCode; + }, + }); + res.headers = { 'content-type': 'text/html; charset=utf-8' }; + res.writeHead = (status, headers) => { + response.statusCode = status; + if (headers) { + res.headers = Object.assign(res.headers, headers); + } + // Return res object to allow for chaining + // Fixes: https://github.com/netlify/next-on-netlify/pull/74 + return res; + }; + res.write = (chunk) => { + if (!response.body) { + response.body = Buffer.from(''); + } + response.body = Buffer.concat([ + Buffer.isBuffer(response.body) + ? response.body + : Buffer.from(response.body), + Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk), + ]); + return true; + }; + res.setHeader = (name, value) => { + res.headers[name.toLowerCase()] = value; + return res; + }; + res.removeHeader = (name) => { + delete res.headers[name.toLowerCase()]; + }; + res.getHeader = (name) => res.headers[name.toLowerCase()]; + res.getHeaders = () => res.headers; + res.hasHeader = (name) => Boolean(res.getHeader(name)); + res.end = (text) => { + if (text) + res.write(text); + if (!res.statusCode) { + res.statusCode = 200; + } + if (response.body) { + response.body = Buffer.from(response.body).toString('base64'); + } + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore These types are a mess, and need sorting out + response.multiValueHeaders = res.headers; + res.writeHead(response.statusCode); + // Convert all multiValueHeaders into arrays + for (const key of Object.keys(response.multiValueHeaders)) { + const header = response.multiValueHeaders[key]; + if (!Array.isArray(header)) { + response.multiValueHeaders[key] = [header]; + } + } + res.finished = true; + res.writableEnded = true; + // Call onResEnd handler with the response object + onResEnd(response); + return res; + }; + // Gatsby Functions additions + res.send = (data) => { + if (res.finished) { + return res; + } + if (typeof data === 'number') { + return res + .status(data) + .setHeader('content-type', 'text/plain; charset=utf-8') + .end(statuses[data] || String(data)); + } + if (typeof data === 'boolean' || typeof data === 'object') { + if (Buffer.isBuffer(data)) { + res.setHeader('content-type', 'application/octet-Stream'); + } + else if (data !== null) { + return res.json(data); + } + } + res.end(data); + return res; + }; + res.json = (data) => { + if (res.finished) { + return res; + } + res.setHeader('content-type', 'application/json'); + res.end(JSON.stringify(data)); + return res; + }; + res.status = (code) => { + const numericCode = Number.parseInt(code); + if (!Number.isNaN(numericCode)) { + response.statusCode = numericCode; + } + return res; + }; + res.redirect = (statusCodeOrUrl, url) => { + let statusCode = statusCodeOrUrl; + let Location = url; + if (!url && typeof statusCodeOrUrl === 'string') { + Location = statusCodeOrUrl; + statusCode = 302; + } + res.writeHead(statusCode, { Location }); + res.end(); + return res; + }; + return res; +}; + +const handler = async (event, context) => { + const req = createRequestObject({ event, context }) + + return new Promise(async resolve => { + try { + const res = createResponseObject({ onResEnd: resolve }) + await functionHandler(req, res) + } catch(error) { + console.error(\\"Error executing \\" + event.path, error) + resolve({ statusCode: 500 }) + } + }) +} + +exports.handler = handler +", + ], + ], + "results": Array [ + Object { + "type": "return", + "value": Promise {}, + }, + ], +} +`; diff --git a/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js b/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/included.js b/packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/included.js new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts b/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts new file mode 100644 index 0000000000000..e6a8049b4090e --- /dev/null +++ 
b/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts @@ -0,0 +1,42 @@ +import fs from "fs-extra" +import { prepareFunction } from "../lambda-handler" +import { join, relative } from "path" +import { slash } from "gatsby-core-utils/path" + +const writeFileSpy = jest + .spyOn(fs, `writeFile`) + .mockImplementation(async () => {}) +const writeJsonSpy = jest + .spyOn(fs, `writeJSON`) + .mockImplementation(async () => {}) + +const fixturePath = join( + relative(process.cwd(), __dirname), + `fixtures`, + `lambda-handler` +) +const pathToEntryPoint = join(fixturePath, `entry.js`) +const requiredFile = join(fixturePath, `included.js`) + +test(`produced handler is correct`, async () => { + await prepareFunction({ + functionId: `test`, + name: `test`, + pathToEntryPoint, + requiredFiles: [requiredFile], + }) + + // asserting correctness on actual output would be difficult + // so this assertion is mostly to make sure win32 produces same + // output as posix + expect(writeFileSpy).toMatchSnapshot() + expect(writeJsonSpy).toBeCalledWith(expect.any(String), { + config: { + name: `test`, + generator: expect.stringContaining(`gatsby-adapter-netlify@`), + includedFiles: [slash(requiredFile)], + externalNodeModules: [`msgpackr-extract`], + }, + version: 1, + }) +}) diff --git a/packages/gatsby-adapter-netlify/src/lambda-handler.ts b/packages/gatsby-adapter-netlify/src/lambda-handler.ts index ab617a0b7edc7..5407fdb7ab54d 100644 --- a/packages/gatsby-adapter-netlify/src/lambda-handler.ts +++ b/packages/gatsby-adapter-netlify/src/lambda-handler.ts @@ -25,7 +25,7 @@ interface INetlifyFunctionManifest { version: number } -async function prepareFunction( +export async function prepareFunction( fun: IFunctionDefinition, odbfunctionName?: string ): Promise { From 616479c8f70e16db2c6ee362a244feb8a05aeb62 Mon Sep 17 00:00:00 2001 From: pieh Date: Wed, 13 Mar 2024 17:46:45 +0100 Subject: [PATCH 02/27] actually failing test in windows --- .../__snapshots__/lambda-handler.ts.snap | 277 ------------------ .../src/__tests__/lambda-handler.ts | 30 +- 2 files changed, 17 insertions(+), 290 deletions(-) delete mode 100644 packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap diff --git a/packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap b/packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap deleted file mode 100644 index afadf3bc08b1b..0000000000000 --- a/packages/gatsby-adapter-netlify/src/__tests__/__snapshots__/lambda-handler.ts.snap +++ /dev/null @@ -1,277 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`produced handler is correct 1`] = ` -[MockFunction] { - "calls": Array [ - Array [ - "/.netlify/functions-internal/test/test.js", - " -const Stream = require(\\"stream\\") -const http = require(\\"http\\") -const { Buffer } = require(\\"buffer\\") -const cookie = require(\\"./../../../packages/gatsby-adapter-netlify/node_modules/cookie/index.js\\") - - -const preferDefault = m => (m && m.default) || m - -const functionModule = require(\\"./../../../packages/gatsby-adapter-netlify/src/__tests__/fixtures/lambda-handler/entry.js\\") - -const functionHandler = preferDefault(functionModule) - -const statuses = { - \\"100\\": \\"Continue\\", - \\"101\\": \\"Switching Protocols\\", - \\"102\\": \\"Processing\\", - \\"103\\": \\"Early Hints\\", - \\"200\\": \\"OK\\", - \\"201\\": \\"Created\\", - \\"202\\": \\"Accepted\\", - \\"203\\": \\"Non-Authoritative Information\\", - \\"204\\": \\"No Content\\", - 
\\"205\\": \\"Reset Content\\", - \\"206\\": \\"Partial Content\\", - \\"207\\": \\"Multi-Status\\", - \\"208\\": \\"Already Reported\\", - \\"226\\": \\"IM Used\\", - \\"300\\": \\"Multiple Choices\\", - \\"301\\": \\"Moved Permanently\\", - \\"302\\": \\"Found\\", - \\"303\\": \\"See Other\\", - \\"304\\": \\"Not Modified\\", - \\"305\\": \\"Use Proxy\\", - \\"307\\": \\"Temporary Redirect\\", - \\"308\\": \\"Permanent Redirect\\", - \\"400\\": \\"Bad Request\\", - \\"401\\": \\"Unauthorized\\", - \\"402\\": \\"Payment Required\\", - \\"403\\": \\"Forbidden\\", - \\"404\\": \\"Not Found\\", - \\"405\\": \\"Method Not Allowed\\", - \\"406\\": \\"Not Acceptable\\", - \\"407\\": \\"Proxy Authentication Required\\", - \\"408\\": \\"Request Timeout\\", - \\"409\\": \\"Conflict\\", - \\"410\\": \\"Gone\\", - \\"411\\": \\"Length Required\\", - \\"412\\": \\"Precondition Failed\\", - \\"413\\": \\"Payload Too Large\\", - \\"414\\": \\"URI Too Long\\", - \\"415\\": \\"Unsupported Media Type\\", - \\"416\\": \\"Range Not Satisfiable\\", - \\"417\\": \\"Expectation Failed\\", - \\"418\\": \\"I'm a Teapot\\", - \\"421\\": \\"Misdirected Request\\", - \\"422\\": \\"Unprocessable Entity\\", - \\"423\\": \\"Locked\\", - \\"424\\": \\"Failed Dependency\\", - \\"425\\": \\"Too Early\\", - \\"426\\": \\"Upgrade Required\\", - \\"428\\": \\"Precondition Required\\", - \\"429\\": \\"Too Many Requests\\", - \\"431\\": \\"Request Header Fields Too Large\\", - \\"451\\": \\"Unavailable For Legal Reasons\\", - \\"500\\": \\"Internal Server Error\\", - \\"501\\": \\"Not Implemented\\", - \\"502\\": \\"Bad Gateway\\", - \\"503\\": \\"Service Unavailable\\", - \\"504\\": \\"Gateway Timeout\\", - \\"505\\": \\"HTTP Version Not Supported\\", - \\"506\\": \\"Variant Also Negotiates\\", - \\"507\\": \\"Insufficient Storage\\", - \\"508\\": \\"Loop Detected\\", - \\"509\\": \\"Bandwidth Limit Exceeded\\", - \\"510\\": \\"Not Extended\\", - \\"511\\": \\"Network Authentication Required\\" -} - -const createRequestObject = ({ event, context }) => { - const { - path = \\"\\", - multiValueQueryStringParameters, - queryStringParameters, - httpMethod, - multiValueHeaders = {}, - body, - isBase64Encoded, - } = event - const newStream = new Stream.Readable() - const req = Object.assign(newStream, http.IncomingMessage.prototype) - req.url = path - req.originalUrl = req.url - req.query = queryStringParameters - req.multiValueQuery = multiValueQueryStringParameters - req.method = httpMethod - req.rawHeaders = [] - req.headers = {} - // Expose Netlify Function event and context on request object. - req.netlifyFunctionParams = { event, context } - for (const key of Object.keys(multiValueHeaders)) { - for (const value of multiValueHeaders[key]) { - req.rawHeaders.push(key, value) - } - req.headers[key.toLowerCase()] = multiValueHeaders[key].toString() - } - req.getHeader = name => req.headers[name.toLowerCase()] - req.getHeaders = () => req.headers - // Gatsby includes cookie middleware - const cookies = req.headers.cookie - if (cookies) { - req.cookies = cookie.parse(cookies) - } - // req.connection = {} - if (body) { - req.push(body, isBase64Encoded ? 
\\"base64\\" : undefined) - } - req.push(null) - return req -} - -const createResponseObject = ({ onResEnd }) => { - const response = { - isBase64Encoded: true, - multiValueHeaders: {}, - }; - const res = new Stream(); - Object.defineProperty(res, 'statusCode', { - get() { - return response.statusCode; - }, - set(statusCode) { - response.statusCode = statusCode; - }, - }); - res.headers = { 'content-type': 'text/html; charset=utf-8' }; - res.writeHead = (status, headers) => { - response.statusCode = status; - if (headers) { - res.headers = Object.assign(res.headers, headers); - } - // Return res object to allow for chaining - // Fixes: https://github.com/netlify/next-on-netlify/pull/74 - return res; - }; - res.write = (chunk) => { - if (!response.body) { - response.body = Buffer.from(''); - } - response.body = Buffer.concat([ - Buffer.isBuffer(response.body) - ? response.body - : Buffer.from(response.body), - Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk), - ]); - return true; - }; - res.setHeader = (name, value) => { - res.headers[name.toLowerCase()] = value; - return res; - }; - res.removeHeader = (name) => { - delete res.headers[name.toLowerCase()]; - }; - res.getHeader = (name) => res.headers[name.toLowerCase()]; - res.getHeaders = () => res.headers; - res.hasHeader = (name) => Boolean(res.getHeader(name)); - res.end = (text) => { - if (text) - res.write(text); - if (!res.statusCode) { - res.statusCode = 200; - } - if (response.body) { - response.body = Buffer.from(response.body).toString('base64'); - } - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore These types are a mess, and need sorting out - response.multiValueHeaders = res.headers; - res.writeHead(response.statusCode); - // Convert all multiValueHeaders into arrays - for (const key of Object.keys(response.multiValueHeaders)) { - const header = response.multiValueHeaders[key]; - if (!Array.isArray(header)) { - response.multiValueHeaders[key] = [header]; - } - } - res.finished = true; - res.writableEnded = true; - // Call onResEnd handler with the response object - onResEnd(response); - return res; - }; - // Gatsby Functions additions - res.send = (data) => { - if (res.finished) { - return res; - } - if (typeof data === 'number') { - return res - .status(data) - .setHeader('content-type', 'text/plain; charset=utf-8') - .end(statuses[data] || String(data)); - } - if (typeof data === 'boolean' || typeof data === 'object') { - if (Buffer.isBuffer(data)) { - res.setHeader('content-type', 'application/octet-Stream'); - } - else if (data !== null) { - return res.json(data); - } - } - res.end(data); - return res; - }; - res.json = (data) => { - if (res.finished) { - return res; - } - res.setHeader('content-type', 'application/json'); - res.end(JSON.stringify(data)); - return res; - }; - res.status = (code) => { - const numericCode = Number.parseInt(code); - if (!Number.isNaN(numericCode)) { - response.statusCode = numericCode; - } - return res; - }; - res.redirect = (statusCodeOrUrl, url) => { - let statusCode = statusCodeOrUrl; - let Location = url; - if (!url && typeof statusCodeOrUrl === 'string') { - Location = statusCodeOrUrl; - statusCode = 302; - } - res.writeHead(statusCode, { Location }); - res.end(); - return res; - }; - return res; -}; - -const handler = async (event, context) => { - const req = createRequestObject({ event, context }) - - return new Promise(async resolve => { - try { - const res = createResponseObject({ onResEnd: resolve }) - await functionHandler(req, res) - } catch(error) 
{ - console.error(\\"Error executing \\" + event.path, error) - resolve({ statusCode: 500 }) - } - }) -} - -exports.handler = handler -", - ], - ], - "results": Array [ - Object { - "type": "return", - "value": Promise {}, - }, - ], -} -`; diff --git a/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts b/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts index e6a8049b4090e..c101f616fcf9f 100644 --- a/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts +++ b/packages/gatsby-adapter-netlify/src/__tests__/lambda-handler.ts @@ -25,18 +25,22 @@ test(`produced handler is correct`, async () => { pathToEntryPoint, requiredFiles: [requiredFile], }) + const handlerCode = writeFileSpy.mock.calls[0][1] + // expect require in produced code (this is to mostly to make sure handlerCode is actual handler code) + expect(handlerCode).toMatch(/require\(["'][^"']*["']\)/) + // require paths should not have backward slashes (win paths) + expect(handlerCode).not.toMatch(/require\(["'][^"']*\\[^"']*["']\)/) - // asserting correctness on actual output would be difficult - // so this assertion is mostly to make sure win32 produces same - // output as posix - expect(writeFileSpy).toMatchSnapshot() - expect(writeJsonSpy).toBeCalledWith(expect.any(String), { - config: { - name: `test`, - generator: expect.stringContaining(`gatsby-adapter-netlify@`), - includedFiles: [slash(requiredFile)], - externalNodeModules: [`msgpackr-extract`], - }, - version: 1, - }) + expect(writeJsonSpy).toBeCalledWith( + expect.any(String), + expect.objectContaining({ + config: expect.objectContaining({ + name: `test`, + generator: expect.stringContaining(`gatsby-adapter-netlify`), + includedFiles: [slash(requiredFile)], + externalNodeModules: [`msgpackr-extract`], + }), + version: 1, + }) + ) }) From 3faca33e22491ef132d44cdb2e551916736046c2 Mon Sep 17 00:00:00 2001 From: pieh Date: Wed, 13 Mar 2024 19:06:08 +0100 Subject: [PATCH 03/27] fix(gatsby-adapter-netlify): produce working function handlers on windows --- packages/gatsby-adapter-netlify/package.json | 3 ++- packages/gatsby-adapter-netlify/src/lambda-handler.ts | 8 ++++++-- packages/gatsby/src/utils/adapter/init.ts | 5 ++++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/gatsby-adapter-netlify/package.json b/packages/gatsby-adapter-netlify/package.json index cb8e0327a9622..817ee2b0f489b 100644 --- a/packages/gatsby-adapter-netlify/package.json +++ b/packages/gatsby-adapter-netlify/package.json @@ -37,7 +37,8 @@ "@netlify/functions": "^1.6.0", "cookie": "^0.6.0", "fastq": "^1.15.0", - "fs-extra": "^11.2.0" + "fs-extra": "^11.2.0", + "gatsby-core-utils": "^4.14.0-next.2" }, "devDependencies": { "@babel/cli": "^7.20.7", diff --git a/packages/gatsby-adapter-netlify/src/lambda-handler.ts b/packages/gatsby-adapter-netlify/src/lambda-handler.ts index 5407fdb7ab54d..ef70257ba2c2e 100644 --- a/packages/gatsby-adapter-netlify/src/lambda-handler.ts +++ b/packages/gatsby-adapter-netlify/src/lambda-handler.ts @@ -2,6 +2,7 @@ import type { IFunctionDefinition } from "gatsby" import packageJson from "gatsby-adapter-netlify/package.json" import fs from "fs-extra" import * as path from "path" +import { slash } from "gatsby-core-utils/path" interface INetlifyFunctionConfig { externalNodeModules?: Array @@ -58,7 +59,7 @@ export async function prepareFunction( name: displayName, generator: `gatsby-adapter-netlify@${packageJson?.version ?? 
`unknown`}`, includedFiles: fun.requiredFiles.map(file => - file.replace(/\[/g, `*`).replace(/]/g, `*`) + slash(file).replace(/\[/g, `*`).replace(/]/g, `*`) ), externalNodeModules: [`msgpackr-extract`], }, @@ -73,7 +74,10 @@ export async function prepareFunction( function getRelativePathToModule(modulePath: string): string { const absolutePath = require.resolve(modulePath) - return `./` + path.relative(internalFunctionsDir, absolutePath) + return ( + `./` + + path.posix.relative(slash(internalFunctionsDir), slash(absolutePath)) + ) } const handlerSource = /* javascript */ ` diff --git a/packages/gatsby/src/utils/adapter/init.ts b/packages/gatsby/src/utils/adapter/init.ts index 5d7aecd50edc9..0afab63c2f5e2 100644 --- a/packages/gatsby/src/utils/adapter/init.ts +++ b/packages/gatsby/src/utils/adapter/init.ts @@ -9,6 +9,7 @@ import { satisfies } from "semver" import type { AdapterInit } from "./types" import { preferDefault } from "../../bootstrap/prefer-default" import { getLatestAdapters } from "../get-latest-gatsby-files" +import { maybeAddFileProtocol } from "../../bootstrap/resolve-js-file-path" export const getAdaptersCacheDir = (): string => join(process.cwd(), `.cache/adapters`) @@ -85,7 +86,9 @@ const tryLoadingAlreadyInstalledAdapter = async ({ } } - const required = locationRequire.resolve(adapterToUse.module) + const required = maybeAddFileProtocol( + locationRequire.resolve(adapterToUse.module) + ) if (required) { return { found: true, From 80a6f74ac6766912ed94cfcee52455e1c45e3977 Mon Sep 17 00:00:00 2001 From: pieh Date: Wed, 13 Mar 2024 19:44:09 +0100 Subject: [PATCH 04/27] fix(gatsby): functions compilation on windows --- .../functions/api-function-webpack-loader.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts b/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts index 2642f691431c8..2b6905b2c55eb 100644 --- a/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts +++ b/packages/gatsby/src/internal-plugins/functions/api-function-webpack-loader.ts @@ -10,17 +10,17 @@ const APIFunctionLoader: LoaderDefinition = async function () { return /* javascript */ ` const preferDefault = m => (m && m.default) || m - const functionModule = require('${modulePath}'); + const functionModule = require('${slash(modulePath)}'); const functionToExecute = preferDefault(functionModule); const matchPath = '${matchPath}'; - const { match: reachMatch } = require('${require.resolve( - `@gatsbyjs/reach-router` + const { match: reachMatch } = require('${slash( + require.resolve(`@gatsbyjs/reach-router`) )}'); - const { urlencoded, text, json, raw } = require('${require.resolve( - `body-parser` + const { urlencoded, text, json, raw } = require('${slash( + require.resolve(`body-parser`) )}') - const multer = require('${require.resolve(`multer`)}') - const { createConfig } = require('${require.resolve(`./config`)}') + const multer = require('${slash(require.resolve(`multer`))}') + const { createConfig } = require('${slash(require.resolve(`./config`))}') function functionWrapper(req, res) { if (matchPath) { From 4a46d08b7541cc385041c3265ec87e9c48bd0cf4 Mon Sep 17 00:00:00 2001 From: pieh Date: Thu, 14 Mar 2024 19:23:35 +0100 Subject: [PATCH 05/27] tmp: prepare cross-platform binaries for SSR/DSG --- packages/gatsby/src/commands/build.ts | 35 +- .../schema/graphql-engine/bundle-webpack.ts | 322 ++++++++++++++++-- 2 files changed, 306 insertions(+), 
51 deletions(-) diff --git a/packages/gatsby/src/commands/build.ts b/packages/gatsby/src/commands/build.ts index 364329e7b26cc..7ba78910b9eac 100644 --- a/packages/gatsby/src/commands/build.ts +++ b/packages/gatsby/src/commands/build.ts @@ -66,7 +66,7 @@ import { getPageMode, preparePageTemplateConfigs, } from "../utils/page-mode" -import { validateEngines } from "../utils/validate-engines" +// import { validateEngines } from "../utils/validate-engines" import { constructConfigObject } from "../utils/gatsby-cloud-config" import { waitUntilWorkerJobsAreComplete } from "../utils/jobs/worker-messaging" import { getSSRChunkHashes } from "../utils/webpack/get-ssr-chunk-hashes" @@ -294,22 +294,23 @@ module.exports = async function build( pageConfigActivity.end() } - if (shouldGenerateEngines()) { - const validateEnginesActivity = report.activityTimer( - `Validating Rendering Engines`, - { - parentSpan: buildSpan, - } - ) - validateEnginesActivity.start() - try { - await validateEngines(store.getState().program.directory) - } catch (error) { - validateEnginesActivity.panic({ id: `98001`, context: {}, error }) - } finally { - validateEnginesActivity.end() - } - } + // TODO: skip this only when target env is different than current one + // if (shouldGenerateEngines()) { + // const validateEnginesActivity = report.activityTimer( + // `Validating Rendering Engines`, + // { + // parentSpan: buildSpan, + // } + // ) + // validateEnginesActivity.start() + // try { + // await validateEngines(store.getState().program.directory) + // } catch (error) { + // validateEnginesActivity.panic({ id: `98001`, context: {}, error }) + // } finally { + // validateEnginesActivity.end() + // } + // } const cacheActivity = report.activityTimer(`Caching Webpack compilations`, { parentSpan: buildSpan, diff --git a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts index bf73d6021cdd3..527f00797d36c 100644 --- a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts +++ b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts @@ -9,20 +9,27 @@ import { dependencies } from "gatsby/package.json" import { printQueryEnginePlugins } from "./print-plugins" import mod from "module" import { WebpackLoggingPlugin } from "../../utils/webpack/plugins/webpack-logging" +import { getAssetMeta } from "@vercel/webpack-asset-relocator-loader" import reporter from "gatsby-cli/lib/reporter" import { schemaCustomizationAPIs } from "./print-plugins" import type { GatsbyNodeAPI } from "../../redux/types" import * as nodeApis from "../../utils/api-node-docs" import { store } from "../../redux" import { PackageJson } from "../../.." 
+import { slash } from "gatsby-core-utils/path" +import { isEqual } from "lodash" type Reporter = typeof reporter const extensions = [`.mjs`, `.js`, `.json`, `.node`, `.ts`, `.tsx`] -const outputDir = path.join(process.cwd(), `.cache`, `query-engine`) -const cacheLocation = path.join( - process.cwd(), +const outputDir = path.posix.join( + slash(process.cwd()), + `.cache`, + `query-engine` +) +const cacheLocation = path.posix.join( + slash(process.cwd()), `.cache`, `webpack`, `query-engine` @@ -39,16 +46,30 @@ function getApisToRemoveForQueryEngine(): Array { } const getInternalPackagesCacheDir = (): string => - path.join(process.cwd(), `.cache/internal-packages`) + path.posix.join(slash(process.cwd()), `.cache`, `internal-packages`) // Create a directory and JS module where we install internally used packages -const createInternalPackagesCacheDir = async (): Promise => { +const createInternalPackagesCacheDir = async ( + lambdaTarget: IPlatformAndArch +): Promise => { const cacheDir = getInternalPackagesCacheDir() await fs.ensureDir(cacheDir) - await fs.emptyDir(cacheDir) const packageJsonPath = path.join(cacheDir, `package.json`) + if (fs.existsSync(packageJsonPath)) { + const existingPackageJson = JSON.parse( + fs.readFileSync(packageJsonPath, `utf-8`) + ) + if (isEqual(existingPackageJson.lambdaTarget, lambdaTarget)) { + // if we have already package.json and it is against same lambda target, we can reuse it + // and potentially packages installed in it + return + } + } + + await fs.emptyDir(cacheDir) + await fs.outputJson(packageJsonPath, { name: `gatsby-internal-packages`, description: `This directory contains internal packages installed by Gatsby used to comply with the current platform requirements`, @@ -56,51 +77,202 @@ const createInternalPackagesCacheDir = async (): Promise => { private: true, author: `Gatsby`, license: `MIT`, + lambdaTarget, }) } -// lmdb module with prebuilt binaries for our platform -const lmdbPackage = `@lmdb/lmdb-${process.platform}-${process.arch}` - -// Detect if the prebuilt binaries for lmdb have been installed. These are installed under @lmdb and are tied to each platform/arch. We've seen instances where regular installations lack these modules because of a broken lockfile or skipping optional dependencies installs -function installPrebuiltLmdb(): boolean { +function getLMDBBinaryFromSiteLocation( + lmdbPackageName: string, + version: string +): string | undefined { // Read lmdb's package.json, go through its optional depedencies and validate if there's a prebuilt lmdb module with a compatible binary to our platform and arch let packageJson: PackageJson try { const modulePath = path - .dirname(require.resolve(`lmdb`)) + .dirname(slash(require.resolve(`lmdb`))) .replace(`/dist`, ``) const packageJsonPath = path.join(modulePath, `package.json`) packageJson = JSON.parse(fs.readFileSync(packageJsonPath, `utf-8`)) } catch (e) { // If we fail to read lmdb's package.json there's bigger problems here so just skip installation - return false + return undefined } // If there's no lmdb prebuilt package for our arch/platform listed as optional dep no point in trying to install it const { optionalDependencies } = packageJson - if (!optionalDependencies) return false - if (!Object.keys(optionalDependencies).find(p => p === lmdbPackage)) - return false + if ( + !Object.keys(optionalDependencies ?? 
{}).find(p => p === lmdbPackageName) + ) { + throw new Error("No optional dependencies in lmdb (?)") + } + return getPackageLocationFromRequireContext( + slash(require.resolve(`lmdb`)), + lmdbPackageName, + version + ) +} + +function getPackageLocationFromRequireContext( + location: string, + packageName: string, + packageVersion?: string +): string | undefined { try { - const lmdbRequire = mod.createRequire(require.resolve(`lmdb`)) - lmdbRequire.resolve(lmdbPackage) - return false + const requireId = `${packageName}/package.json` + const locationRequire = mod.createRequire(location) + const packageJsonLocation = slash(locationRequire.resolve(requireId)) + + if (packageVersion) { + // delete locationRequire.cache[requireId] + const { version } = JSON.parse( + fs.readFileSync(packageJsonLocation, `utf-8`) + ) + if (packageVersion !== version) { + return undefined + } + } + + return path.dirname(packageJsonLocation) } catch (e) { - return true + return undefined + } +} + +interface IPlatformAndArch { + platform: string + arch: string +} + +interface ILMDBBinaryPackageStatusBase { + packageName: string + needToInstall: boolean + packageVersion: string +} + +interface ILMDBBinaryPackageStatusInstalled + extends ILMDBBinaryPackageStatusBase { + needToInstall: false + packageLocation: string +} + +interface ILMDBBinaryPackageStatusNeedAlternative + extends ILMDBBinaryPackageStatusBase { + needToInstall: true +} + +type IBinaryPackageStatus = + | ILMDBBinaryPackageStatusInstalled + | ILMDBBinaryPackageStatusNeedAlternative + +function checkIfInstalledInInternalPackagesCache( + packageStatus: IBinaryPackageStatus +): IBinaryPackageStatus { + const cacheDir = getInternalPackagesCacheDir() + + const packageLocationFromInternalPackageCache = + getPackageLocationFromRequireContext( + path.posix.join(cacheDir, `:internal:`), + packageStatus.packageName, + packageStatus.packageVersion + ) + + if ( + packageLocationFromInternalPackageCache && + !path.posix + .relative(cacheDir, packageLocationFromInternalPackageCache) + .startsWith(`..`) + ) { + return { + ...packageStatus, + needToInstall: false, + packageLocation: packageLocationFromInternalPackageCache, + } + } + + return { + ...packageStatus, + needToInstall: true, } } // Install lmdb's native system module under our internal cache if we detect the current installation // isn't using the pre-build binaries -async function installIfMissingLmdb(): Promise { - if (!installPrebuiltLmdb()) return undefined +function checkIfNeedToInstallMissingLmdb( + lambdaTarget: IPlatformAndArch +): IBinaryPackageStatus { + // lmdb module with prebuilt binaries for target platform + const lmdbPackageName = `@lmdb/lmdb-${lambdaTarget.platform}-${lambdaTarget.arch}` + + const lmdbBinaryFromSiteLocation = getLMDBBinaryFromSiteLocation( + lmdbPackageName, + dependencies.lmdb + ) + + const sharedPackageStatus: ILMDBBinaryPackageStatusNeedAlternative = { + needToInstall: true, + packageName: lmdbPackageName, + packageVersion: dependencies.lmdb, + } + + if (lmdbBinaryFromSiteLocation) { + return { + ...sharedPackageStatus, + needToInstall: false, + packageLocation: lmdbBinaryFromSiteLocation, + } + } + + return checkIfInstalledInInternalPackagesCache(sharedPackageStatus) +} + +function checkIfNeedToInstallMissingSharp( + lambdaTarget: IPlatformAndArch, + currentTarget: IPlatformAndArch +): IBinaryPackageStatus | undefined { + try { + // check if shapr is resolvable + const { version: sharpVersion } = require("sharp/package.json") + + if (isEqual(lambdaTarget, currentTarget)) { 
+ return undefined + } + + return checkIfInstalledInInternalPackagesCache({ + needToInstall: true, + packageName: `sharp`, + packageVersion: sharpVersion, + }) + } catch (e) { + return undefined + } +} - await createInternalPackagesCacheDir() +async function installMissing( + packages: Array, + lambdaTarget: IPlatformAndArch +): Promise> { + function shouldInstall( + p: IBinaryPackageStatus | undefined + ): p is IBinaryPackageStatus { + return Boolean(p?.needToInstall) + } + + const packagesToInstall = packages.filter(shouldInstall) + + if (packagesToInstall.length === 0) { + return packages + } + + await createInternalPackagesCacheDir(lambdaTarget) const cacheDir = getInternalPackagesCacheDir() + const options: ExecaOptions = { stderr: `inherit`, cwd: cacheDir, + env: { + npm_config_arch: lambdaTarget.arch, + npm_config_platform: lambdaTarget.platform, + }, } const npmAdditionalCliArgs = [ @@ -113,15 +285,35 @@ async function installIfMissingLmdb(): Promise { `always`, `--legacy-peer-deps`, `--save-exact`, + // target platform might be different than current and force allows us to install it + `--force`, ] await execa( `npm`, - [`install`, ...npmAdditionalCliArgs, `${lmdbPackage}@${dependencies.lmdb}`], + [ + `install`, + ...npmAdditionalCliArgs, + ...packagesToInstall.map(p => `${p.packageName}@${p.packageVersion}`), + ], options ) - return path.join(cacheDir, `node_modules`, lmdbPackage) + return packages.map(info => + info + ? info.needToInstall + ? { + ...info, + needToInstall: false, + packageLocation: path.posix.join( + cacheDir, + `node_modules`, + info.packageName + ), + } + : info + : undefined + ) } export async function createGraphqlEngineBundle( @@ -151,17 +343,50 @@ export async function createGraphqlEngineBundle( require.resolve(`gatsby-plugin-typescript`) ) - // Alternative lmdb path we've created to self heal from a "broken" lmdb installation - const alternativeLmdbPath = await installIfMissingLmdb() + const currentTarget: IPlatformAndArch = { + platform: process.platform, + arch: process.arch, + } - // We force a specific lmdb binary module if we detected a broken lmdb installation or if we detect the presence of an adapter + const functionsTarget: IPlatformAndArch = state.adapter.instance + ? 
{ + // TODO: allow to pass target from adapter + // for now this seems most likely target + platform: `linux`, + arch: `x64`, + } + : currentTarget + + const dynamicAliases: Record = {} let forcedLmdbBinaryModule: string | undefined = undefined - if (state.adapter.instance) { - forcedLmdbBinaryModule = `${lmdbPackage}/node.abi83.glibc.node` + + // we need to make sure we have internal packages cache directory setup for current lambda target + // before we attempt to check if we can reuse those packages + await createInternalPackagesCacheDir(functionsTarget) + + const [lmdbPackageInfo, sharpPackageInfo] = await installMissing( + [ + checkIfNeedToInstallMissingLmdb(functionsTarget), + checkIfNeedToInstallMissingSharp(functionsTarget, currentTarget), + ], + functionsTarget + ) + + if (!lmdbPackageInfo) { + throw new Error(`no lmdb for target`) + } else if (functionsTarget.platform === `linux`) { + if (lmdbPackageInfo.needToInstall) { + throw new Error("no sharp for target") + } + + forcedLmdbBinaryModule = `${lmdbPackageInfo.packageLocation}/node.abi83.glibc.node` } - // We always force the binary if we've installed an alternative path - if (alternativeLmdbPath) { - forcedLmdbBinaryModule = `${alternativeLmdbPath}/node.abi83.glibc.node` + + if (sharpPackageInfo) { + if (sharpPackageInfo.needToInstall) { + throw new Error("no sharp for target") + } + dynamicAliases[`sharp$`] = sharpPackageInfo.packageLocation } const compiler = webpack({ @@ -299,6 +524,7 @@ export async function createGraphqlEngineBundle( resolve: { extensions, alias: { + ...dynamicAliases, ".cache": process.cwd() + `/.cache/`, [require.resolve(`gatsby-cli/lib/reporter/loggers/ink/index.js`)]: @@ -334,7 +560,7 @@ export async function createGraphqlEngineBundle( }) return new Promise((resolve, reject) => { - compiler.run((err, stats): void => { + compiler.run(async (err, stats): void => { function getResourcePath( webpackModule?: Module | NormalModule | ConcatenatedModule | null ): string | undefined { @@ -387,6 +613,34 @@ export async function createGraphqlEngineBundle( iterateModules(stats.compilation.modules, stats.compilation) } + const binaryFixingPromises: Array> = [] + // sigh - emitAsset used by relocator seems to corrupt binaries + // resulting in "ELF file's phentsize not the expected size" errors + // - see size diff + // > find . -name node.abi83.glibc.node + // ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node + // ./.cache/query-engine/assets/node.abi83.glibc.node + // > ls -al ./.cache/query-engine/assets/node.abi83.glibc.node + // -rw-r--r-- 1 misiek 197121 1285429 Mar 14 11:36 ./.cache/query-engine/assets/node.abi83.glibc.node + // > ls -al ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node + // -rw-r--r-- 1 misiek 197121 693544 Mar 14 11:35 ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node + // so this tries to fix it by straight copying it over + for (const asset of ( + stats?.compilation?.assetsInfo ?? 
new Map() + ).keys()) { + if (asset?.endsWith(`.node`)) { + const targetRelPath = path.posix.relative("assets", asset) + const assetMeta = getAssetMeta(targetRelPath, stats?.compilation) + const sourcePath = assetMeta?.path + if (sourcePath) { + const dist = path.join(outputDir, asset) + binaryFixingPromises.push(fs.copyFile(sourcePath, dist)) + } + } + } + + await Promise.all(binaryFixingPromises) + compiler.close(closeErr => { if (err) { return reject(err) From 530800b2a6c27d424b7580f334a26b5869884306 Mon Sep 17 00:00:00 2001 From: pieh Date: Thu, 14 Mar 2024 22:11:11 +0100 Subject: [PATCH 06/27] fix: lint --- .../src/schema/graphql-engine/bundle-webpack.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts index 527f00797d36c..0a7e070ca5818 100644 --- a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts +++ b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts @@ -102,7 +102,7 @@ function getLMDBBinaryFromSiteLocation( if ( !Object.keys(optionalDependencies ?? {}).find(p => p === lmdbPackageName) ) { - throw new Error("No optional dependencies in lmdb (?)") + throw new Error(`No optional dependencies in lmdb (?)`) } return getPackageLocationFromRequireContext( slash(require.resolve(`lmdb`)), @@ -230,7 +230,7 @@ function checkIfNeedToInstallMissingSharp( ): IBinaryPackageStatus | undefined { try { // check if shapr is resolvable - const { version: sharpVersion } = require("sharp/package.json") + const { version: sharpVersion } = require(`sharp/package.json`) if (isEqual(lambdaTarget, currentTarget)) { return undefined @@ -376,7 +376,7 @@ export async function createGraphqlEngineBundle( throw new Error(`no lmdb for target`) } else if (functionsTarget.platform === `linux`) { if (lmdbPackageInfo.needToInstall) { - throw new Error("no sharp for target") + throw new Error(`no lmdb for target`) } forcedLmdbBinaryModule = `${lmdbPackageInfo.packageLocation}/node.abi83.glibc.node` @@ -384,7 +384,7 @@ export async function createGraphqlEngineBundle( if (sharpPackageInfo) { if (sharpPackageInfo.needToInstall) { - throw new Error("no sharp for target") + throw new Error(`no sharp for target`) } dynamicAliases[`sharp$`] = sharpPackageInfo.packageLocation } @@ -560,7 +560,7 @@ export async function createGraphqlEngineBundle( }) return new Promise((resolve, reject) => { - compiler.run(async (err, stats): void => { + compiler.run(async (err, stats): Promise => { function getResourcePath( webpackModule?: Module | NormalModule | ConcatenatedModule | null ): string | undefined { @@ -629,7 +629,7 @@ export async function createGraphqlEngineBundle( stats?.compilation?.assetsInfo ?? 
new Map() ).keys()) { if (asset?.endsWith(`.node`)) { - const targetRelPath = path.posix.relative("assets", asset) + const targetRelPath = path.posix.relative(`assets`, asset) const assetMeta = getAssetMeta(targetRelPath, stats?.compilation) const sourcePath = assetMeta?.path if (sourcePath) { From ea45d1f4ef3031a9603447466f133ce2979d62ef Mon Sep 17 00:00:00 2001 From: pieh Date: Fri, 15 Mar 2024 12:23:40 +0100 Subject: [PATCH 07/27] feat: add a way to configure functions executing platform/arch and add early check in DSG/SSR --- packages/gatsby-adapter-netlify/src/index.ts | 2 + packages/gatsby-cli/src/create-cli.ts | 10 + packages/gatsby/src/commands/build-html.ts | 2 + packages/gatsby/src/commands/serve.ts | 222 +++++++++--------- packages/gatsby/src/commands/types.ts | 2 + .../src/schema/graphql-engine/bootstrap.ts | 1 + .../schema/graphql-engine/bundle-webpack.ts | 81 ++++--- .../graphql-engine/platform-and-arch-check.ts | 8 + packages/gatsby/src/utils/adapter/types.ts | 8 + 9 files changed, 199 insertions(+), 137 deletions(-) create mode 100644 packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts diff --git a/packages/gatsby-adapter-netlify/src/index.ts b/packages/gatsby-adapter-netlify/src/index.ts index b34fafbb8718a..57769b7f8c671 100644 --- a/packages/gatsby-adapter-netlify/src/index.ts +++ b/packages/gatsby-adapter-netlify/src/index.ts @@ -162,6 +162,8 @@ const createNetlifyAdapter: AdapterInit = options => { fileCDNUrlGeneratorModulePath: useNetlifyImageCDN ? require.resolve(`./file-cdn-url-generator`) : undefined, + functionsPlatform: `linux`, + functionsArch: `x64`, } }, } diff --git a/packages/gatsby-cli/src/create-cli.ts b/packages/gatsby-cli/src/create-cli.ts index dc25aa3afb9f7..b7b6bba0da1b7 100644 --- a/packages/gatsby-cli/src/create-cli.ts +++ b/packages/gatsby-cli/src/create-cli.ts @@ -273,6 +273,16 @@ function buildLocalCommands(cli: yargs.Argv, isLocalSite: boolean): void { default: false, describe: `Save the log of changed pages for future comparison.`, hidden: true, + }) + .option(`functions-platform`, { + type: `string`, + describe: `The platform bundled function will execute on. Defaults to current platform or settings provided by used adapter.`, + default: process.platform, + }) + .option(`functions-arch`, { + type: `string`, + describe: `The architecture bundled function will execute on. 
Defaults to current architecture or settings provided by used adapter.`, + default: process.arch, }), handler: handlerP( getCommandHandler( diff --git a/packages/gatsby/src/commands/build-html.ts b/packages/gatsby/src/commands/build-html.ts index 02a8c5494ee25..701fdda2c5d4b 100644 --- a/packages/gatsby/src/commands/build-html.ts +++ b/packages/gatsby/src/commands/build-html.ts @@ -42,6 +42,8 @@ export interface IBuildArgs extends IProgram { profile: boolean graphqlTracing: boolean openTracingConfigFile: string + functionsPlatform?: string + functionsArch?: string // TODO remove in v4 keepPageRenderer: boolean } diff --git a/packages/gatsby/src/commands/serve.ts b/packages/gatsby/src/commands/serve.ts index 3ce95bb8fa96c..e7bcfa18a5c4d 100644 --- a/packages/gatsby/src/commands/serve.ts +++ b/packages/gatsby/src/commands/serve.ts @@ -184,125 +184,137 @@ module.exports = async (program: IServeProgram): Promise => { } // Handle SSR & DSG Pages + let graphqlEnginePath: string | undefined + let pageSSRModule: string | undefined try { - const { GraphQLEngine } = require(path.join( - program.directory, - `.cache`, - `query-engine` - )) as typeof import("../schema/graphql-engine/entry") - const { getData, renderPageData, renderHTML } = require(path.join( - program.directory, - `.cache`, - `page-ssr` - )) as typeof import("../utils/page-ssr-module/entry") - const graphqlEngine = new GraphQLEngine({ - dbPath: path.join(program.directory, `.cache`, `data`, `datastore`), - }) + graphqlEnginePath = require.resolve( + path.join(program.directory, `.cache`, `query-engine`) + ) + pageSSRModule = require.resolve( + path.join(program.directory, `.cache`, `page-ssr`) + ) + } catch (error) { + // TODO: Handle case of engine not being generated + } - router.get( - `/page-data/:pagePath(*)/page-data.json`, - async (req, res, next) => { - const requestedPagePath = req.params.pagePath - if (!requestedPagePath) { - return void next() - } + if (graphqlEnginePath && pageSSRModule) { + try { + const { GraphQLEngine } = + require(graphqlEnginePath) as typeof import("../schema/graphql-engine/entry") + const { getData, renderPageData, renderHTML } = + require(pageSSRModule) as typeof import("../utils/page-ssr-module/entry") + const graphqlEngine = new GraphQLEngine({ + dbPath: path.join(program.directory, `.cache`, `data`, `datastore`), + }) + + router.get( + `/page-data/:pagePath(*)/page-data.json`, + async (req, res, next) => { + const requestedPagePath = req.params.pagePath + if (!requestedPagePath) { + return void next() + } + + const potentialPagePath = reverseFixedPagePath(requestedPagePath) + const page = graphqlEngine.findPageByPath(potentialPagePath) - const potentialPagePath = reverseFixedPagePath(requestedPagePath) - const page = graphqlEngine.findPageByPath(potentialPagePath) - - if (page && (page.mode === `DSG` || page.mode === `SSR`)) { - const requestActivity = report.phantomActivity( - `request for "${req.path}"` - ) - requestActivity.start() - try { - const spanContext = requestActivity.span.context() - const data = await getData({ - pathName: req.path, - graphqlEngine, - req, - spanContext, - }) - const results = await renderPageData({ data, spanContext }) - if (data.serverDataHeaders) { - for (const [name, value] of Object.entries( - data.serverDataHeaders - )) { - res.setHeader(name, value) + if (page && (page.mode === `DSG` || page.mode === `SSR`)) { + const requestActivity = report.phantomActivity( + `request for "${req.path}"` + ) + requestActivity.start() + try { + const spanContext = 
requestActivity.span.context() + const data = await getData({ + pathName: req.path, + graphqlEngine, + req, + spanContext, + }) + const results = await renderPageData({ data, spanContext }) + if (data.serverDataHeaders) { + for (const [name, value] of Object.entries( + data.serverDataHeaders + )) { + res.setHeader(name, value) + } } - } - if (page.mode === `SSR` && data.serverDataStatus) { - return void res.status(data.serverDataStatus).send(results) - } else { - return void res.send(results) + if (page.mode === `SSR` && data.serverDataStatus) { + return void res.status(data.serverDataStatus).send(results) + } else { + return void res.send(results) + } + } catch (e) { + report.error( + `Generating page-data for "${requestedPagePath}" / "${potentialPagePath}" failed.`, + e + ) + return res + .status(500) + .contentType(`text/plain`) + .send(`Internal server error.`) + } finally { + requestActivity.end() } - } catch (e) { - report.error( - `Generating page-data for "${requestedPagePath}" / "${potentialPagePath}" failed.`, - e - ) - return res - .status(500) - .contentType(`text/plain`) - .send(`Internal server error.`) - } finally { - requestActivity.end() } - } - return void next() - } - ) + return void next() + } + ) - router.use(async (req, res, next) => { - if (req.accepts(`html`)) { - const potentialPagePath = req.path - const page = graphqlEngine.findPageByPath(potentialPagePath) - if (page && (page.mode === `DSG` || page.mode === `SSR`)) { - const requestActivity = report.phantomActivity( - `request for "${req.path}"` - ) - requestActivity.start() - - try { - const spanContext = requestActivity.span.context() - const data = await getData({ - pathName: potentialPagePath, - graphqlEngine, - req, - spanContext, - }) - const results = await renderHTML({ data, spanContext }) - if (data.serverDataHeaders) { - for (const [name, value] of Object.entries( - data.serverDataHeaders - )) { - res.setHeader(name, value) + router.use(async (req, res, next) => { + if (req.accepts(`html`)) { + const potentialPagePath = req.path + const page = graphqlEngine.findPageByPath(potentialPagePath) + if (page && (page.mode === `DSG` || page.mode === `SSR`)) { + const requestActivity = report.phantomActivity( + `request for "${req.path}"` + ) + requestActivity.start() + + try { + const spanContext = requestActivity.span.context() + const data = await getData({ + pathName: potentialPagePath, + graphqlEngine, + req, + spanContext, + }) + const results = await renderHTML({ data, spanContext }) + if (data.serverDataHeaders) { + for (const [name, value] of Object.entries( + data.serverDataHeaders + )) { + res.setHeader(name, value) + } } - } - if (page.mode === `SSR` && data.serverDataStatus) { - return void res.status(data.serverDataStatus).send(results) - } else { - return void res.send(results) - } - } catch (e) { - report.error(`Rendering html for "${potentialPagePath}" failed.`, e) - return res.status(500).sendFile(`500.html`, { root }, err => { - if (err) { - res.contentType(`text/plain`).send(`Internal server error.`) + if (page.mode === `SSR` && data.serverDataStatus) { + return void res.status(data.serverDataStatus).send(results) + } else { + return void res.send(results) } - }) - } finally { - requestActivity.end() + } catch (e) { + report.error( + `Rendering html for "${potentialPagePath}" failed.`, + e + ) + return res.status(500).sendFile(`500.html`, { root }, err => { + if (err) { + res.contentType(`text/plain`).send(`Internal server error.`) + } + }) + } finally { + requestActivity.end() + } } } - } - 
return next() - }) - } catch (error) { - // TODO: Handle case of engine not being generated + return next() + }) + } catch (error) { + report.panic(`Error loading SSR/DSG engine`, error) + } } const matchPaths = await readMatchPaths(program) diff --git a/packages/gatsby/src/commands/types.ts b/packages/gatsby/src/commands/types.ts index 9f481af90fd90..45e9c62ad0b02 100644 --- a/packages/gatsby/src/commands/types.ts +++ b/packages/gatsby/src/commands/types.ts @@ -34,6 +34,8 @@ export interface IProgram { graphqlTracing?: boolean verbose?: boolean prefixPaths?: boolean + functionsPlatform?: string + functionsArch?: string setStore?: (store: Store) => void disablePlugins?: Array<{ name: string diff --git a/packages/gatsby/src/schema/graphql-engine/bootstrap.ts b/packages/gatsby/src/schema/graphql-engine/bootstrap.ts index 8b7c0c1033572..772b574dda031 100644 --- a/packages/gatsby/src/schema/graphql-engine/bootstrap.ts +++ b/packages/gatsby/src/schema/graphql-engine/bootstrap.ts @@ -1,5 +1,6 @@ // "engines-fs-provider" must be first import, as it sets up global // fs and this need to happen before anything else tries to import fs +import "./platform-and-arch-check" import "../../utils/engines-fs-provider" import { getCache as getGatsbyCache } from "../../utils/get-cache" diff --git a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts index 0a7e070ca5818..75ee8e6d1f21a 100644 --- a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts +++ b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts @@ -348,14 +348,20 @@ export async function createGraphqlEngineBundle( arch: process.arch, } - const functionsTarget: IPlatformAndArch = state.adapter.instance - ? { - // TODO: allow to pass target from adapter - // for now this seems most likely target - platform: `linux`, - arch: `x64`, - } - : currentTarget + const functionsTarget: IPlatformAndArch = { + platform: + process.env.GATSBY_FUNCTIONS_PLATFORM ?? + state.program.functionsPlatform ?? + state.adapter.config.functionsPlatform ?? + currentTarget.platform, + arch: + process.env.GATSBY_FUNCTIONS_ARCH ?? + state.program.functionsArch ?? + state.adapter.config.functionsArch ?? 
+ currentTarget.arch, + } + + console.log({ functionsTarget, currentTarget }) const dynamicAliases: Record = {} let forcedLmdbBinaryModule: string | undefined = undefined @@ -373,9 +379,11 @@ export async function createGraphqlEngineBundle( ) if (!lmdbPackageInfo) { + // TODO: better error / structured logging throw new Error(`no lmdb for target`) } else if (functionsTarget.platform === `linux`) { if (lmdbPackageInfo.needToInstall) { + // TODO: better error / structured logging throw new Error(`no lmdb for target`) } @@ -384,6 +392,7 @@ export async function createGraphqlEngineBundle( if (sharpPackageInfo) { if (sharpPackageInfo.needToInstall) { + // TODO: better error / structured logging throw new Error(`no sharp for target`) } dynamicAliases[`sharp$`] = sharpPackageInfo.packageLocation @@ -553,6 +562,12 @@ export async function createGraphqlEngineBundle( "process.env.GATSBY_SLICES": JSON.stringify( !!process.env.GATSBY_SLICES ), + "process.env.GATSBY_FUNCTIONS_PLATFORM": JSON.stringify( + functionsTarget.platform + ), + "process.env.GATSBY_FUNCTIONS_ARCH": JSON.stringify( + functionsTarget.arch + ), }), process.env.GATSBY_WEBPACK_LOGGING?.includes(`query-engine`) && new WebpackLoggingPlugin(rootDir, reporter, isVerbose), @@ -613,33 +628,35 @@ export async function createGraphqlEngineBundle( iterateModules(stats.compilation.modules, stats.compilation) } - const binaryFixingPromises: Array> = [] - // sigh - emitAsset used by relocator seems to corrupt binaries - // resulting in "ELF file's phentsize not the expected size" errors - // - see size diff - // > find . -name node.abi83.glibc.node - // ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node - // ./.cache/query-engine/assets/node.abi83.glibc.node - // > ls -al ./.cache/query-engine/assets/node.abi83.glibc.node - // -rw-r--r-- 1 misiek 197121 1285429 Mar 14 11:36 ./.cache/query-engine/assets/node.abi83.glibc.node - // > ls -al ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node - // -rw-r--r-- 1 misiek 197121 693544 Mar 14 11:35 ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node - // so this tries to fix it by straight copying it over - for (const asset of ( - stats?.compilation?.assetsInfo ?? new Map() - ).keys()) { - if (asset?.endsWith(`.node`)) { - const targetRelPath = path.posix.relative(`assets`, asset) - const assetMeta = getAssetMeta(targetRelPath, stats?.compilation) - const sourcePath = assetMeta?.path - if (sourcePath) { - const dist = path.join(outputDir, asset) - binaryFixingPromises.push(fs.copyFile(sourcePath, dist)) + if (!isEqual(functionsTarget, currentTarget)) { + const binaryFixingPromises: Array> = [] + // sigh - emitAsset used by relocator seems to corrupt binaries + // resulting in "ELF file's phentsize not the expected size" errors + // - see size diff + // > find . 
-name node.abi83.glibc.node + // ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node + // ./.cache/query-engine/assets/node.abi83.glibc.node + // > ls -al ./.cache/query-engine/assets/node.abi83.glibc.node + // -rw-r--r-- 1 misiek 197121 1285429 Mar 14 11:36 ./.cache/query-engine/assets/node.abi83.glibc.node + // > ls -al ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node + // -rw-r--r-- 1 misiek 197121 693544 Mar 14 11:35 ./.cache/internal-packages/node_modules/@lmdb/lmdb-linux-x64/node.abi83.glibc.node + // so this tries to fix it by straight copying it over + for (const asset of ( + stats?.compilation?.assetsInfo ?? new Map() + ).keys()) { + if (asset?.endsWith(`.node`)) { + const targetRelPath = path.posix.relative(`assets`, asset) + const assetMeta = getAssetMeta(targetRelPath, stats?.compilation) + const sourcePath = assetMeta?.path + if (sourcePath) { + const dist = path.join(outputDir, asset) + binaryFixingPromises.push(fs.copyFile(sourcePath, dist)) + } } } - } - await Promise.all(binaryFixingPromises) + await Promise.all(binaryFixingPromises) + } compiler.close(closeErr => { if (err) { diff --git a/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts b/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts new file mode 100644 index 0000000000000..663c0e758cffe --- /dev/null +++ b/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts @@ -0,0 +1,8 @@ +if ( + process.env.GATSBY_FUNCTIONS_PLATFORM !== process.platform || + process.env.GATSBY_FUNCTIONS_ARCH !== process.arch +) { + throw new Error( + `Incompatible function executing environment. Function was built for "${process.env.GATSBY_FUNCTIONS_PLATFORM}/${process.env.GATSBY_FUNCTIONS_ARCH}" but is executing on "${process.platform}/${process.arch}".` + ) +} diff --git a/packages/gatsby/src/utils/adapter/types.ts b/packages/gatsby/src/utils/adapter/types.ts index c23dee9667294..6561ad1bc8463 100644 --- a/packages/gatsby/src/utils/adapter/types.ts +++ b/packages/gatsby/src/utils/adapter/types.ts @@ -201,6 +201,14 @@ export interface IAdapterConfig { * example for the Netlify adapter. 
*/ fileCDNUrlGeneratorModulePath?: string + /** + * TODO: add description + */ + functionsPlatform?: string + /** + * TODO: add description + */ + functionsArch?: string } type WithRequired = T & { [P in K]-?: T[P] } From 407f6bef75cb1792f69094a59e545a1a79ff33ed Mon Sep 17 00:00:00 2001 From: pieh Date: Fri, 15 Mar 2024 15:41:51 +0100 Subject: [PATCH 08/27] refactor: move some utility functions around, cleanup standalone-regenrate, restore engine validation, add structured error and better error messages --- .../src/structured-errors/error-map.ts | 6 + packages/gatsby/src/commands/build.ts | 22 +-- packages/gatsby/src/commands/serve.ts | 6 +- .../schema/graphql-engine/bundle-webpack.ts | 147 +++++++++--------- .../graphql-engine/platform-and-arch-check.ts | 6 +- .../graphql-engine/standalone-regenerate.ts | 34 ++-- packages/gatsby/src/utils/engines-helpers.ts | 31 ++++ .../src/utils/validate-engines/index.ts | 41 ++++- 8 files changed, 175 insertions(+), 118 deletions(-) diff --git a/packages/gatsby-cli/src/structured-errors/error-map.ts b/packages/gatsby-cli/src/structured-errors/error-map.ts index 1ec3757c1cf8a..c0aea02ca5318 100644 --- a/packages/gatsby-cli/src/structured-errors/error-map.ts +++ b/packages/gatsby-cli/src/structured-errors/error-map.ts @@ -83,6 +83,12 @@ const errors: Record = { level: Level.ERROR, category: ErrorCategory.USER, }, + "98051": { + text: (): string => `Built Rendering Engines failed to load.`, + type: Type.ENGINE_EXECUTION, + level: Level.ERROR, + category: ErrorCategory.UNKNOWN, + }, "98123": { text: (context): string => `${context.stageLabel} failed\n\n${ diff --git a/packages/gatsby/src/commands/build.ts b/packages/gatsby/src/commands/build.ts index 7ba78910b9eac..489d99ada5c6f 100644 --- a/packages/gatsby/src/commands/build.ts +++ b/packages/gatsby/src/commands/build.ts @@ -66,7 +66,7 @@ import { getPageMode, preparePageTemplateConfigs, } from "../utils/page-mode" -// import { validateEngines } from "../utils/validate-engines" +import { validateEnginesWithActivity } from "../utils/validate-engines" import { constructConfigObject } from "../utils/gatsby-cloud-config" import { waitUntilWorkerJobsAreComplete } from "../utils/jobs/worker-messaging" import { getSSRChunkHashes } from "../utils/webpack/get-ssr-chunk-hashes" @@ -294,23 +294,9 @@ module.exports = async function build( pageConfigActivity.end() } - // TODO: skip this only when target env is different than current one - // if (shouldGenerateEngines()) { - // const validateEnginesActivity = report.activityTimer( - // `Validating Rendering Engines`, - // { - // parentSpan: buildSpan, - // } - // ) - // validateEnginesActivity.start() - // try { - // await validateEngines(store.getState().program.directory) - // } catch (error) { - // validateEnginesActivity.panic({ id: `98001`, context: {}, error }) - // } finally { - // validateEnginesActivity.end() - // } - // } + if (shouldGenerateEngines()) { + await validateEnginesWithActivity(program.directory, buildSpan) + } const cacheActivity = report.activityTimer(`Caching Webpack compilations`, { parentSpan: buildSpan, diff --git a/packages/gatsby/src/commands/serve.ts b/packages/gatsby/src/commands/serve.ts index e7bcfa18a5c4d..88be01d145430 100644 --- a/packages/gatsby/src/commands/serve.ts +++ b/packages/gatsby/src/commands/serve.ts @@ -313,7 +313,11 @@ module.exports = async (program: IServeProgram): Promise => { return next() }) } catch (error) { - report.panic(`Error loading SSR/DSG engine`, error) + report.panic({ + id: `98051`, + error, + context: 
{}, + }) } } diff --git a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts index 75ee8e6d1f21a..20d10b9a852ed 100644 --- a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts +++ b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts @@ -18,6 +18,11 @@ import { store } from "../../redux" import { PackageJson } from "../../.." import { slash } from "gatsby-core-utils/path" import { isEqual } from "lodash" +import { + IPlatformAndArch, + getCurrentPlatformAndTarget, + getFunctionsTargetPlatformAndTarget, +} from "../../utils/engines-helpers" type Reporter = typeof reporter @@ -45,45 +50,44 @@ function getApisToRemoveForQueryEngine(): Array { return apisToRemove } -const getInternalPackagesCacheDir = (): string => - path.posix.join(slash(process.cwd()), `.cache`, `internal-packages`) +const getInternalPackagesCacheDir = ( + functionsTarget: IPlatformAndArch +): string => + path.posix.join( + slash(process.cwd()), + `.cache`, + `internal-packages`, + `${functionsTarget.platform}-${functionsTarget.arch}` + ) // Create a directory and JS module where we install internally used packages const createInternalPackagesCacheDir = async ( - lambdaTarget: IPlatformAndArch + functionsTarget: IPlatformAndArch ): Promise => { - const cacheDir = getInternalPackagesCacheDir() + const cacheDir = getInternalPackagesCacheDir(functionsTarget) await fs.ensureDir(cacheDir) const packageJsonPath = path.join(cacheDir, `package.json`) - if (fs.existsSync(packageJsonPath)) { - const existingPackageJson = JSON.parse( - fs.readFileSync(packageJsonPath, `utf-8`) - ) - if (isEqual(existingPackageJson.lambdaTarget, lambdaTarget)) { - // if we have already package.json and it is against same lambda target, we can reuse it - // and potentially packages installed in it - return - } + if (!fs.existsSync(packageJsonPath)) { + await fs.emptyDir(cacheDir) + + await fs.outputJson(packageJsonPath, { + name: `gatsby-internal-packages`, + description: `This directory contains internal packages installed by Gatsby used to comply with the current platform requirements`, + version: `1.0.0`, + private: true, + author: `Gatsby`, + license: `MIT`, + functionsTarget, + }) } - - await fs.emptyDir(cacheDir) - - await fs.outputJson(packageJsonPath, { - name: `gatsby-internal-packages`, - description: `This directory contains internal packages installed by Gatsby used to comply with the current platform requirements`, - version: `1.0.0`, - private: true, - author: `Gatsby`, - license: `MIT`, - lambdaTarget, - }) } function getLMDBBinaryFromSiteLocation( lmdbPackageName: string, - version: string + version: string, + functionsTarget: IPlatformAndArch ): string | undefined { // Read lmdb's package.json, go through its optional depedencies and validate if there's a prebuilt lmdb module with a compatible binary to our platform and arch let packageJson: PackageJson @@ -102,7 +106,9 @@ function getLMDBBinaryFromSiteLocation( if ( !Object.keys(optionalDependencies ?? 
{}).find(p => p === lmdbPackageName) ) { - throw new Error(`No optional dependencies in lmdb (?)`) + throw new Error( + `Target platform/arch for functions execution (${functionsTarget.platform}/${functionsTarget.arch}) is not supported.` + ) } return getPackageLocationFromRequireContext( slash(require.resolve(`lmdb`)), @@ -137,11 +143,6 @@ function getPackageLocationFromRequireContext( } } -interface IPlatformAndArch { - platform: string - arch: string -} - interface ILMDBBinaryPackageStatusBase { packageName: string needToInstall: boolean @@ -164,9 +165,10 @@ type IBinaryPackageStatus = | ILMDBBinaryPackageStatusNeedAlternative function checkIfInstalledInInternalPackagesCache( - packageStatus: IBinaryPackageStatus + packageStatus: IBinaryPackageStatus, + functionsTarget: IPlatformAndArch ): IBinaryPackageStatus { - const cacheDir = getInternalPackagesCacheDir() + const cacheDir = getInternalPackagesCacheDir(functionsTarget) const packageLocationFromInternalPackageCache = getPackageLocationFromRequireContext( @@ -197,14 +199,15 @@ function checkIfInstalledInInternalPackagesCache( // Install lmdb's native system module under our internal cache if we detect the current installation // isn't using the pre-build binaries function checkIfNeedToInstallMissingLmdb( - lambdaTarget: IPlatformAndArch + functionsTarget: IPlatformAndArch ): IBinaryPackageStatus { // lmdb module with prebuilt binaries for target platform - const lmdbPackageName = `@lmdb/lmdb-${lambdaTarget.platform}-${lambdaTarget.arch}` + const lmdbPackageName = `@lmdb/lmdb-${functionsTarget.platform}-${functionsTarget.arch}` const lmdbBinaryFromSiteLocation = getLMDBBinaryFromSiteLocation( lmdbPackageName, - dependencies.lmdb + dependencies.lmdb, + functionsTarget ) const sharedPackageStatus: ILMDBBinaryPackageStatusNeedAlternative = { @@ -221,26 +224,32 @@ function checkIfNeedToInstallMissingLmdb( } } - return checkIfInstalledInInternalPackagesCache(sharedPackageStatus) + return checkIfInstalledInInternalPackagesCache( + sharedPackageStatus, + functionsTarget + ) } function checkIfNeedToInstallMissingSharp( - lambdaTarget: IPlatformAndArch, + functionsTarget: IPlatformAndArch, currentTarget: IPlatformAndArch ): IBinaryPackageStatus | undefined { try { // check if shapr is resolvable const { version: sharpVersion } = require(`sharp/package.json`) - if (isEqual(lambdaTarget, currentTarget)) { + if (isEqual(functionsTarget, currentTarget)) { return undefined } - return checkIfInstalledInInternalPackagesCache({ - needToInstall: true, - packageName: `sharp`, - packageVersion: sharpVersion, - }) + return checkIfInstalledInInternalPackagesCache( + { + needToInstall: true, + packageName: `sharp`, + packageVersion: sharpVersion, + }, + functionsTarget + ) } catch (e) { return undefined } @@ -248,7 +257,7 @@ function checkIfNeedToInstallMissingSharp( async function installMissing( packages: Array, - lambdaTarget: IPlatformAndArch + functionsTarget: IPlatformAndArch ): Promise> { function shouldInstall( p: IBinaryPackageStatus | undefined @@ -262,16 +271,16 @@ async function installMissing( return packages } - await createInternalPackagesCacheDir(lambdaTarget) + await createInternalPackagesCacheDir(functionsTarget) - const cacheDir = getInternalPackagesCacheDir() + const cacheDir = getInternalPackagesCacheDir(functionsTarget) const options: ExecaOptions = { stderr: `inherit`, cwd: cacheDir, env: { - npm_config_arch: lambdaTarget.arch, - npm_config_platform: lambdaTarget.platform, + npm_config_arch: functionsTarget.arch, + npm_config_platform: 
functionsTarget.platform, }, } @@ -343,25 +352,8 @@ export async function createGraphqlEngineBundle( require.resolve(`gatsby-plugin-typescript`) ) - const currentTarget: IPlatformAndArch = { - platform: process.platform, - arch: process.arch, - } - - const functionsTarget: IPlatformAndArch = { - platform: - process.env.GATSBY_FUNCTIONS_PLATFORM ?? - state.program.functionsPlatform ?? - state.adapter.config.functionsPlatform ?? - currentTarget.platform, - arch: - process.env.GATSBY_FUNCTIONS_ARCH ?? - state.program.functionsArch ?? - state.adapter.config.functionsArch ?? - currentTarget.arch, - } - - console.log({ functionsTarget, currentTarget }) + const currentTarget = getCurrentPlatformAndTarget() + const functionsTarget = getFunctionsTargetPlatformAndTarget() const dynamicAliases: Record = {} let forcedLmdbBinaryModule: string | undefined = undefined @@ -379,12 +371,14 @@ export async function createGraphqlEngineBundle( ) if (!lmdbPackageInfo) { - // TODO: better error / structured logging - throw new Error(`no lmdb for target`) + throw new Error(`Failed to find required LMDB binary`) } else if (functionsTarget.platform === `linux`) { + // function execution platform is primarily linux, which is tested the most, so we only force that specific binary + // to not cause untested code paths if (lmdbPackageInfo.needToInstall) { - // TODO: better error / structured logging - throw new Error(`no lmdb for target`) + throw new Error( + `Failed to locate or install LMDB binary for functions execution platform/arch (${functionsTarget.platform}/${functionsTarget.arch})` + ) } forcedLmdbBinaryModule = `${lmdbPackageInfo.packageLocation}/node.abi83.glibc.node` @@ -392,8 +386,9 @@ export async function createGraphqlEngineBundle( if (sharpPackageInfo) { if (sharpPackageInfo.needToInstall) { - // TODO: better error / structured logging - throw new Error(`no sharp for target`) + throw new Error( + `Failed to locate or install Sharp binary for functions execution platform/arch (${functionsTarget.platform}/${functionsTarget.arch})` + ) } dynamicAliases[`sharp$`] = sharpPackageInfo.packageLocation } @@ -419,6 +414,7 @@ export async function createGraphqlEngineBundle( buildDependencies: { config: [__filename], }, + version: JSON.stringify(functionsTarget), }, // those are required in some runtime paths, but we don't need them externals: [ @@ -553,7 +549,6 @@ export async function createGraphqlEngineBundle( plugins: [ new webpack.EnvironmentPlugin([`GATSBY_CLOUD_IMAGE_CDN`]), new webpack.DefinePlugin({ - // "process.env.GATSBY_LOGGER": JSON.stringify(`yurnalist`), "process.env.GATSBY_SKIP_WRITING_SCHEMA_TO_FILE": `true`, "process.env.NODE_ENV": JSON.stringify(`production`), SCHEMA_SNAPSHOT: JSON.stringify(schemaSnapshotString), diff --git a/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts b/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts index 663c0e758cffe..d608d6094f25c 100644 --- a/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts +++ b/packages/gatsby/src/schema/graphql-engine/platform-and-arch-check.ts @@ -3,6 +3,10 @@ if ( process.env.GATSBY_FUNCTIONS_ARCH !== process.arch ) { throw new Error( - `Incompatible function executing environment. Function was built for "${process.env.GATSBY_FUNCTIONS_PLATFORM}/${process.env.GATSBY_FUNCTIONS_ARCH}" but is executing on "${process.platform}/${process.arch}".` + `Incompatible DSG/SSR executing environment. 
Function was built for "${process.env.GATSBY_FUNCTIONS_PLATFORM}/${process.env.GATSBY_FUNCTIONS_ARCH}" but is executing on "${process.platform}/${process.arch}".` +
+      (process.env.gatsby_executing_command === `serve`
+        ? `\n\nIf you are trying to run the DSG/SSR engine locally, consider using the experimental utility to rebuild functions for your local platform:\n\nnode node_modules/gatsby/dist/schema/graphql-engine/standalone-regenerate.js`
+        : ``) +
+      `\n\nTo generate engines for "${process.platform}/${process.arch}" run 'gatsby build --functions-platform=${process.platform} --functions-arch=${process.arch}' or run 'gatsby build' with the following environment variables:\n\nGATSBY_FUNCTIONS_PLATFORM=${process.platform}\nGATSBY_FUNCTIONS_ARCH=${process.arch}`
   )
 }
diff --git a/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts b/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts
index a76ef7bc14a96..8bbb8ae92a3e0 100644
--- a/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts
+++ b/packages/gatsby/src/schema/graphql-engine/standalone-regenerate.ts
@@ -1,16 +1,19 @@
 #!/usr/bin/env node
 
 /*
-this is used for development purposes only
-to be able to run `gatsby build` once to source data
-and print schema and then just rebundle graphql-engine
+This is used mostly for development purposes, but it can also be used to
+regenerate just the engines for the local platform/arch if a previous full
+build was done to deploy to a platform with a different arch/platform.
+
+For development purposes it is used to be able to run `gatsby build` once to
+source data and print schema and then just rebundle graphql-engine
 with source file changes and test re-built engine quickly
 
 Usage:
 There need to be at least one successful `gatsby build`
 before starting to use this script (warm up datastore,
 generate "page-ssr" bundle).
Once that's done you can -run following command in test site directory: +run following command in site directory: ```shell node node_modules/gatsby/dist/schema/graphql-engine/standalone-regenerate.js @@ -23,18 +26,18 @@ import reporter from "gatsby-cli/lib/reporter" import { loadConfigAndPlugins } from "../../utils/worker/child/load-config-and-plugins" import * as fs from "fs-extra" import { store } from "../../redux" -import { validateEngines } from "../../utils/validate-engines" +import { validateEnginesWithActivity } from "../../utils/validate-engines" async function run(): Promise { process.env.GATSBY_SLICES = `1` // load config - console.log(`loading config and plugins`) + reporter.verbose(`loading config and plugins`) await loadConfigAndPlugins({ siteDirectory: process.cwd(), }) try { - console.log(`clearing webpack cache\n\n`) + reporter.verbose(`clearing webpack cache`) // get rid of cache if it exist await fs.remove(process.cwd() + `/.cache/webpack/query-engine`) await fs.remove(process.cwd() + `/.cache/webpack/page-ssr`) @@ -46,7 +49,7 @@ async function run(): Promise { // recompile const buildActivityTimer = reporter.activityTimer( - `Building Rendering Engines` + `(Re)Building Rendering Engines` ) try { buildActivityTimer.start() @@ -67,20 +70,9 @@ async function run(): Promise { buildActivityTimer.end() } - // validate - const validateEnginesActivity = reporter.activityTimer( - `Validating Rendering Engines` - ) - validateEnginesActivity.start() - try { - await validateEngines(process.cwd()) - } catch (error) { - validateEnginesActivity.panic({ id: `98001`, context: {}, error }) - } finally { - validateEnginesActivity.end() - } + await validateEnginesWithActivity(process.cwd()) - console.log(`DONE`) + reporter.info(`Rebuilding Rendering Engines finished`) } run() diff --git a/packages/gatsby/src/utils/engines-helpers.ts b/packages/gatsby/src/utils/engines-helpers.ts index 20c7814986339..0e01e5e6eb7de 100644 --- a/packages/gatsby/src/utils/engines-helpers.ts +++ b/packages/gatsby/src/utils/engines-helpers.ts @@ -33,3 +33,34 @@ function getCDNObfuscatedPath(path: string): string { } export const LmdbOnCdnPath = getCDNObfuscatedPath(`data.mdb`) + +export interface IPlatformAndArch { + platform: string + arch: string +} + +const currentTarget: IPlatformAndArch = { + platform: process.platform, + arch: process.arch, +} + +export function getCurrentPlatformAndTarget(): IPlatformAndArch { + return currentTarget +} + +export function getFunctionsTargetPlatformAndTarget(): IPlatformAndArch { + const state = store.getState() + + return { + platform: + process.env.GATSBY_FUNCTIONS_PLATFORM ?? + state.program.functionsPlatform ?? + state.adapter.config.functionsPlatform ?? + currentTarget.platform, + arch: + process.env.GATSBY_FUNCTIONS_ARCH ?? + state.program.functionsArch ?? + state.adapter.config.functionsArch ?? 
+      currentTarget.arch,
+  }
+}
diff --git a/packages/gatsby/src/utils/validate-engines/index.ts b/packages/gatsby/src/utils/validate-engines/index.ts
index 276e7c780c8f5..50f3a8b3f8b43 100644
--- a/packages/gatsby/src/utils/validate-engines/index.ts
+++ b/packages/gatsby/src/utils/validate-engines/index.ts
@@ -1,6 +1,45 @@
+import reporter from "gatsby-cli/lib/reporter"
 import { WorkerPool } from "gatsby-worker"
+import { isEqual } from "lodash"
+import type { Span } from "opentracing"
+import {
+  getCurrentPlatformAndTarget,
+  getFunctionsTargetPlatformAndTarget,
+} from "../engines-helpers"
 
-export async function validateEngines(directory: string): Promise<void> {
+export async function validateEnginesWithActivity(
+  directory: string,
+  buildSpan?: Span
+): Promise<void> {
+  if (
+    !isEqual(
+      getCurrentPlatformAndTarget(),
+      getFunctionsTargetPlatformAndTarget()
+    )
+  ) {
+    reporter.info(
+      `Skipping Rendering Engines validation as they are built for a different platform and/or architecture`
+    )
+    return
+  }
+
+  const validateEnginesActivity = reporter.activityTimer(
+    `Validating Rendering Engines`,
+    {
+      parentSpan: buildSpan,
+    }
+  )
+  validateEnginesActivity.start()
+  try {
+    await validateEngines(directory)
+  } catch (error) {
+    validateEnginesActivity.panic({ id: `98001`, context: {}, error })
+  } finally {
+    validateEnginesActivity.end()
+  }
+}
+
+async function validateEngines(directory: string): Promise<void> {
   const worker = new WorkerPool(
     require.resolve(`./child`),
     {

From e2bdeac1a3521cd6d50f54c1d826fc0af66d4347 Mon Sep 17 00:00:00 2001
From: pieh
Date: Fri, 15 Mar 2024 15:46:42 +0100
Subject: [PATCH 09/27] chore: add jsdocs description for functionsPlatform
 and functionsArch optional config values passed by adapter

---
 packages/gatsby/src/utils/adapter/types.ts | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/packages/gatsby/src/utils/adapter/types.ts b/packages/gatsby/src/utils/adapter/types.ts
index 6561ad1bc8463..3536ae0b2b8bf 100644
--- a/packages/gatsby/src/utils/adapter/types.ts
+++ b/packages/gatsby/src/utils/adapter/types.ts
@@ -202,11 +202,15 @@ export interface IAdapterConfig {
    */
   fileCDNUrlGeneratorModulePath?: string
   /**
-   * TODO: add description
+   * The platform bundled functions will execute on. Usually should be `linux`.
+   * This will be used if the user didn't specify the `GATSBY_FUNCTIONS_PLATFORM` environment variable
+   * or the `--functions-platform` CLI flag. If none is defined, the current platform (process.platform) will be used.
    */
   functionsPlatform?: string
   /**
-   * TODO: add description
+   * The architecture bundled functions will execute on. Usually should be `x64`.
+   * This will be used if the user didn't specify the `GATSBY_FUNCTIONS_ARCH` environment variable
+   * or the `--functions-arch` CLI flag. If none is defined, the current arch (process.arch) will be used.
*/ functionsArch?: string } From 95c7a1fd5e6b2ac8507a54eeeeafe12eefd4b848 Mon Sep 17 00:00:00 2001 From: pieh Date: Fri, 15 Mar 2024 16:34:04 +0100 Subject: [PATCH 10/27] chore: make sure fs wrapper is first --- packages/gatsby-cli/src/create-cli.ts | 4 ++-- packages/gatsby/src/schema/graphql-engine/bootstrap.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gatsby-cli/src/create-cli.ts b/packages/gatsby-cli/src/create-cli.ts index b7b6bba0da1b7..ea7619a786b13 100644 --- a/packages/gatsby-cli/src/create-cli.ts +++ b/packages/gatsby-cli/src/create-cli.ts @@ -276,12 +276,12 @@ function buildLocalCommands(cli: yargs.Argv, isLocalSite: boolean): void { }) .option(`functions-platform`, { type: `string`, - describe: `The platform bundled function will execute on. Defaults to current platform or settings provided by used adapter.`, + describe: `The platform bundled functions will execute on. Defaults to current platform or settings provided by used adapter.`, default: process.platform, }) .option(`functions-arch`, { type: `string`, - describe: `The architecture bundled function will execute on. Defaults to current architecture or settings provided by used adapter.`, + describe: `The architecture bundled functions will execute on. Defaults to current architecture or settings provided by used adapter.`, default: process.arch, }), handler: handlerP( diff --git a/packages/gatsby/src/schema/graphql-engine/bootstrap.ts b/packages/gatsby/src/schema/graphql-engine/bootstrap.ts index 772b574dda031..e4e4b9cf81e72 100644 --- a/packages/gatsby/src/schema/graphql-engine/bootstrap.ts +++ b/packages/gatsby/src/schema/graphql-engine/bootstrap.ts @@ -1,7 +1,7 @@ // "engines-fs-provider" must be first import, as it sets up global // fs and this need to happen before anything else tries to import fs -import "./platform-and-arch-check" import "../../utils/engines-fs-provider" +import "./platform-and-arch-check" import { getCache as getGatsbyCache } from "../../utils/get-cache" From 96217dfb5bc272d1912a127245b4e62b8719c674 Mon Sep 17 00:00:00 2001 From: pieh Date: Fri, 15 Mar 2024 17:10:38 +0100 Subject: [PATCH 11/27] fix: actually use values reported by adapter --- packages/gatsby-cli/src/create-cli.ts | 2 -- packages/gatsby/src/utils/adapter/manager.ts | 2 ++ 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gatsby-cli/src/create-cli.ts b/packages/gatsby-cli/src/create-cli.ts index ea7619a786b13..2c3d605b2a18d 100644 --- a/packages/gatsby-cli/src/create-cli.ts +++ b/packages/gatsby-cli/src/create-cli.ts @@ -277,12 +277,10 @@ function buildLocalCommands(cli: yargs.Argv, isLocalSite: boolean): void { .option(`functions-platform`, { type: `string`, describe: `The platform bundled functions will execute on. Defaults to current platform or settings provided by used adapter.`, - default: process.platform, }) .option(`functions-arch`, { type: `string`, describe: `The architecture bundled functions will execute on. 
Defaults to current architecture or settings provided by used adapter.`, - default: process.arch, }), handler: handlerP( getCommandHandler( diff --git a/packages/gatsby/src/utils/adapter/manager.ts b/packages/gatsby/src/utils/adapter/manager.ts index 0178da120c4f9..7879565715030 100644 --- a/packages/gatsby/src/utils/adapter/manager.ts +++ b/packages/gatsby/src/utils/adapter/manager.ts @@ -286,6 +286,8 @@ export async function initAdapterManager(): Promise { deployURL: configFromAdapter?.deployURL, supports: configFromAdapter?.supports, pluginsToDisable: configFromAdapter?.pluginsToDisable ?? [], + functionsArch: configFromAdapter?.functionsArch, + functionsPlatform: configFromAdapter?.functionsPlatform, } }, } From 6b9ecdeb7beb36d3bf405e6da4fdc5f5bafa4c2e Mon Sep 17 00:00:00 2001 From: pieh Date: Fri, 15 Mar 2024 17:11:38 +0100 Subject: [PATCH 12/27] test: try to setup windows adapters smoke test --- .circleci/config.yml | 44 +++++++++++++++++++++++++++++++ e2e-tests/adapters/package.json | 1 + e2e-tests/adapters/smoke-test.mjs | 24 +++++++++++++++++ 3 files changed, 69 insertions(+) create mode 100644 e2e-tests/adapters/smoke-test.mjs diff --git a/.circleci/config.yml b/.circleci/config.yml index 986e966a16a2d..81786970f6db9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -580,6 +580,47 @@ jobs: - store_test_results: path: ./test-results/jest-node/ + windows_adapters_smoke: + executor: + name: win/default + shell: powershell.exe + steps: + - checkout + - run: + command: ./scripts/assert-changed-files.sh "packages/*|(e2e|integration)-tests/*|.circleci/*|scripts/e2e-test.sh|yarn.lock" + shell: bash.exe + - <<: *attach_to_bootstrap + - run: + name: Install node 18.0.0 and yarn + command: | + nvm install 18.0.0 + nvm alias default 18.0.0 + nvm use 18.0.0 + choco install yarn -y + - run: + name: Rebuild packages for windows + command: | + Remove-Item -Recurse -Force -Path "node_modules/sharp/" + yarn + - run: + command: mkdir -p /tmp/e2e-tests/ + shell: bash.exe + - run: + command: cp -r ./e2e-test/adapters /tmp/e2e-tests/adapters + working_directory: ~/project + shell: bash.exe + - run: # Set project dir + command: node ./packages/gatsby-dev-cli/dist/index.js --set-path-to-repo . 
+ shell: bash.exe + - run: # Copy over packages + command: node ~/project/packages/gatsby-dev-cli/dist/index.js --force-install --scan-once + working_directory: /tmp/e2e-tests/adapters + shell: bash.exe + - run: # run smoke test + command: node scripts/deploy-and-run/netlify.mjs test:smoke + working_directory: /tmp/e2e-tests/adapters + shell: bash.exe + workflows: version: 2 @@ -611,6 +652,9 @@ workflows: requires: - lint - bootstrap + - windows_adapters_smoke: + requires: + - bootstrap #todo - this should require windows_unit_tests, but for now let's run it earlier to setup job correctly - unit_tests_node18: <<: *ignore_docs requires: diff --git a/e2e-tests/adapters/package.json b/e2e-tests/adapters/package.json index fc0ab2364df08..4fe0dc05c56ce 100644 --- a/e2e-tests/adapters/package.json +++ b/e2e-tests/adapters/package.json @@ -17,6 +17,7 @@ "test:template:debug": "cross-env-shell CYPRESS_GROUP_NAME=\"adapter:$ADAPTER / trailingSlash:${TRAILING_SLASH:-always} / pathPrefix:${PATH_PREFIX:--}\" TRAILING_SLASH=$TRAILING_SLASH PATH_PREFIX=$PATH_PREFIX npm run cy:open -- --config-file \"cypress/configs/$ADAPTER.ts\" --env TRAILING_SLASH=$TRAILING_SLASH,PATH_PREFIX=$PATH_PREFIX", "test:debug": "npm-run-all -s build:debug ssat:debug", "test:netlify": "cross-env TRAILING_SLASH=always node scripts/deploy-and-run/netlify.mjs test:template", + "test:smoke": "node smoke-test.mjs", "test:netlify:debug": "cross-env TRAILING_SLASH=always node scripts/deploy-and-run/netlify.mjs test:template:debug", "test:netlify:prefix-never": "cross-env TRAILING_SLASH=never PATH_PREFIX=/prefix node scripts/deploy-and-run/netlify.mjs test:template", "test:netlify:prefix-never:debug": "cross-env TRAILING_SLASH=never PATH_PREFIX=/prefix node scripts/deploy-and-run/netlify.mjs test:template:debug", diff --git a/e2e-tests/adapters/smoke-test.mjs b/e2e-tests/adapters/smoke-test.mjs new file mode 100644 index 0000000000000..901a8f6f35505 --- /dev/null +++ b/e2e-tests/adapters/smoke-test.mjs @@ -0,0 +1,24 @@ +import assert from "node:assert" + +{ + // check index page (SSG) + const response = await fetch(process.env.DEPLOY_URL) + assert.equal(response.status, 200) + + const body = await response.text() + assert.match(body, /
<h1>
Adapters<\/h1>/) + assert.match(body, /]*>Adapters E2E<\/title>/) +} + +{ + // check SSR page + const response = await fetch( + process.env.DEPLOY_URL + `/routes/ssr/remote-file/` + ) + assert.equal(response.status, 200) + + const body = await response.text() + // inline css for placeholder - this tests both LMDB and SHARP + // (LMDB because of page query and sharp because page query will use sharp to generate placeholder values) + assert.match(body, /background-color:rgb\(232,184,8\)/) +} From a09a65d5e8bc326b9815713def685458e5d8d3d0 Mon Sep 17 00:00:00 2001 From: pieh Date: Fri, 15 Mar 2024 18:03:33 +0100 Subject: [PATCH 13/27] test: typo --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 81786970f6db9..6b08bb38f884e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -606,7 +606,7 @@ jobs: command: mkdir -p /tmp/e2e-tests/ shell: bash.exe - run: - command: cp -r ./e2e-test/adapters /tmp/e2e-tests/adapters + command: cp -r ./e2e-tests/adapters /tmp/e2e-tests/adapters working_directory: ~/project shell: bash.exe - run: # Set project dir From d1c761230166ae1c001c2b709a087c531526467a Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 08:04:52 +0100 Subject: [PATCH 14/27] test: maybe cd into dirs? --- .circleci/config.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6b08bb38f884e..3493377756fc7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -607,18 +607,15 @@ jobs: shell: bash.exe - run: command: cp -r ./e2e-tests/adapters /tmp/e2e-tests/adapters - working_directory: ~/project shell: bash.exe - run: # Set project dir command: node ./packages/gatsby-dev-cli/dist/index.js --set-path-to-repo . 
shell: bash.exe - run: # Copy over packages - command: node ~/project/packages/gatsby-dev-cli/dist/index.js --force-install --scan-once - working_directory: /tmp/e2e-tests/adapters + command: cd /tmp/e2e-tests/adapters && node ~/project/packages/gatsby-dev-cli/dist/index.js --force-install --scan-once shell: bash.exe - run: # run smoke test - command: node scripts/deploy-and-run/netlify.mjs test:smoke - working_directory: /tmp/e2e-tests/adapters + command: cd /tmp/e2e-tests/adapters && node scripts/deploy-and-run/netlify.mjs test:smoke shell: bash.exe workflows: From 1cb2fe1f577a46de7a5a494be0db79f3e40abe6e Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 08:11:09 +0100 Subject: [PATCH 15/27] test: no powershell fro smoke test --- .circleci/config.yml | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3493377756fc7..593b236226726 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -583,12 +583,11 @@ jobs: windows_adapters_smoke: executor: name: win/default - shell: powershell.exe + shell: bash.exe steps: - checkout - run: command: ./scripts/assert-changed-files.sh "packages/*|(e2e|integration)-tests/*|.circleci/*|scripts/e2e-test.sh|yarn.lock" - shell: bash.exe - <<: *attach_to_bootstrap - run: name: Install node 18.0.0 and yarn @@ -599,24 +598,20 @@ jobs: choco install yarn -y - run: name: Rebuild packages for windows - command: | - Remove-Item -Recurse -Force -Path "node_modules/sharp/" - yarn + command: npm rebuild sharp - run: command: mkdir -p /tmp/e2e-tests/ - shell: bash.exe - run: command: cp -r ./e2e-tests/adapters /tmp/e2e-tests/adapters - shell: bash.exe + - run: + command: pwd && ls + working_directory: /tmp/e2e-tests/adapters - run: # Set project dir command: node ./packages/gatsby-dev-cli/dist/index.js --set-path-to-repo . 
- shell: bash.exe - run: # Copy over packages command: cd /tmp/e2e-tests/adapters && node ~/project/packages/gatsby-dev-cli/dist/index.js --force-install --scan-once - shell: bash.exe - run: # run smoke test command: cd /tmp/e2e-tests/adapters && node scripts/deploy-and-run/netlify.mjs test:smoke - shell: bash.exe workflows: version: 2 From 2a9f92173e1ddc638571588e3ac368d35c0cb50e Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 08:21:26 +0100 Subject: [PATCH 16/27] chore: single quote to double --- packages/gatsby-legacy-polyfills/package.json | 2 +- packages/gatsby-plugin-offline/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gatsby-legacy-polyfills/package.json b/packages/gatsby-legacy-polyfills/package.json index dd1c62aaae62c..c9cd999b38bd7 100644 --- a/packages/gatsby-legacy-polyfills/package.json +++ b/packages/gatsby-legacy-polyfills/package.json @@ -16,7 +16,7 @@ "license": "MIT", "scripts": { "build": "npm-run-all --npm-path npm -p build:*", - "build:exclude": "cpy 'exclude.js' '../dist' --cwd=./src", + "build:exclude": "cpy \"exclude.js\" \"../dist\" --cwd=./src", "build:polyfills": "microbundle -f iife -i src/polyfills.js --no-sourcemap --external=none", "prepare": "cross-env NODE_ENV=production npm run build", "watch": "npm-run-all --npm-path npm -p watch:*", diff --git a/packages/gatsby-plugin-offline/package.json b/packages/gatsby-plugin-offline/package.json index 4f50ecdd28b91..235e58563bb9e 100644 --- a/packages/gatsby-plugin-offline/package.json +++ b/packages/gatsby-plugin-offline/package.json @@ -47,7 +47,7 @@ "scripts": { "build": "npm run build:src && npm run build:sw-append", "build:src": "babel src --out-dir . --ignore \"**/__tests__,src/sw-append.js\"", - "build:sw-append": "cpy 'sw-append.js' '../' --cwd=./src", + "build:sw-append": "cpy \"sw-append.js\" \"../\" --cwd=./src", "prepare": "cross-env NODE_ENV=production npm run build", "watch": "npm run build:sw-append -- --watch & npm run build:src -- --watch" }, From 686912b2b25631d0e20934e57d7ba49b181b97d7 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 08:43:32 +0100 Subject: [PATCH 17/27] chore: install node-gyp requirements --- .circleci/config.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 593b236226726..3ef97a3ddadf6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -590,12 +590,13 @@ jobs: command: ./scripts/assert-changed-files.sh "packages/*|(e2e|integration)-tests/*|.circleci/*|scripts/e2e-test.sh|yarn.lock" - <<: *attach_to_bootstrap - run: - name: Install node 18.0.0 and yarn + name: Install node 18.0.0, yarn and node-gyp requirements command: | nvm install 18.0.0 nvm alias default 18.0.0 nvm use 18.0.0 choco install yarn -y + pip install setuptools - run: name: Rebuild packages for windows command: npm rebuild sharp From 18263ae37452b95ecbbb0fcddf81d100e65973b2 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 09:00:21 +0100 Subject: [PATCH 18/27] chore: install deps in win smoke --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3ef97a3ddadf6..bdd5e93e577b8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -599,7 +599,7 @@ jobs: pip install setuptools - run: name: Rebuild packages for windows - command: npm rebuild sharp + command: npm rebuild sharp && yarn - run: command: mkdir -p /tmp/e2e-tests/ - 
run: From 3979168953ed2ecb31963af08088328a9072d117 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 09:24:06 +0100 Subject: [PATCH 19/27] ? --- .circleci/config.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index bdd5e93e577b8..b9952509d28a8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -595,11 +595,14 @@ jobs: nvm install 18.0.0 nvm alias default 18.0.0 nvm use 18.0.0 - choco install yarn -y - pip install setuptools + npm install -g yarn - run: - name: Rebuild packages for windows - command: npm rebuild sharp && yarn + name: Clear out sharp + command: | + Remove-Item -Recurse -Force -Path "node_modules/sharp/" + shell: powershell.exe + - run: + command: yarn - run: command: mkdir -p /tmp/e2e-tests/ - run: From 9af58b3d972102949bc768965ec539720384719a Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 09:58:22 +0100 Subject: [PATCH 20/27] newer node needed for ntl-cli --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b9952509d28a8..fb7fe0e252acc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -590,11 +590,11 @@ jobs: command: ./scripts/assert-changed-files.sh "packages/*|(e2e|integration)-tests/*|.circleci/*|scripts/e2e-test.sh|yarn.lock" - <<: *attach_to_bootstrap - run: - name: Install node 18.0.0, yarn and node-gyp requirements + name: Install node 18.19.0, yarn and node-gyp requirements command: | - nvm install 18.0.0 - nvm alias default 18.0.0 - nvm use 18.0.0 + nvm install 18.19.0 + nvm alias default 18.19.0 + nvm use 18.19.0 npm install -g yarn - run: name: Clear out sharp From 8c55e40f52beabaa0cd7168a95c277fa60411d94 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 10:32:09 +0100 Subject: [PATCH 21/27] run ntl through yarn --- .../adapters/scripts/deploy-and-run/netlify.mjs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs b/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs index 1326826e6b7ba..95bd7f076b82f 100644 --- a/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs +++ b/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs @@ -7,11 +7,11 @@ if (process.env.E2E_ADAPTERS_NETLIFY_SITE_ID) { } process.env.ADAPTER = "netlify" -const deployTitle = `${ - process.env.CIRCLE_SHA1 || "N/A commit" -} - trailingSlash:${process.env.TRAILING_SLASH || `always`} / pathPrefix:${ - process.env.PATH_PREFIX || `-` -}` +const deployTitle = `${process.env.CIRCLE_SHA1 || "N/A commit"} - ${ + process.platform +}/${process.arch} - / trailingSlash:${ + process.env.TRAILING_SLASH || `always` +} / pathPrefix:${process.env.PATH_PREFIX || `-`}` const npmScriptToRun = process.argv[2] || "test:netlify" @@ -19,8 +19,8 @@ const npmScriptToRun = process.argv[2] || "test:netlify" await execa(`npm`, [`run`, `clean`], { stdio: `inherit` }) const deployResults = await execa( - "ntl", - ["deploy", "--build", "--json", "--cwd=.", "--message", deployTitle], + "yarn", + ["ntl", "deploy", "--build", "--json", "--cwd=.", "--message", deployTitle], { reject: false, } @@ -49,7 +49,8 @@ try { } finally { if (!process.env.GATSBY_TEST_SKIP_CLEANUP) { console.log(`Deleting project with deploy_id ${deployInfo.deploy_id}`) - const deleteResponse = await execa("ntl", [ + const deleteResponse = await execa("yarn", [ + "ntl", "api", "deleteDeploy", "--data", 
From ec1e3197f3a827f9b099f24dbe63e0b530ed4287 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 11:56:21 +0100 Subject: [PATCH 22/27] Revert "run ntl through yarn" This reverts commit 8c55e40f52beabaa0cd7168a95c277fa60411d94. --- .../adapters/scripts/deploy-and-run/netlify.mjs | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs b/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs index 95bd7f076b82f..1326826e6b7ba 100644 --- a/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs +++ b/e2e-tests/adapters/scripts/deploy-and-run/netlify.mjs @@ -7,11 +7,11 @@ if (process.env.E2E_ADAPTERS_NETLIFY_SITE_ID) { } process.env.ADAPTER = "netlify" -const deployTitle = `${process.env.CIRCLE_SHA1 || "N/A commit"} - ${ - process.platform -}/${process.arch} - / trailingSlash:${ - process.env.TRAILING_SLASH || `always` -} / pathPrefix:${process.env.PATH_PREFIX || `-`}` +const deployTitle = `${ + process.env.CIRCLE_SHA1 || "N/A commit" +} - trailingSlash:${process.env.TRAILING_SLASH || `always`} / pathPrefix:${ + process.env.PATH_PREFIX || `-` +}` const npmScriptToRun = process.argv[2] || "test:netlify" @@ -19,8 +19,8 @@ const npmScriptToRun = process.argv[2] || "test:netlify" await execa(`npm`, [`run`, `clean`], { stdio: `inherit` }) const deployResults = await execa( - "yarn", - ["ntl", "deploy", "--build", "--json", "--cwd=.", "--message", deployTitle], + "ntl", + ["deploy", "--build", "--json", "--cwd=.", "--message", deployTitle], { reject: false, } @@ -49,8 +49,7 @@ try { } finally { if (!process.env.GATSBY_TEST_SKIP_CLEANUP) { console.log(`Deleting project with deploy_id ${deployInfo.deploy_id}`) - const deleteResponse = await execa("yarn", [ - "ntl", + const deleteResponse = await execa("ntl", [ "api", "deleteDeploy", "--data", From 6713e374f8571fc43aa3b7a73a0bea3ff2b9d056 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 11:57:27 +0100 Subject: [PATCH 23/27] install ntl-cli in circleci pipeline --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fb7fe0e252acc..2a41d39eccc27 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -590,12 +590,12 @@ jobs: command: ./scripts/assert-changed-files.sh "packages/*|(e2e|integration)-tests/*|.circleci/*|scripts/e2e-test.sh|yarn.lock" - <<: *attach_to_bootstrap - run: - name: Install node 18.19.0, yarn and node-gyp requirements + name: Install node 18.19.0, yarn and netlify-cli command: | nvm install 18.19.0 nvm alias default 18.19.0 nvm use 18.19.0 - npm install -g yarn + npm install -g yarn netlify-cli - run: name: Clear out sharp command: | From 6f7db061d08680df96fca457c0952914c8a3de57 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 12:42:10 +0100 Subject: [PATCH 24/27] test: adjust lmdb regeneration test to changed internal-packages location --- integration-tests/lmdb-regeneration/__tests__/index.js | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/integration-tests/lmdb-regeneration/__tests__/index.js b/integration-tests/lmdb-regeneration/__tests__/index.js index 3da14d6ce0d54..08bab95b7d587 100644 --- a/integration-tests/lmdb-regeneration/__tests__/index.js +++ b/integration-tests/lmdb-regeneration/__tests__/index.js @@ -38,7 +38,13 @@ describe(`Lmdb regeneration`, () => { // If the fix worked correctly we should have installed the prebuilt binary for our platform under our 
`.cache` directory const lmdbRequire = mod.createRequire( - path.resolve(rootPath, ".cache", "internal-packages", "package.json") + path.resolve( + rootPath, + ".cache", + "internal-packages", + `${process.platform}-${process.arch}`, + "package.json" + ) ) expect(() => { lmdbRequire.resolve(lmdbPackage) From 65091047545cdad3147f735f74245314700e7e5b Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Mon, 18 Mar 2024 12:46:31 +0100 Subject: [PATCH 25/27] test: run windows deploy/smoke test after unit tests passed --- .circleci/config.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2a41d39eccc27..d12ffd7e09754 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -650,7 +650,12 @@ workflows: - bootstrap - windows_adapters_smoke: requires: - - bootstrap #todo - this should require windows_unit_tests, but for now let's run it earlier to setup job correctly + # ideally we wait for windows unit tests here, but because those are flaky + # feedback loop would be not practical, so at least wait for linux unit tests + # to resemble setup for more robust E2E tests + - lint + - bootstrap + - unit_tests_node18 - unit_tests_node18: <<: *ignore_docs requires: From e25d4e520efa99f40ffc69b201495d34cf42c36a Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Tue, 9 Apr 2024 13:37:01 +0200 Subject: [PATCH 26/27] chore: use path.posix to load engines in serve command --- packages/gatsby/src/commands/serve.ts | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/packages/gatsby/src/commands/serve.ts b/packages/gatsby/src/commands/serve.ts index 88be01d145430..1727e2c0c420e 100644 --- a/packages/gatsby/src/commands/serve.ts +++ b/packages/gatsby/src/commands/serve.ts @@ -29,6 +29,7 @@ import { thirdPartyProxyPath, partytownProxy, } from "../internal-plugins/partytown/proxy" +import { slash } from "gatsby-core-utils/path" interface IMatchPath { path: string @@ -188,10 +189,10 @@ module.exports = async (program: IServeProgram): Promise => { let pageSSRModule: string | undefined try { graphqlEnginePath = require.resolve( - path.join(program.directory, `.cache`, `query-engine`) + path.posix.join(slash(program.directory), `.cache`, `query-engine`) ) pageSSRModule = require.resolve( - path.join(program.directory, `.cache`, `page-ssr`) + path.posix.join(slash(program.directory), `.cache`, `page-ssr`) ) } catch (error) { // TODO: Handle case of engine not being generated @@ -204,7 +205,12 @@ module.exports = async (program: IServeProgram): Promise => { const { getData, renderPageData, renderHTML } = require(pageSSRModule) as typeof import("../utils/page-ssr-module/entry") const graphqlEngine = new GraphQLEngine({ - dbPath: path.join(program.directory, `.cache`, `data`, `datastore`), + dbPath: path.posix.join( + slash(program.directory), + `.cache`, + `data`, + `datastore` + ), }) router.get( From c507fb6cd521b929e94d18c9e54720c3b6ac8e87 Mon Sep 17 00:00:00 2001 From: Michal Piechowiak Date: Tue, 9 Apr 2024 13:38:50 +0200 Subject: [PATCH 27/27] chore: use default value when destructuring instead of nullish coalescing later --- packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts index 20d10b9a852ed..0a559b951ea97 100644 --- a/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts +++ 
b/packages/gatsby/src/schema/graphql-engine/bundle-webpack.ts @@ -102,10 +102,8 @@ function getLMDBBinaryFromSiteLocation( return undefined } // If there's no lmdb prebuilt package for our arch/platform listed as optional dep no point in trying to install it - const { optionalDependencies } = packageJson - if ( - !Object.keys(optionalDependencies ?? {}).find(p => p === lmdbPackageName) - ) { + const { optionalDependencies = {} } = packageJson + if (!Object.keys(optionalDependencies).find(p => p === lmdbPackageName)) { throw new Error( `Target platform/arch for functions execution (${functionsTarget.platform}/${functionsTarget.arch}) is not supported.` )