test: refactor serverless invocations so in-process and sandbox implementation use shared code and ability to run multiple invocations in same sandbox #2871

Merged 5 commits on Apr 25, 2025
Changes from 1 commit
File renamed without changes.
112 changes: 4 additions & 108 deletions tests/utils/fixture.ts
@@ -2,11 +2,9 @@ import { assert, vi } from 'vitest'

import { type NetlifyPluginConstants, type NetlifyPluginOptions } from '@netlify/build'
import { bundle, serve } from '@netlify/edge-bundler'
import type { LambdaResponse } from '@netlify/serverless-functions-api/dist/lambda/response.js'
Contributor Author


this type was no longer even exported, so I derived it in a slightly different way in the new location via some TypeScript transformations - it seems we generally don't type-check the test helpers (not something to address in this PR though)

import { zipFunctions } from '@netlify/zip-it-and-ship-it'
import { execaCommand } from 'execa'
import getPort from 'get-port'
import { execute } from 'lambda-local'
import { spawn } from 'node:child_process'
import { createWriteStream, existsSync } from 'node:fs'
import { cp, mkdir, mkdtemp, readFile, rm, writeFile } from 'node:fs/promises'
@@ -16,17 +14,16 @@ import { env } from 'node:process'
import { fileURLToPath } from 'node:url'
import { v4 } from 'uuid'
import { LocalServer } from './local-server.js'
import { streamToBuffer } from './stream-to-buffer.js'
import { loadAndInvokeFunctionImpl, type FunctionInvocationOptions } from './lambda-helpers.mjs'

import { glob } from 'fast-glob'
import {
EDGE_HANDLER_NAME,
PluginContext,
SERVER_HANDLER_NAME,
} from '../../src/build/plugin-context.js'
import { BLOB_TOKEN } from './constants.js'
import { BLOB_TOKEN } from './constants.mjs'
import { type FixtureTestContext } from './contexts.js'
import { createBlobContext } from './helpers.js'
import { setNextVersionInFixture } from './next-version-helpers.mjs'

const bootstrapURL = 'https://edge.netlify.com/bootstrap/index-combined.ts'
@@ -339,117 +336,16 @@ export async function uploadBlobs(ctx: FixtureTestContext, blobsDir: string) {
)
}

const DEFAULT_FLAGS = {}
/**
* Execute the function with the provided parameters
* @param ctx
* @param options
*/
export async function invokeFunction(
ctx: FixtureTestContext,
options: {
/**
* The http method that is used for the invocation
* @default 'GET'
*/
httpMethod?: string
/**
* The relative path that should be requested
* @default '/'
*/
url?: string
/** The headers used for the invocation*/
headers?: Record<string, string>
/** The body that is used for the invocation */
body?: unknown
/** Environment variables that should be set during the invocation */
env?: Record<string, string | number>
/** Feature flags that should be set during the invocation */
flags?: Record<string, unknown>
} = {},
options: FunctionInvocationOptions = {},
) {
const { httpMethod, headers, flags, url, env } = options
// now for the execution set the process working directory to the dist entry point
const cwdMock = vi
.spyOn(process, 'cwd')
.mockReturnValue(join(ctx.functionDist, SERVER_HANDLER_NAME))
try {
const { handler } = await import(
join(ctx.functionDist, SERVER_HANDLER_NAME, '___netlify-entry-point.mjs')
)

// The environment variables available during execution
const environment = {
NODE_ENV: 'production',
NETLIFY_BLOBS_CONTEXT: createBlobContext(ctx),
...(env || {}),
}

const envVarsToRestore = {}

// We are not using lambda-local's environment variable setting because it cleans up
// environment vars too early (before the stream is closed)
Object.keys(environment).forEach(function (key) {
if (typeof process.env[key] !== 'undefined') {
envVarsToRestore[key] = process.env[key]
}
process.env[key] = environment[key]
})

let resolveInvocation, rejectInvocation
const invocationPromise = new Promise((resolve, reject) => {
resolveInvocation = resolve
rejectInvocation = reject
})

const response = (await execute({
event: {
headers: headers || {},
httpMethod: httpMethod || 'GET',
rawUrl: new URL(url || '/', 'https://example.netlify').href,
flags: flags ?? DEFAULT_FLAGS,
},
lambdaFunc: { handler },
timeoutMs: 4_000,
onInvocationEnd: (error) => {
// lambda-local resolves the promise returned from execute when the response is closed,
// but we should wait for tracked background work to finish
// before resolving the invocation promise
if (error) {
rejectInvocation(error)
} else {
resolveInvocation()
}
},
})) as LambdaResponse

await invocationPromise

const responseHeaders = Object.entries(response.multiValueHeaders || {}).reduce(
(prev, [key, value]) => ({
...prev,
[key]: value.length === 1 ? `${value}` : value.join(', '),
}),
response.headers || {},
)

const bodyBuffer = await streamToBuffer(response.body)

Object.keys(environment).forEach(function (key) {
if (typeof envVarsToRestore[key] !== 'undefined') {
process.env[key] = envVarsToRestore[key]
} else {
delete process.env[key]
}
})

return {
statusCode: response.statusCode,
bodyBuffer,
body: bodyBuffer.toString('utf-8'),
headers: responseHeaders,
isBase64Encoded: response.isBase64Encoded,
}
return await loadAndInvokeFunctionImpl(ctx, options)
} finally {
cwdMock.mockRestore()
}
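For context, the refactor keeps the invokeFunction call signature used by existing tests; a minimal usage sketch follows (the import paths, header values, and assertions are illustrative assumptions, only invokeFunction and its options come from this diff):

import { expect } from 'vitest'
import { invokeFunction } from './fixture.js'
import type { FixtureTestContext } from './contexts.js'

// `ctx` is assumed to be a FixtureTestContext prepared by the surrounding test setup
// (fixture built, blobs uploaded); only invokeFunction below comes from this PR.
export async function expectHomePageToRender(ctx: FixtureTestContext) {
  const response = await invokeFunction(ctx, {
    url: '/',
    httpMethod: 'GET',
    headers: { accept: 'text/html' },
  })

  expect(response.statusCode).toBe(200)
  expect(response.headers['content-type']).toContain('text/html')
}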
15 changes: 2 additions & 13 deletions tests/utils/helpers.ts
@@ -8,8 +8,9 @@ import { mkdtemp } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { assert, vi } from 'vitest'
import { BLOB_TOKEN } from './constants'
import { BLOB_TOKEN } from './constants.mjs'
import { type FixtureTestContext } from './contexts'
import { createBlobContext } from './lambda-helpers.mjs'

/**
* Generates a 24char deploy ID (this is validated in the blob storage so we cant use a uuidv4)
@@ -26,18 +27,6 @@ export const generateRandomObjectID = () => {
return objectId
}

export const createBlobContext = (ctx: FixtureTestContext) =>
Buffer.from(
JSON.stringify({
edgeURL: `http://${ctx.blobStoreHost}`,
uncachedEdgeURL: `http://${ctx.blobStoreHost}`,
token: BLOB_TOKEN,
siteID: ctx.siteID,
deployID: ctx.deployID,
primaryRegion: 'us-test-1',
}),
).toString('base64')

Comment on lines -29 to -40
Contributor Author

@pieh pieh Apr 24, 2025


moved to lambda-helpers.mjs (since that file can't import from TS modules but does need to use this)

/**
* Starts a new mock blob storage
* @param ctx
1 change: 0 additions & 1 deletion tests/utils/index.ts
@@ -1,3 +1,2 @@
export * from './helpers.js'
export * from './mock-file-system.js'
export * from './stream-to-buffer.js'
Contributor Author


this was never imported anywhere

181 changes: 181 additions & 0 deletions tests/utils/lambda-helpers.mjs
@@ -0,0 +1,181 @@
// @ts-check

// this is not a TS file because it's used both directly inside the test process
// as well as in a child process that lacks on-the-fly TS transpilation
Comment on lines +3 to +4
Contributor


I wonder if we could just use tsx or something? It's only for tests... Might be worth it for good type safety.

Contributor Author

@pieh pieh Apr 25, 2025


We could try, but ideally it would be one that matches what vitest is using, and I wasn't sure it was worth the effort given that TS-ish JSDoc is a possibility. I've done something like that in gatsbyjs/gatsby#32120 (you can grep for ts-register there for adding on-the-fly TS support in tests, though that exact solution used @babel/register, which I wouldn't try to use here). But in the Gatsby case this child-process handling was part of core framework functionality, so full type safety there was much more important, while here any problems would only affect our own test helpers, as we don't use this in the actual Next runtime

Contributor Author


but I'd leave this as a maybe for the future; ideally we won't have to touch this code for a long time (I only touched the sandbox code once before - when it was originally written, heh - and now I'm touching it only to add a feature)

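Purely as an illustration of the tsx idea floated above (not something this PR does), a sketch of spawning a TypeScript child entry through tsx; the sandbox-child.ts path and the IPC message shape are hypothetical:

import { spawn } from 'node:child_process'

// Hypothetical: run a TS child entry through tsx so the helper could stay in TypeScript.
// 'tests/utils/sandbox-child.ts' does not exist in this PR; it stands in for the shared helper.
const child = spawn('npx', ['tsx', 'tests/utils/sandbox-child.ts'], {
  stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
})

// Invocation options would go over IPC and results would come back the same way,
// which would also allow multiple invocations in the same child process.
child.send({ httpMethod: 'GET', url: '/' })
child.on('message', (result) => {
  console.log('invocation result', result)
})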

import { join } from 'node:path'
import { BLOB_TOKEN } from './constants.mjs'
import { execute as untypedExecute } from 'lambda-local'

const SERVER_HANDLER_NAME = '___netlify-server-handler'
Contributor Author


this is copied from src/build/plugin-context.ts:

export const SERVER_HANDLER_NAME = '___netlify-server-handler'

(for the in-process serverless invocation it was imported from there, but it was previously already copied for the sandboxed invocation because that code can't import TS, so this duplication isn't entirely new)


/**
* @typedef {import('./contexts').FixtureTestContext} FixtureTestContext
*
* @typedef {Awaited<ReturnType<ReturnType<typeof import('@netlify/serverless-functions-api').getLambdaHandler>>>} LambdaResult
*
* @typedef {Object} FunctionInvocationOptions
* @property {Record<string, string>} [env] Environment variables that should be set during the invocation
* @property {string} [httpMethod] The http method that is used for the invocation. Defaults to 'GET'
* @property {string} [url] The relative path that should be requested. Defaults to '/'
* @property {Record<string, string>} [headers] The headers used for the invocation
* @property {Record<string, unknown>} [flags] Feature flags that should be set during the invocation
*/

/**
* This is a helper to get lambda-local's execute to provide an actual result type instead of `unknown`.
* Because JSDoc doesn't seem to have an equivalent of TS's `as`, and assigning the `LambdaResult` type
* to the return value of `execute` leads to a `Type 'unknown' is not assignable to type 'LambdaResult'`
* error, this types the result as `any` first, which allows it to be typed later as `LambdaResult`.
* @param {Parameters<typeof untypedExecute>} args
* @returns {Promise<LambdaResult>}
*/
async function execute(...args) {
/**
* @type {any}
*/
const anyResult = await untypedExecute(...args)

return anyResult
}
Comment on lines +27 to +42
Contributor Author


This is equivalent to untypedExecute() as Promise<LambdaResult>, but I couldn't find a nicer way to do it with JSDoc, as the simplest solution was causing Type 'unknown' is not assignable to type 'LambdaResult' TS errors

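For reference, TypeScript's JSDoc syntax does have a cast form, /** @type {X} */ (expression); whether it satisfies the checker in this exact spot is unverified, but the alternative alluded to above would look roughly like this:

/**
 * @param {Parameters<typeof untypedExecute>} args
 * @returns {Promise<LambdaResult>}
 */
async function executeViaCast(...args) {
  // Illustrative alternative to the execute() helper above; the parenthesized
  // JSDoc cast plays the role of `as` in TS files.
  return /** @type {LambdaResult} */ (await untypedExecute(...args))
}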

/**
* @param {FixtureTestContext} ctx
*/
export const createBlobContext = (ctx) =>
Buffer.from(
JSON.stringify({
edgeURL: `http://${ctx.blobStoreHost}`,
uncachedEdgeURL: `http://${ctx.blobStoreHost}`,
token: BLOB_TOKEN,
siteID: ctx.siteID,
deployID: ctx.deployID,
primaryRegion: 'us-test-1',
}),
).toString('base64')

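The encoded value is presumably what the Netlify Blobs client picks up from NETLIFY_BLOBS_CONTEXT once temporarilySetEnv (below) has set it; decoding it is just the reverse of the snippet above, for example:

// Sketch: reverse of createBlobContext, handy when debugging what a test invocation sees.
const decoded = JSON.parse(
  Buffer.from(process.env.NETLIFY_BLOBS_CONTEXT ?? '', 'base64').toString('utf-8'),
)
console.log(decoded.edgeURL, decoded.siteID, decoded.deployID)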
/**
* Converts a readable stream to a buffer
* @param {NodeJS.ReadableStream} stream
* @returns {Promise<Buffer>}
*/
function streamToBuffer(stream) {
/**
* @type {Buffer[]}
*/
const chunks = []

return new Promise((resolve, reject) => {
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)))
stream.on('error', (err) => reject(err))
stream.on('end', () => resolve(Buffer.concat(chunks)))
})
}

/**
* @param {FixtureTestContext} ctx
* @param {Record<string, string>} [env]
*/
function temporarilySetEnv(ctx, env) {
const environment = {
NODE_ENV: 'production',
NETLIFY_BLOBS_CONTEXT: createBlobContext(ctx),
...(env || {}),
}

const envVarsToRestore = {}

// We are not using lambda-local's environment variable setting because it cleans up
// environment vars too early (before the stream is closed)
Object.keys(environment).forEach(function (key) {
if (typeof process.env[key] !== 'undefined') {
envVarsToRestore[key] = process.env[key]
}
process.env[key] = environment[key]
})

return function restoreEnvironment() {
Object.keys(environment).forEach(function (key) {
if (typeof envVarsToRestore[key] !== 'undefined') {
process.env[key] = envVarsToRestore[key]
} else {
delete process.env[key]
}
})
}
}

const DEFAULT_FLAGS = {}

/**
* @param {FixtureTestContext} ctx
* @param {FunctionInvocationOptions} options
*/
export async function loadAndInvokeFunctionImpl(
ctx,
{ headers, httpMethod, flags, url, env } = {},
) {
const { handler } = await import(
'file:///' + join(ctx.functionDist, SERVER_HANDLER_NAME, '___netlify-entry-point.mjs')
)

const restoreEnvironment = temporarilySetEnv(ctx, env)

let resolveInvocation, rejectInvocation
const invocationPromise = new Promise((resolve, reject) => {
resolveInvocation = resolve
rejectInvocation = reject
})

const response = await execute({
event: {
headers: headers || {},
httpMethod: httpMethod || 'GET',
rawUrl: new URL(url || '/', 'https://example.netlify').href,
flags: flags ?? DEFAULT_FLAGS,
},
lambdaFunc: { handler },
timeoutMs: 4_000,
onInvocationEnd: (error) => {
// lambda-local resolves the promise returned from execute when the response is closed,
// but we should wait for tracked background work to finish
// before resolving the invocation promise
if (error) {
rejectInvocation(error)
} else {
resolveInvocation()
}
},
})

await invocationPromise

if (!response) {
throw new Error('No response from lambda-local')
}

const responseHeaders = Object.entries(response.multiValueHeaders || {}).reduce(
(prev, [key, value]) => ({
...prev,
[key]: value.length === 1 ? `${value}` : value.join(', '),
}),
response.headers || {},
)

const bodyBuffer = await streamToBuffer(response.body)

restoreEnvironment()

return {
statusCode: response.statusCode,
bodyBuffer,
body: bodyBuffer.toString('utf-8'),
headers: responseHeaders,
isBase64Encoded: response.isBase64Encoded,
}
}

/**
* @typedef {typeof loadAndInvokeFunctionImpl} InvokeFunction
* @typedef {Promise<Awaited<ReturnType<InvokeFunction>>>} InvokeFunctionResult
*/
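The sandbox-runner side is not shown in this part of the diff, but as a hedged sketch of how the shared helper might be consumed from a child process (the file name and IPC message shape are assumptions, not code from this PR), keeping one process alive is also what permits multiple invocations in the same sandbox:

// Hypothetical child entry, not part of this diff. Assumes the parent sends
// { ctx, options } pairs over IPC and reads serialized results back.
import { loadAndInvokeFunctionImpl } from './lambda-helpers.mjs'

process.on('message', async ({ ctx, options }) => {
  try {
    const result = await loadAndInvokeFunctionImpl(ctx, options)
    // drop the Buffer before IPC serialization; the string body is enough for assertions
    const { bodyBuffer, ...serializable } = result
    process.send?.({ ok: true, result: serializable })
  } catch (error) {
    process.send?.({ ok: false, error: error instanceof Error ? error.message : String(error) })
  }
})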