-
Notifications
You must be signed in to change notification settings - Fork 86
/
Copy path: server.ts
164 lines (135 loc) · 5.64 KB
/
server.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
import type { OutgoingHttpHeaders } from 'http'
import { ComputeJsOutgoingMessage, toComputeResponse, toReqRes } from '@fastly/http-compute-js'
import type { Context } from '@netlify/functions'
import { Span } from '@opentelemetry/api'
import type { NextConfigComplete } from 'next/dist/server/config-shared.js'
import type { WorkerRequestHandler } from 'next/dist/server/lib/types.js'
import {
adjustDateHeader,
setCacheControlHeaders,
setCacheStatusHeader,
setCacheTagsHeaders,
setVaryHeaders,
} from '../headers.js'
import { nextResponseProxy } from '../revalidate.js'
import { getLogger, type RequestContext } from './request-context.cjs'
import { getTracer } from './tracer.cjs'
import { setupWaitUntil } from './wait-until.cjs'
const nextImportPromise = import('../next.cjs')
setupWaitUntil()
let nextHandler: WorkerRequestHandler, nextConfig: NextConfigComplete
/**
* When Next.js proxies requests externally, it writes the response back as-is.
* In some cases, this includes Transfer-Encoding: chunked.
* This triggers behaviour in @fastly/http-compute-js to separate chunks with chunk delimiters, which is not what we want at this level.
* We want Lambda to control the behaviour around chunking, not this.
* This workaround removes the Transfer-Encoding header, which makes the library send the response as-is.
*/
const disableFaultyTransferEncodingHandling = (res: ComputeJsOutgoingMessage) => {
const originalStoreHeader = res._storeHeader
res._storeHeader = function _storeHeader(firstLine, headers) {
if (headers) {
if (Array.isArray(headers)) {
// eslint-disable-next-line no-param-reassign
headers = headers.filter(([header]) => header.toLowerCase() !== 'transfer-encoding')
} else {
delete (headers as OutgoingHttpHeaders)['transfer-encoding']
}
}
return originalStoreHeader.call(this, firstLine, headers)
}
}
export default async (
request: Request,
_context: Context,
topLevelSpan: Span,
requestContext: RequestContext,
) => {
const tracer = getTracer()
if (!nextHandler) {
await tracer.withActiveSpan('initialize next server', async () => {
// set the server config
const { getRunConfig, setRunConfig } = await import('../config.js')
nextConfig = await getRunConfig()
setRunConfig(nextConfig)
const { getMockedRequestHandler } = await nextImportPromise
const url = new URL(request.url)
nextHandler = await getMockedRequestHandler({
port: Number(url.port) || 443,
hostname: url.hostname,
dir: process.cwd(),
isDev: false,
})
})
}
return await tracer.withActiveSpan('generate response', async (span) => {
const { req, res } = toReqRes(request)
// Work around a bug in http-proxy in next@<14.0.2
Object.defineProperty(req, 'connection', {
get() {
return {}
},
})
Object.defineProperty(req, 'socket', {
get() {
return {}
},
})
disableFaultyTransferEncodingHandling(res as unknown as ComputeJsOutgoingMessage)
const resProxy = nextResponseProxy(res, requestContext)
// We don't await this here, because it won't resolve until the response is finished.
const nextHandlerPromise = nextHandler(req, resProxy).catch((error) => {
getLogger().withError(error).error('next handler error')
console.error(error)
resProxy.statusCode = 500
span.setAttribute('http.status_code', 500)
resProxy.end('Internal Server Error')
})
// Contrary to the docs, this resolves when the headers are available, not when the stream closes.
// See https://github.com/fastly/http-compute-js/blob/main/src/http-compute-js/http-server.ts#L168-L173
const response = await toComputeResponse(resProxy)
if (requestContext.responseCacheKey) {
topLevelSpan.setAttribute('responseCacheKey', requestContext.responseCacheKey)
}
const nextCache = response.headers.get('x-nextjs-cache')
const isServedFromNextCache = nextCache === 'HIT' || nextCache === 'STALE'
topLevelSpan.setAttributes({
'x-nextjs-cache': nextCache ?? undefined,
isServedFromNextCache,
})
if (isServedFromNextCache) {
await adjustDateHeader({
headers: response.headers,
request,
span,
tracer,
requestContext,
})
}
setCacheControlHeaders(response, request, requestContext, nextConfig)
setCacheTagsHeaders(response.headers, requestContext)
setVaryHeaders(response.headers, request, nextConfig)
setCacheStatusHeader(response.headers, nextCache)
async function waitForBackgroundWork() {
// it's important to keep the stream open until the next handler has finished
await nextHandlerPromise
// Next.js relies on `close` event emitted by response to trigger running callback variant of `next/after`
// however @fastly/http-compute-js never actually emits that event - so we have to emit it ourselves,
// otherwise Next would never run the callback variant of `next/after`
res.emit('close')
// We have to keep response stream open until tracked background promises that are don't use `context.waitUntil`
// are resolved. If `context.waitUntil` is available, `requestContext.backgroundWorkPromise` will be empty
// resolved promised and so awaiting it is no-op
await requestContext.backgroundWorkPromise
}
const keepOpenUntilNextFullyRendered = new TransformStream({
async flush() {
await waitForBackgroundWork()
},
})
if (!response.body) {
await waitForBackgroundWork()
}
return new Response(response.body?.pipeThrough(keepOpenUntilNextFullyRendered), response)
})
}