 import * as express from 'express';
 import * as cors from 'cors';
 import { createProxyMiddleware, Options } from 'http-proxy-middleware';
-import { logger, parsePort } from '../../helpers';
+import { logError, logger, parsePort, pipelineAsync, REPO_DIR } from '../../helpers';
 import { Agent } from 'http';
 import * as fs from 'fs';
+import * as path from 'path';
 import { addAsync } from '@awaitjs/express';
 import * as chokidar from 'chokidar';
 import * as jsoncParser from 'jsonc-parser';
+import fetch, { RequestInit } from 'node-fetch';

 export function GetStacksNodeProxyEndpoint() {
   // Use STACKS_CORE_PROXY env vars if available, otherwise fallback to `STACKS_CORE_RPC` env vars
@@ -79,6 +81,132 @@ export function createCoreNodeRpcProxyRouter(): express.Router {
     return null;
   };

+  /**
+   * Check for any extra endpoints that have been configured for "multicast" tx submission.
+   */
+  async function getExtraTxPostEndpoints(): Promise<string[] | false> {
+    const STACKS_API_EXTRA_TX_ENDPOINTS_FILE_ENV_VAR = 'STACKS_API_EXTRA_TX_ENDPOINTS_FILE';
+    const extraEndpointsEnvVar = process.env[STACKS_API_EXTRA_TX_ENDPOINTS_FILE_ENV_VAR];
+    if (!extraEndpointsEnvVar) {
+      return false;
+    }
+    const filePath = path.resolve(REPO_DIR, extraEndpointsEnvVar);
+    let fileContents: string;
+    try {
+      fileContents = await fs.promises.readFile(filePath, { encoding: 'utf8' });
+    } catch (error) {
+      logError(`Error reading ${STACKS_API_EXTRA_TX_ENDPOINTS_FILE_ENV_VAR}: ${error}`, error);
+      return false;
+    }
+    const endpoints = fileContents
+      .split(/\r?\n/)
+      .map(r => r.trim())
+      .filter(r => !r.startsWith('#') && r.length !== 0);
+    if (endpoints.length === 0) {
+      return false;
+    }
+    return endpoints;
+  }
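+
+  // For illustration: STACKS_API_EXTRA_TX_ENDPOINTS_FILE should point at a newline-delimited
+  // list of endpoint URLs (the path is resolved against REPO_DIR). Lines starting with `#` are
+  // treated as comments and blank lines are ignored. A hypothetical file could look like:
+  //   # extra nodes that should also receive submitted txs
+  //   http://stacks-node-2:20443/v2/transactions
+  //   http://stacks-node-3:20443/v2/transactions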
+
+  /**
+   * Reads an http request stream into a Buffer.
+   */
+  async function readRequestBody(req: express.Request, maxSizeBytes = Infinity): Promise<Buffer> {
+    return new Promise((resolve, reject) => {
+      let resultBuffer: Buffer = Buffer.alloc(0);
+      req.on('data', chunk => {
+        if (!Buffer.isBuffer(chunk)) {
+          reject(
+            new Error(
+              `Expected request body chunks to be Buffer, received ${chunk.constructor.name}`
+            )
+          );
+          req.destroy();
+          return;
+        }
+        resultBuffer = resultBuffer.length === 0 ? chunk : Buffer.concat([resultBuffer, chunk]);
+        if (resultBuffer.byteLength >= maxSizeBytes) {
+          reject(new Error(`Request body exceeded max byte size`));
+          req.destroy();
+          return;
+        }
+      });
+      req.on('end', () => {
+        if (!req.complete) {
+          return reject(
+            new Error('The connection was terminated while the message was still being sent')
+          );
+        }
+        resolve(resultBuffer);
+      });
+      req.on('error', error => reject(error));
+    });
+  }
+
+  router.postAsync('/transactions', async (req, res, next) => {
+    const extraEndpoints = await getExtraTxPostEndpoints();
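+    // If no extra endpoints are configured, let the request fall through to the regular
+    // http-proxy-middleware handler set up below.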
+    if (!extraEndpoints) {
+      next();
+      return;
+    }
+    const endpoints = [
+      // The primary proxy endpoint (the http response from this one will be returned to the client)
+      `http://${stacksNodeRpcEndpoint}/v2/transactions`,
+    ];
+    endpoints.push(...extraEndpoints);
+    logger.info(`Overriding POST /v2/transactions to multicast to ${endpoints.join(',')}`);
+    const maxBodySize = 10_000_000; // 10 MB max POST body size
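+    // Buffer the entire request body up front so the exact same raw payload can be re-sent
+    // to each configured endpoint.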
+    const reqBody = await readRequestBody(req, maxBodySize);
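+    // `req.rawHeaders` is a flat list of alternating header names and values, so rebuild it
+    // into [name, value] pairs in order to forward the client's headers as received.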
+    const reqHeaders: string[][] = [];
+    for (let i = 0; i < req.rawHeaders.length; i += 2) {
+      reqHeaders.push([req.rawHeaders[i], req.rawHeaders[i + 1]]);
+    }
+    const postFn = async (endpoint: string) => {
+      const reqOpts: RequestInit = {
+        method: 'POST',
+        agent: httpAgent,
+        body: reqBody,
+        headers: reqHeaders,
+      };
+      const proxyResult = await fetch(endpoint, reqOpts);
+      return proxyResult;
+    };
+
+    // Here's where we "multicast" the `/v2/transactions` POST by concurrently sending the http request to all configured endpoints.
+    const results = await Promise.allSettled(endpoints.map(endpoint => postFn(endpoint)));
+
+    // Only the first (non-extra) endpoint's http response is proxied back to the client, so ensure any errors from requests
+    // to the extra endpoints are logged.
+    results.slice(1).forEach(p => {
+      if (p.status === 'rejected') {
+        logError(`Error during POST /v2/transactions to extra endpoint: ${p.reason}`, p.reason);
+      } else {
+        if (!p.value.ok) {
+          logError(
+            `Response ${p.value.status} during POST /v2/transactions to extra endpoint ${p.value.url}`
+          );
+        }
+      }
+    });
+
+    // Proxy the primary (non-extra) endpoint's http response back to the client.
+    const mainResult = results[0];
+    if (mainResult.status === 'rejected') {
+      logError(
+        `Error in primary POST /v2/transactions proxy: ${mainResult.reason}`,
+        mainResult.reason
+      );
+      res.status(500).json({ error: mainResult.reason });
+    } else {
+      const proxyResp = mainResult.value;
+      res.status(proxyResp.status);
+      proxyResp.headers.forEach((value, name) => {
+        res.setHeader(name, value);
+      });
+      await pipelineAsync(proxyResp.body, res);
+    }
+  });
+
   const proxyOptions: Options = {
     agent: httpAgent,
     target: `http://${stacksNodeRpcEndpoint}`,