From bb3835dc8b75f951a8c9b9946703f8e02648c6ab Mon Sep 17 00:00:00 2001
From: Jake Walker
Date: Tue, 9 Apr 2024 17:55:03 +0100
Subject: [PATCH] Switch to Cloudflare R2

---
 README.md                |  2 +-
 worker/src/cleanup.ts    |  5 ++--
 worker/src/controller.ts |  8 ++----
 worker/src/index.spec.ts | 22 ++++-----------
 worker/src/index.ts      | 47 ++++++-------------------
 worker/src/s3.ts         | 61 ----------------------------------------
 worker/wrangler.toml     | 16 +++++------
 7 files changed, 27 insertions(+), 134 deletions(-)
 delete mode 100644 worker/src/s3.ts

diff --git a/README.md b/README.md
index db90148..d849f0d 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ A free and open source URL shortening, file sharing and pastebin service.
 
 VH7 is a small project offering a free URL shortening, file sharing and pastebin service. Unlike other major URL shorteners, VH7 offers shorter links (4 characters) as well as the ability to have a short link for files and code snippets under the same roof.
 
-VH7 utilises [Cloudflare Workers](https://workers.cloudflare.com/) for hosting the API, [Cloudflare Pages](https://pages.cloudflare.com/) for hosting the frontend, [AWS DynamoDB](https://aws.amazon.com/dynamodb/) for storing data and [AWS S3](https://aws.amazon.com/s3/) for storing files. _I have chosen to use two different cloud providers to allow me to run VH7 as cheaply as I can. Cloudflare gives a very generous Workers free tier, whereas AWS DynamoDB gives a good balance between price and flexability (whereas Cloudflare Workers KV which was used in the past was slightly more difficult to work with)._
+VH7 utilises [Cloudflare Workers](https://workers.cloudflare.com/) for hosting the API, [Cloudflare Pages](https://pages.cloudflare.com/) for hosting the frontend, [Cloudflare D1](https://developers.cloudflare.com/d1/) for storing data and [Cloudflare R2](https://developers.cloudflare.com/r2/) for storing files.
 
 ## Getting Started
 
diff --git a/worker/src/cleanup.ts b/worker/src/cleanup.ts
index 766128b..5c7eff9 100644
--- a/worker/src/cleanup.ts
+++ b/worker/src/cleanup.ts
@@ -1,11 +1,10 @@
 import { type DrizzleD1Database } from 'drizzle-orm/d1';
 import { eq, lt } from 'drizzle-orm';
 import * as models from './models';
-import { deleteObject, type S3Configuration } from './s3';
 
 export default async function cleanup(
   db: DrizzleD1Database,
-  s3Config: S3Configuration,
+  bucket: R2Bucket,
 ): Promise<string[]> {
   const deleted: string[] = [];
   const toCleanUp = await db.query.shortLinks.findMany({
@@ -26,7 +25,7 @@ export default async function cleanup(
         await db.delete(models.shortLinkUploads)
           .where(eq(models.shortLinkUploads.id, shortLink.id));
         await db.delete(models.shortLinks).where(eq(models.shortLinks.id, shortLink.id));
-        await deleteObject(s3Config, shortLink.id);
+        await bucket.delete(shortLink.id);
         break;
       default:
         throw new Error(`Unexpected short link type ${shortLink.type}`);
diff --git a/worker/src/controller.ts b/worker/src/controller.ts
index 0c52099..1752790 100644
--- a/worker/src/controller.ts
+++ b/worker/src/controller.ts
@@ -2,7 +2,6 @@ import { customAlphabet } from 'nanoid/async';
 import { DrizzleD1Database } from 'drizzle-orm/d1';
 import { eq } from 'drizzle-orm/expressions';
 import * as models from './models';
-import { S3Configuration, putObject } from './s3';
 
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', 4);
 
@@ -73,17 +72,14 @@ export async function createPaste(
 
 export async function createUpload(
   db: DrizzleD1Database,
+  bucket: R2Bucket,
   file: File,
   rawExpires: number | null,
-  s3Config: S3Configuration,
 ): Promise {
   const id = await generateId();
   const hash = await sha256(file);
 
-  const res = await putObject(s3Config, id, file);
-  if (res.status !== 200) {
-    throw new Error(`Failed to put object (status=${res.status}, msg=${await res.text()})`);
-  }
+  await bucket.put(id, file);
 
   const maxExpiry = new Date();
   maxExpiry.setDate(maxExpiry.getDate() + 30);
diff --git a/worker/src/index.spec.ts b/worker/src/index.spec.ts
index 730158a..158af84 100644
--- a/worker/src/index.spec.ts
+++ b/worker/src/index.spec.ts
@@ -1,20 +1,15 @@
-import { getBindingsProxy } from 'wrangler';
+import { getPlatformProxy } from 'wrangler';
 import { drizzle } from 'drizzle-orm/d1';
 import app, { type Bindings } from './index';
 import * as models from './models';
-import { S3Configuration, putObject } from './s3';
 import { sha256 } from './controller';
 
-const { bindings } = await getBindingsProxy();
+const { env } = await getPlatformProxy();
 
 // eslint-disable-next-line import/prefer-default-export
 export const appEnv: Bindings = {
-  DB: bindings.DB as D1Database,
+  DB: env.DB as D1Database,
+  UPLOADS: env.UPLOADS as R2Bucket,
   VH7_ENV: 'testing',
-  S3_ACCESS_KEY_ID: process.env.S3_ACCESS_KEY_ID || 'minioadmin',
-  S3_SECRET_ACCESS_KEY: process.env.S3_SECRET_ACCESS_KEY || 'minioadmin',
-  S3_REGION: process.env.S3_REGION || 'eu-west-1',
-  S3_ENDPOINT_URL: process.env.S3_ENDPOINT_URL || 'http://localhost:9000',
-  S3_BUCKET: process.env.S3_BUCKET || 'vh7-uploads',
   VH7_ADMIN_TOKEN: 'keyboardcat',
 };
@@ -57,14 +52,7 @@ beforeAll(async () => {
     id: 'CCCC', filename: file.name, hash: await sha256(file), size: file.size,
   });
 
-  const s3Config: S3Configuration = {
-    accessKeyId: appEnv.S3_ACCESS_KEY_ID,
-    secretAccessKey: appEnv.S3_SECRET_ACCESS_KEY,
-    bucket: appEnv.S3_BUCKET,
-    endpointUrl: appEnv.S3_ENDPOINT_URL,
-    region: appEnv.S3_REGION,
-  };
-  await putObject(s3Config, 'CCCC', file);
+  await appEnv.UPLOADS.put('CCCC', file);
 });
 
 describe('API', () => {
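A note on the spec changes above: `getBindingsProxy` was renamed to `getPlatformProxy` in wrangler, and the returned `env` exposes every binding declared in wrangler.toml (here `DB` and `UPLOADS`) backed by local resources, so the S3/minio environment variables are no longer needed for tests. A minimal sketch of the round trip the updated `beforeAll` relies on; the binding names come from the patch, everything else is illustrative:

```typescript
import { getPlatformProxy } from 'wrangler';

// env is typed by the caller; DB and UPLOADS match the wrangler.toml bindings.
const { env, dispose } = await getPlatformProxy<{
  DB: D1Database;
  UPLOADS: R2Bucket;
}>();

// Seed a fixture the same way the spec's beforeAll() does.
const file = new File(['hello world'], 'hello.txt', { type: 'text/plain' });
await env.UPLOADS.put('CCCC', file);

// Read it back to confirm the binding round-trips the body.
const obj = await env.UPLOADS.get('CCCC');
console.log(await obj?.text()); // "hello world"

// The proxy holds local resources open; release them when finished.
await dispose();
```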
diff --git a/worker/src/index.ts b/worker/src/index.ts
index c4b3a60..6d8b7da 100644
--- a/worker/src/index.ts
+++ b/worker/src/index.ts
@@ -7,17 +7,12 @@ import {
 } from './controller';
 import { checkDirectUserAgent, getFrontendUrl, isValidId } from './helpers';
 import * as models from './models';
-import { S3Configuration, getObject } from './s3';
 import cleanup from './cleanup';
 
 export type Bindings = {
   DB: D1Database,
   VH7_ENV: string,
-  S3_ACCESS_KEY_ID: string,
-  S3_SECRET_ACCESS_KEY: string,
-  S3_REGION: string,
-  S3_ENDPOINT_URL: string,
-  S3_BUCKET: string,
+  UPLOADS: R2Bucket,
   VH7_ADMIN_TOKEN: string
 };
 
@@ -105,15 +100,8 @@ app.post('/api/upload',
     return c.status(500);
   }
 
-  const s3Config: S3Configuration = {
-    accessKeyId: c.env.S3_ACCESS_KEY_ID,
-    secretAccessKey: c.env.S3_SECRET_ACCESS_KEY,
-    bucket: c.env.S3_BUCKET,
-    endpointUrl: c.env.S3_ENDPOINT_URL,
-    region: c.env.S3_REGION,
-  };
-
-  const upload = await createUpload(c.var.db, parsed.data.file, parsed.data.expires, s3Config);
+  const upload = await createUpload(c.var.db, c.env.UPLOADS, parsed.data.file,
+    parsed.data.expires);
 
   return c.json(upload);
 });
@@ -149,15 +137,7 @@ app.get('/api/cleanup', withDb, async (c) => {
     return c.status(500);
   }
-  const s3Config: S3Configuration = {
-    accessKeyId: c.env.S3_ACCESS_KEY_ID,
-    secretAccessKey: c.env.S3_SECRET_ACCESS_KEY,
-    bucket: c.env.S3_BUCKET,
-    endpointUrl: c.env.S3_ENDPOINT_URL,
-    region: c.env.S3_REGION,
-  };
-
-  const deleted = await cleanup(c.var.db, s3Config);
+  const deleted = await cleanup(c.var.db, c.env.UPLOADS);
 
   return c.json({
     deleted,
   });
@@ -203,27 +183,18 @@ app.get('/:id', withDb, async (c) => {
       });
     case 'upload':
       // eslint-disable-next-line no-case-declarations
-      const obj = await getObject({
-        accessKeyId: c.env.S3_ACCESS_KEY_ID,
-        secretAccessKey: c.env.S3_SECRET_ACCESS_KEY,
-        bucket: c.env.S3_BUCKET,
-        endpointUrl: c.env.S3_ENDPOINT_URL,
-        region: c.env.S3_REGION,
-      }, shortlink.id);
-
-      if (obj.status === 404) {
-        return c.text('Short link not found', 404);
-      }
+      const obj = await c.env.UPLOADS.get(shortlink.id);
 
-      if (obj.status !== 200) {
-        return c.status(500);
+      if (obj === null) {
+        return c.text('Short link not found', 404);
       }
 
-      return c.body(obj.body as any, 200, {
+      return c.body(obj.body, 200, {
         'Content-Type': 'application/force-download',
         'Content-Transfer-Encoding': 'binary',
         'Content-Disposition': `attachment; filename="${shortlink.filename}"`,
         'Cache-Control': 'max-age=86400',
+        etag: obj.httpEtag,
       });
     default:
       return c.status(500);
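Two pieces of R2 behaviour do the work in the rewritten download route: `R2Bucket.get()` resolves to `null` for a missing key, which collapses the old 404/500 status juggling into a single null check, and the returned `R2ObjectBody` carries a `ReadableStream` body plus an `httpEtag` that comes pre-quoted for use as an ETag header. A standalone sketch of the same pattern, with hypothetical names and without the Hono context the patch uses:

```typescript
// Sketch only: serve an R2 object as a file download.
async function serveUpload(
  bucket: R2Bucket,
  id: string,
  filename: string,
): Promise<Response> {
  const obj = await bucket.get(id);
  if (obj === null) {
    // A missing key is a null result, not an HTTP status to inspect.
    return new Response('Short link not found', { status: 404 });
  }
  return new Response(obj.body, {
    status: 200,
    headers: {
      'Content-Disposition': `attachment; filename="${filename}"`,
      'Cache-Control': 'max-age=86400',
      ETag: obj.httpEtag, // httpEtag is already wrapped in quotes
    },
  });
}
```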
diff --git a/worker/src/s3.ts b/worker/src/s3.ts
deleted file mode 100644
index 98ff5c8..0000000
--- a/worker/src/s3.ts
+++ /dev/null
@@ -1,61 +0,0 @@
-import { AwsClient } from 'aws4fetch';
-
-export type S3Configuration = {
-  accessKeyId: string,
-  secretAccessKey: string,
-  region: string,
-  endpointUrl: string,
-  bucket: string
-};
-
-async function makeRequest(config: S3Configuration, path: string = '/', options: RequestInit | Request = {}) {
-  const client = new AwsClient({
-    accessKeyId: config.accessKeyId,
-    secretAccessKey: config.secretAccessKey,
-    region: config.region,
-  });
-
-  const url = new URL(`${config.endpointUrl}/${config.bucket}${path}`);
-  const signedRequest = await client.sign(url, {
-    aws: {
-      service: 's3',
-    },
-    ...options,
-  });
-  return fetch(signedRequest, {
-    cf: {
-      cacheEverything: true,
-    },
-  });
-}
-
-export async function getObjectMetadata(config: S3Configuration, filename: string) {
-  const req = makeRequest(config, `/${filename}`, {
-    method: 'HEAD',
-  });
-
-  return req;
-}
-
-export async function getObject(config: S3Configuration, filename: string) {
-  const req = makeRequest(config, `/${filename}`);
-  return req;
-}
-
-export async function putObject(config: S3Configuration, filename: string, file: File) {
-  const req = makeRequest(config, `/${filename}`, {
-    method: 'PUT',
-    body: file,
-    headers: {
-      'Content-Type': file.type,
-      'Content-Length': file.size.toString(),
-    },
-  });
-
-  return req;
-}
-
-export async function deleteObject(config: S3Configuration, filename: string) {
-  const req = makeRequest(config, `/${filename}`, { method: 'DELETE' });
-  return req;
-}
diff --git a/worker/wrangler.toml b/worker/wrangler.toml
index 3f074ca..72853b0 100644
--- a/worker/wrangler.toml
+++ b/worker/wrangler.toml
@@ -4,11 +4,6 @@ workers_dev = true
 main = "src/index.ts"
 
 [vars]
-S3_ACCESS_KEY_ID = "minioadmin"
-S3_SECRET_ACCESS_KEY = "minioadmin"
-S3_REGION = "eu-west-1"
-S3_ENDPOINT_URL = "http://localhost:9000"
-S3_BUCKET = "vh7-uploads"
 VH7_ENV = "development"
 VH7_ADMIN_TOKEN = "keyboardcat"
 
@@ -18,10 +13,11 @@ database_name = "vh7-development"
 database_id = "87904197-0107-411a-a030-be6ed70f8ff7"
 migrations_dir = "migrations"
 
+[[r2_buckets]]
+binding = "UPLOADS"
+bucket_name = "vh7-uploads-development"
+
 [env.production.vars]
-S3_REGION = "eu-west-1"
-S3_ENDPOINT_URL = "https://gateway.storjshare.io"
-S3_BUCKET = "uploads"
 VH7_ENV = "production"
 
 [[env.production.d1_databases]]
@@ -30,6 +26,10 @@ database_name = "vh7"
 database_id = "8248b645-7133-4795-a343-e9273706f77c"
 migrations_dir = "migrations"
 
+[[env.production.r2_buckets]]
+binding = "UPLOADS"
+bucket_name = "vh7-uploads-production"
+
 [[env.production.routes]]
 pattern = "vh7.uk/*"
 zone_name = "vh7.uk"
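For context on the wrangler.toml changes: each `[[r2_buckets]]` entry exposes a bucket to the Worker under its `binding` name, so the code addresses a single `env.UPLOADS` while `bucket_name` selects `vh7-uploads-development` or `vh7-uploads-production` per environment (the buckets themselves are created separately, e.g. with `wrangler r2 bucket create`). A hedged sketch of how the binding surfaces at runtime; the handler is illustrative, not part of the patch:

```typescript
// Assumed shape: one binding name, different underlying buckets per env.
type Env = {
  DB: D1Database;
  UPLOADS: R2Bucket;
};

export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    // head() checks existence without reading the body; wrangler wires
    // env.UPLOADS to whichever bucket_name the active environment declares.
    const head = await env.UPLOADS.head('CCCC');
    return Response.json({ exists: head !== null });
  },
};
```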