From c786e1bc55e4460c8944ba11abfe21bacbcf203f Mon Sep 17 00:00:00 2001
From: Alex Gleason
Date: Mon, 29 Apr 2024 15:32:18 -0500
Subject: [PATCH 1/2] Uploader: make second argument an options object

---
 src/uploaders/config.ts | 8 ++++----
 src/uploaders/ipfs.ts   | 8 ++++----
 src/uploaders/s3.ts     | 6 ++----
 src/uploaders/types.ts  | 4 ++--
 4 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/src/uploaders/config.ts b/src/uploaders/config.ts
index 2ee2f9a..83874f6 100644
--- a/src/uploaders/config.ts
+++ b/src/uploaders/config.ts
@@ -7,11 +7,11 @@ import type { Uploader } from './types.ts';
 
 /** Meta-uploader determined from configuration. */
 const configUploader: Uploader = {
-  upload(file, signal) {
-    return uploader().upload(file, signal);
+  upload(file, opts) {
+    return uploader().upload(file, opts);
   },
-  delete(cid, signal) {
-    return uploader().delete(cid, signal);
+  delete(cid, opts) {
+    return uploader().delete(cid, opts);
   },
 };
 
diff --git a/src/uploaders/ipfs.ts b/src/uploaders/ipfs.ts
index 5d82e2d..6e0e0e7 100644
--- a/src/uploaders/ipfs.ts
+++ b/src/uploaders/ipfs.ts
@@ -18,7 +18,7 @@ const ipfsAddResponseSchema = z.object({
  * and upload the file using the REST API.
  */
 const ipfsUploader: Uploader = {
-  async upload(file, signal) {
+  async upload(file, opts) {
     const url = new URL('/api/v0/add', Conf.ipfs.apiUrl);
 
     const formData = new FormData();
@@ -27,7 +27,7 @@
     const response = await fetchWorker(url, {
       method: 'POST',
       body: formData,
-      signal,
+      signal: opts?.signal,
     });
 
     const { Hash } = ipfsAddResponseSchema.parse(await response.json());
@@ -36,7 +36,7 @@
       cid: Hash,
     };
   },
-  async delete(cid, signal) {
+  async delete(cid, opts) {
     const url = new URL('/api/v0/pin/rm', Conf.ipfs.apiUrl);
 
     const query = new URLSearchParams();
@@ -46,7 +46,7 @@
 
     await fetchWorker(url, {
       method: 'POST',
-      signal,
+      signal: opts?.signal,
     });
   },
 };
diff --git a/src/uploaders/s3.ts b/src/uploaders/s3.ts
index 2e02cc3..378b279 100644
--- a/src/uploaders/s3.ts
+++ b/src/uploaders/s3.ts
@@ -9,10 +9,9 @@ import type { Uploader } from './types.ts';
  * take advantage of IPFS features while not really using IPFS.
  */
 const s3Uploader: Uploader = {
-  async upload(file, _signal) {
+  async upload(file) {
     const cid = await IpfsHash.of(file.stream()) as string;
 
-    // FIXME: Can't cancel S3 requests: https://github.com/bradenmacdonald/deno-s3-lite-client/issues/24
     await client().putObject(`ipfs/${cid}`, file.stream(), {
       metadata: {
         'Content-Type': file.type,
@@ -24,8 +23,7 @@
       cid,
     };
   },
-  async delete(cid, _signal) {
-    // FIXME: Can't cancel S3 requests: https://github.com/bradenmacdonald/deno-s3-lite-client/issues/24
+  async delete(cid) {
     await client().deleteObject(`ipfs/${cid}`);
   },
 };
diff --git a/src/uploaders/types.ts b/src/uploaders/types.ts
index 8f11545..8898048 100644
--- a/src/uploaders/types.ts
+++ b/src/uploaders/types.ts
@@ -1,9 +1,9 @@
 /** Modular uploader interface, to support uploading to different backends. */
 interface Uploader {
   /** Upload the file to the backend. */
-  upload(file: File, signal?: AbortSignal): Promise<UploadResult>;
+  upload(file: File, opts?: { signal?: AbortSignal }): Promise<UploadResult>;
   /** Delete the file from the backend. */
-  delete(cid: string, signal?: AbortSignal): Promise<void>;
+  delete(cid: string, opts?: { signal?: AbortSignal }): Promise<void>;
 }
 
 /** Return value from the uploader after uploading a file. */
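
Patch 1 only reshapes the second argument of `Uploader.upload()` and `Uploader.delete()` from a bare `AbortSignal` into an options object, presumably so further options can be added later without touching every signature again. As a rough illustration of calling the reshaped interface, here is a minimal sketch; the `uploadWithTimeout` helper and the 30-second timeout are invented for the example, not part of the patch:

import type { Uploader } from '@/uploaders/types.ts';

// Cancel a slow upload by passing an AbortSignal through the new options object.
// (Illustrative helper; the timeout value is arbitrary.)
async function uploadWithTimeout(uploader: Uploader, file: File) {
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), 30_000);
  try {
    return await uploader.upload(file, { signal: controller.signal });
  } finally {
    clearTimeout(timeout);
  }
}
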
From 7ada849a6a217a462ab8c0205d40eb276320230c Mon Sep 17 00:00:00 2001
From: Alex Gleason
Date: Mon, 29 Apr 2024 15:54:01 -0500
Subject: [PATCH 2/2] s3: support pathStyle

---
 deno.json              |  3 +++
 src/upload.ts          |  3 +--
 src/uploaders/ipfs.ts  |  6 ++++--
 src/uploaders/s3.ts    | 31 ++++++++++++++++++++-----------
 src/uploaders/types.ts | 10 ++++++++--
 5 files changed, 36 insertions(+), 17 deletions(-)

diff --git a/deno.json b/deno.json
index c5681e3..d241262 100644
--- a/deno.json
+++ b/deno.json
@@ -18,7 +18,10 @@
     "@/": "./src/",
     "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.15.0",
     "@std/cli": "jsr:@std/cli@^0.223.0",
+    "@std/crypto": "jsr:@std/crypto@^0.224.0",
+    "@std/encoding": "jsr:@std/encoding@^0.224.0",
     "@std/json": "jsr:@std/json@^0.223.0",
+    "@std/media-types": "jsr:@std/media-types@^0.224.0",
     "@std/streams": "jsr:@std/streams@^0.223.0",
     "hono": "https://deno.land/x/hono@v3.10.1/mod.ts",
     "hono/middleware": "https://deno.land/x/hono@v3.10.1/middleware.ts",
diff --git a/src/upload.ts b/src/upload.ts
index 5c16501..632dbab 100644
--- a/src/upload.ts
+++ b/src/upload.ts
@@ -16,8 +16,7 @@ async function uploadFile(file: File, meta: FileMeta, signal?: AbortSignal) {
     throw new Error('File size is too large.');
   }
 
-  const { cid } = await uploader.upload(file, signal);
-  const url = new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString();
+  const { url } = await uploader.upload(file, { signal });
 
   return insertUnattachedMedia({
     pubkey,
diff --git a/src/uploaders/ipfs.ts b/src/uploaders/ipfs.ts
index 6e0e0e7..21619b5 100644
--- a/src/uploaders/ipfs.ts
+++ b/src/uploaders/ipfs.ts
@@ -30,10 +30,12 @@ const ipfsUploader: Uploader = {
       signal: opts?.signal,
     });
 
-    const { Hash } = ipfsAddResponseSchema.parse(await response.json());
+    const { Hash: cid } = ipfsAddResponseSchema.parse(await response.json());
 
     return {
-      cid: Hash,
+      id: cid,
+      cid,
+      url: new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString(),
     };
   },
   async delete(cid, opts) {
diff --git a/src/uploaders/s3.ts b/src/uploaders/s3.ts
index 378b279..29f3043 100644
--- a/src/uploaders/s3.ts
+++ b/src/uploaders/s3.ts
@@ -1,30 +1,39 @@
+import { join } from 'node:path';
+
+import { crypto } from '@std/crypto';
+import { encodeHex } from '@std/encoding/hex';
+import { extensionsByType } from '@std/media-types';
+
 import { Conf } from '@/config.ts';
-import { IpfsHash, S3Client } from '@/deps.ts';
+import { S3Client } from '@/deps.ts';
 
 import type { Uploader } from './types.ts';
 
-/**
- * S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more.
- * Files are named by their IPFS CID and exposed at `/ipfs/<cid>`, letting it
- * take advantage of IPFS features while not really using IPFS.
- */
+/** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */
 const s3Uploader: Uploader = {
   async upload(file) {
-    const cid = await IpfsHash.of(file.stream()) as string;
+    const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
+    const ext = extensionsByType(file.type)?.[0] ?? 'bin';
+    const filename = `${sha256}.${ext}`;
 
-    await client().putObject(`ipfs/${cid}`, file.stream(), {
+    await client().putObject(filename, file.stream(), {
       metadata: {
         'Content-Type': file.type,
         'x-amz-acl': 'public-read',
       },
     });
 
+    const { pathStyle, bucket } = Conf.s3;
+    const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
+
     return {
-      cid,
+      id: filename,
+      sha256,
+      url: new URL(path, Conf.mediaDomain).toString(),
     };
   },
-  async delete(cid) {
-    await client().deleteObject(`ipfs/${cid}`);
+  async delete(id) {
+    await client().deleteObject(id);
   },
 };
 
diff --git a/src/uploaders/types.ts b/src/uploaders/types.ts
index 8898048..c514ad1 100644
--- a/src/uploaders/types.ts
+++ b/src/uploaders/types.ts
@@ -8,8 +8,14 @@ interface Uploader {
 
 /** Return value from the uploader after uploading a file. */
 interface UploadResult {
-  /** IPFS CID for the file. */
-  cid: string;
+  /** File ID specific to the uploader, so it can later be referenced or deleted. */
+  id: string;
+  /** URL where the file can be accessed. */
+  url: string;
+  /** SHA-256 hash of the file. */
+  sha256?: string;
+  /** IPFS CID of the file. */
+  cid?: string;
 }
 
 export type { Uploader };
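
To make the effect of the pathStyle option in patch 2 concrete: with path-style access the bucket name must appear as the first URL path segment, while otherwise the file is served directly under the media domain (presumably because that domain already points at the bucket host). A rough standalone sketch of the same URL construction follows; the `mediaUrl` helper, the domain, the bucket name, and the shortened filename are all invented for illustration:

import { join } from 'node:path';

// Mirrors the construction in patch 2: when pathStyle is enabled and a bucket
// is configured, the bucket becomes the leading path segment of the public URL.
function mediaUrl(filename: string, mediaDomain: string, bucket?: string, pathStyle = false): string {
  const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
  return new URL(path, mediaDomain).toString();
}

// Illustrative values only (a real filename is a full 64-character SHA-256 hex digest plus extension):
console.log(mediaUrl('da39a3ee.png', 'https://media.example.com', 'my-bucket', true));
// https://media.example.com/my-bucket/da39a3ee.png
console.log(mediaUrl('da39a3ee.png', 'https://my-bucket.media.example.com'));
// https://my-bucket.media.example.com/da39a3ee.png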