Merge branch 's3-path' into 'main'
S3: support pathStyle

See merge request soapbox-pub/ditto!193
commit 22064f86aa
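For context: with S3-compatible storage, "path-style" addressing puts the bucket name in the URL path (for example https://s3.example.com/mybucket/key) rather than in the hostname (https://mybucket.s3.example.com/key); those hostnames are illustrative only. This change carries that convention over to the public media URLs: when Conf.s3.pathStyle is enabled and a bucket is configured, the bucket name is joined into the path of the URL built from Conf.mediaDomain. Along the way, the uploader interface switches from a bare AbortSignal argument to an options object, and S3 objects are named by their SHA-256 hash rather than an IPFS CID.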
@@ -18,7 +18,10 @@
     "@/": "./src/",
     "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.15.0",
     "@std/cli": "jsr:@std/cli@^0.223.0",
+    "@std/crypto": "jsr:@std/crypto@^0.224.0",
+    "@std/encoding": "jsr:@std/encoding@^0.224.0",
     "@std/json": "jsr:@std/json@^0.223.0",
+    "@std/media-types": "jsr:@std/media-types@^0.224.0",
     "@std/streams": "jsr:@std/streams@^0.223.0",
     "hono": "https://deno.land/x/hono@v3.10.1/mod.ts",
     "hono/middleware": "https://deno.land/x/hono@v3.10.1/middleware.ts",
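The three added @std dependencies back the S3 uploader changes further down: @std/crypto and @std/encoding compute and hex-encode the file's SHA-256 hash, and @std/media-types maps the MIME type to a file extension.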
@@ -16,8 +16,7 @@ async function uploadFile(file: File, meta: FileMeta, signal?: AbortSignal) {
     throw new Error('File size is too large.');
   }
 
-  const { cid } = await uploader.upload(file, signal);
-  const url = new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString();
+  const { url } = await uploader.upload(file, { signal });
 
   return insertUnattachedMedia({
     pubkey,
@@ -7,11 +7,11 @@ import type { Uploader } from './types.ts';
 
 /** Meta-uploader determined from configuration. */
 const configUploader: Uploader = {
-  upload(file, signal) {
-    return uploader().upload(file, signal);
+  upload(file, opts) {
+    return uploader().upload(file, opts);
   },
-  delete(cid, signal) {
-    return uploader().delete(cid, signal);
+  delete(cid, opts) {
+    return uploader().delete(cid, opts);
   },
 };
 
@@ -18,7 +18,7 @@ const ipfsAddResponseSchema = z.object({
  * and upload the file using the REST API.
  */
 const ipfsUploader: Uploader = {
-  async upload(file, signal) {
+  async upload(file, opts) {
     const url = new URL('/api/v0/add', Conf.ipfs.apiUrl);
 
     const formData = new FormData();
@@ -27,16 +27,18 @@ const ipfsUploader: Uploader = {
     const response = await fetchWorker(url, {
       method: 'POST',
       body: formData,
-      signal,
+      signal: opts?.signal,
     });
 
-    const { Hash } = ipfsAddResponseSchema.parse(await response.json());
+    const { Hash: cid } = ipfsAddResponseSchema.parse(await response.json());
 
     return {
-      cid: Hash,
+      id: cid,
+      cid,
+      url: new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString(),
     };
   },
-  async delete(cid, signal) {
+  async delete(cid, opts) {
     const url = new URL('/api/v0/pin/rm', Conf.ipfs.apiUrl);
 
     const query = new URLSearchParams();
@@ -46,7 +48,7 @@ const ipfsUploader: Uploader = {
 
     await fetchWorker(url, {
       method: 'POST',
-      signal,
+      signal: opts?.signal,
     });
   },
 };
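After these changes the IPFS uploader resolves its own public URL instead of leaving that to uploadFile. A sketch of the result it now returns, with a made-up CID and media domain:

// Illustrative values only; the CID and domain are not real.
const result = {
  id: 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG',  // uploader-specific ID (the CID here)
  cid: 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG', // IPFS CID
  url: 'https://media.example.com/ipfs/QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG',
};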
@@ -1,32 +1,39 @@
+import { join } from 'node:path';
+
+import { crypto } from '@std/crypto';
+import { encodeHex } from '@std/encoding/hex';
+import { extensionsByType } from '@std/media-types';
+
 import { Conf } from '@/config.ts';
-import { IpfsHash, S3Client } from '@/deps.ts';
+import { S3Client } from '@/deps.ts';
 
 import type { Uploader } from './types.ts';
 
-/**
- * S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more.
- * Files are named by their IPFS CID and exposed at `/ipfs/<cid>`, letting it
- * take advantage of IPFS features while not really using IPFS.
- */
+/** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */
 const s3Uploader: Uploader = {
-  async upload(file, _signal) {
-    const cid = await IpfsHash.of(file.stream()) as string;
+  async upload(file) {
+    const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
+    const ext = extensionsByType(file.type)?.[0] ?? 'bin';
+    const filename = `${sha256}.${ext}`;
 
-    // FIXME: Can't cancel S3 requests: https://github.com/bradenmacdonald/deno-s3-lite-client/issues/24
-    await client().putObject(`ipfs/${cid}`, file.stream(), {
+    await client().putObject(filename, file.stream(), {
       metadata: {
         'Content-Type': file.type,
        'x-amz-acl': 'public-read',
      },
    });
 
+    const { pathStyle, bucket } = Conf.s3;
+    const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
+
     return {
-      cid,
+      id: filename,
+      sha256,
+      url: new URL(path, Conf.mediaDomain).toString(),
     };
   },
-  async delete(cid, _signal) {
-    // FIXME: Can't cancel S3 requests: https://github.com/bradenmacdonald/deno-s3-lite-client/issues/24
-    await client().deleteObject(`ipfs/${cid}`);
+  async delete(id) {
+    await client().deleteObject(id);
   },
 };
 
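The pathStyle handling above is the core of this merge request. Below is a standalone sketch of the URL logic; only Conf.s3.pathStyle, Conf.s3.bucket, and Conf.mediaDomain come from the diff, while the mediaUrl helper, domain, bucket, and filename are made up for illustration:

import { join } from 'node:path';

/** Builds the public media URL, joining the bucket into the path when path-style is enabled. */
function mediaUrl(mediaDomain: string, filename: string, s3: { pathStyle?: boolean; bucket?: string }): string {
  const { pathStyle, bucket } = s3;
  const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
  return new URL(path, mediaDomain).toString();
}

// pathStyle off: https://media.example.com/abc123.png
mediaUrl('https://media.example.com', 'abc123.png', {});

// pathStyle on: https://media.example.com/ditto/abc123.png
mediaUrl('https://media.example.com', 'abc123.png', { pathStyle: true, bucket: 'ditto' });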
@@ -1,15 +1,21 @@
 /** Modular uploader interface, to support uploading to different backends. */
 interface Uploader {
   /** Upload the file to the backend. */
-  upload(file: File, signal?: AbortSignal): Promise<UploadResult>;
+  upload(file: File, opts?: { signal?: AbortSignal }): Promise<UploadResult>;
   /** Delete the file from the backend. */
-  delete(cid: string, signal?: AbortSignal): Promise<void>;
+  delete(cid: string, opts?: { signal?: AbortSignal }): Promise<void>;
 }
 
 /** Return value from the uploader after uploading a file. */
 interface UploadResult {
-  /** IPFS CID for the file. */
-  cid: string;
+  /** File ID specific to the uploader, so it can later be referenced or deleted. */
+  id: string;
+  /** URL where the file can be accessed. */
+  url: string;
+  /** SHA-256 hash of the file. */
+  sha256?: string;
+  /** IPFS CID of the file. */
+  cid?: string;
 }
 
 export type { Uploader };
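To illustrate the updated signatures, here is a minimal in-memory Uploader. It is not part of Ditto, just a sketch of how an implementation conforms to the new options-object interface; the URL is hypothetical:

import type { Uploader } from './types.ts';

const store = new Map<string, File>();

const memoryUploader: Uploader = {
  // Takes an optional { signal } object instead of a bare AbortSignal.
  async upload(file, opts) {
    opts?.signal?.throwIfAborted();
    const id = crypto.randomUUID();
    store.set(id, file);
    return { id, url: `https://media.example.com/${id}` };
  },
  async delete(id, opts) {
    opts?.signal?.throwIfAborted();
    store.delete(id);
  },
};

export default memoryUploader;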