Merge branch 'uploader-type' into 'main'

Rewrite all the uploaders

See merge request soapbox-pub/ditto!278

commit 3d93be6a13
@@ -81,6 +81,7 @@ import { hostMetaController } from '@/controllers/well-known/host-meta.ts';
 import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts';
 import { nostrController } from '@/controllers/well-known/nostr.ts';
 import { webfingerController } from '@/controllers/well-known/webfinger.ts';
+import { DittoUploader } from '@/interfaces/DittoUploader.ts';
 import { auth98Middleware, requireProof, requireRole } from '@/middleware/auth98Middleware.ts';
 import { cacheMiddleware } from '@/middleware/cacheMiddleware.ts';
 import { cspMiddleware } from '@/middleware/cspMiddleware.ts';

@@ -89,11 +90,14 @@ import { signerMiddleware } from '@/middleware/signerMiddleware.ts';
 import { storeMiddleware } from '@/middleware/storeMiddleware.ts';
 import { blockController } from '@/controllers/api/accounts.ts';
 import { unblockController } from '@/controllers/api/accounts.ts';
+import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';

 interface AppEnv extends HonoEnv {
   Variables: {
     /** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */
     signer?: NostrSigner;
+    /** Uploader for the user to upload files. */
+    uploader?: DittoUploader;
     /** NIP-98 signed event proving the pubkey is owned by the user. */
     proof?: NostrEvent;
     /** Store */

@@ -129,6 +133,7 @@ app.use(
   cspMiddleware(),
   cors({ origin: '*', exposeHeaders: ['link'] }),
   signerMiddleware,
+  uploaderMiddleware,
   auth98Middleware(),
   storeMiddleware,
 );
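Note: with `uploader?: DittoUploader` declared on the `Variables` of `AppEnv`, any handler typed against this env gets a typed `c.get('uploader')` populated by `uploaderMiddleware`. A minimal sketch of reading it from a controller (the controller name is illustrative and not part of this MR):

```ts
import { AppController } from '@/app.ts';

// Hypothetical controller for illustration: reports whether uploads are enabled.
const uploadsEnabledController: AppController = (c) => {
  const uploader = c.get('uploader'); // DittoUploader | undefined
  return c.json({ uploads_enabled: Boolean(uploader) });
};

export { uploadsEnabledController };
```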
@@ -8,7 +8,7 @@ import { getAuthor, getFollowedPubkeys } from '@/queries.ts';
 import { booleanParamSchema, fileSchema } from '@/schema.ts';
 import { Storages } from '@/storages.ts';
 import { addTag, deleteTag, findReplyTag, getTagSet } from '@/tags.ts';
-import { uploadFile } from '@/upload.ts';
+import { uploadFile } from '@/utils/upload.ts';
 import { nostrNow } from '@/utils.ts';
 import { createEvent, paginated, paginationSchema, parseBody, updateListEvent } from '@/utils/api.ts';
 import { lookupAccount } from '@/utils/lookup.ts';

@@ -221,8 +221,8 @@ const updateCredentialsController: AppController = async (c) => {
   } = result.data;

   const [avatar, header] = await Promise.all([
-    avatarFile ? uploadFile(avatarFile, { pubkey }) : undefined,
-    headerFile ? uploadFile(headerFile, { pubkey }) : undefined,
+    avatarFile ? uploadFile(c, avatarFile, { pubkey }) : undefined,
+    headerFile ? uploadFile(c, headerFile, { pubkey }) : undefined,
   ]);

   meta.name = display_name ?? meta.name;
@@ -4,7 +4,7 @@ import { AppController } from '@/app.ts';
 import { fileSchema } from '@/schema.ts';
 import { parseBody } from '@/utils/api.ts';
 import { renderAttachment } from '@/views/mastodon/attachments.ts';
-import { uploadFile } from '@/upload.ts';
+import { uploadFile } from '@/utils/upload.ts';

 const mediaBodySchema = z.object({
   file: fileSchema,

@@ -24,7 +24,7 @@ const mediaController: AppController = async (c) => {

   try {
     const { file, description } = result.data;
-    const media = await uploadFile(file, { pubkey, description }, signal);
+    const media = await uploadFile(c, file, { pubkey, description }, signal);
     return c.json(renderAttachment(media));
   } catch (e) {
     console.error(e);
@@ -0,0 +1,3 @@
+export interface DittoUploader {
+  upload(file: File, opts?: { signal?: AbortSignal }): Promise<[['url', string], ...string[][]]>;
+}
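The `upload` return type is a non-empty tag list whose first entry is always `['url', <public URL>]`, with further NIP-94-style tags (`m`, `x`, `size`, and so on) after it. A consumption sketch against this interface (the helper is illustrative):

```ts
import { DittoUploader } from '@/interfaces/DittoUploader.ts';

// Hypothetical helper: upload a file and pull the URL out of the first tag.
async function uploadAndGetUrl(uploader: DittoUploader, file: File): Promise<string> {
  const tags = await uploader.upload(file, { signal: AbortSignal.timeout(30_000) });
  const [[, url]] = tags; // the tuple type guarantees tags[0] is ['url', string]
  return url;
}
```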
@@ -0,0 +1,27 @@
+import { AppMiddleware } from '@/app.ts';
+import { Conf } from '@/config.ts';
+import { DenoUploader } from '@/uploaders/DenoUploader.ts';
+import { IPFSUploader } from '@/uploaders/IPFSUploader.ts';
+import { NostrBuildUploader } from '@/uploaders/NostrBuildUploader.ts';
+import { S3Uploader } from '@/uploaders/S3Uploader.ts';
+import { fetchWorker } from '@/workers/fetch.ts';
+
+/** Set an uploader for the user. */
+export const uploaderMiddleware: AppMiddleware = async (c, next) => {
+  switch (Conf.uploader) {
+    case 's3':
+      c.set('uploader', new S3Uploader(Conf.s3));
+      break;
+    case 'ipfs':
+      c.set('uploader', new IPFSUploader({ baseUrl: Conf.mediaDomain, apiUrl: Conf.ipfs.apiUrl, fetch: fetchWorker }));
+      break;
+    case 'local':
+      c.set('uploader', new DenoUploader({ baseUrl: Conf.mediaDomain, dir: Conf.uploadsDir }));
+      break;
+    case 'nostrbuild':
+      c.set('uploader', new NostrBuildUploader({ endpoint: Conf.nostrbuildEndpoint, fetch: fetchWorker }));
+      break;
+  }
+
+  await next();
+};
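The backend is keyed off `Conf.uploader` (the `DITTO_UPLOADER` setting referenced by the removed config uploader further down); an unrecognized value leaves `uploader` unset, and `uploadFile` then responds with a 500. A hypothetical sketch of wiring in another backend, not part of this MR:

```ts
import { DittoUploader } from '@/interfaces/DittoUploader.ts';

// Hypothetical no-op backend, for illustration only.
class NullUploader implements DittoUploader {
  async upload(file: File): Promise<[['url', string], ...string[][]]> {
    // Pretend-upload: return a placeholder URL plus basic metadata tags.
    return [
      ['url', `https://example.com/${crypto.randomUUID()}`],
      ['m', file.type],
      ['size', file.size.toString()],
    ];
  }
}

// It could then be registered with an extra case in the switch above:
//   case 'null':
//     c.set('uploader', new NullUploader());
//     break;
```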
@@ -1,19 +0,0 @@
-import { z } from 'zod';
-
-export const nostrbuildFileSchema = z.object({
-  name: z.string(),
-  url: z.string().url(),
-  thumbnail: z.string(),
-  blurhash: z.string(),
-  sha256: z.string(),
-  original_sha256: z.string(),
-  mime: z.string(),
-  dimensions: z.object({
-    width: z.number(),
-    height: z.number(),
-  }).optional().catch(undefined),
-});
-
-export const nostrbuildSchema = z.object({
-  data: nostrbuildFileSchema.array().min(1),
-});
@@ -0,0 +1,47 @@
+import { join } from 'node:path';
+
+import { crypto } from '@std/crypto';
+import { encodeHex } from '@std/encoding/hex';
+import { extensionsByType } from '@std/media-types';
+
+import { DittoUploader } from '@/interfaces/DittoUploader.ts';
+
+export interface DenoUploaderOpts {
+  baseUrl: string;
+  dir: string;
+}
+
+/** Local Deno filesystem uploader. */
+export class DenoUploader implements DittoUploader {
+  baseUrl: string;
+  dir: string;
+
+  constructor(opts: DenoUploaderOpts) {
+    this.baseUrl = opts.baseUrl;
+    this.dir = opts.dir;
+  }
+
+  async upload(file: File): Promise<[['url', string], ...string[][]]> {
+    const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
+    const ext = extensionsByType(file.type)?.[0] ?? 'bin';
+    const filename = `${sha256}.${ext}`;
+
+    await Deno.mkdir(this.dir, { recursive: true });
+    await Deno.writeFile(join(this.dir, filename), file.stream());
+
+    const url = new URL(this.baseUrl);
+    const path = url.pathname === '/' ? filename : join(url.pathname, filename);
+
+    return [
+      ['url', new URL(path, url).toString()],
+      ['m', file.type],
+      ['x', sha256],
+      ['size', file.size.toString()],
+    ];
+  }
+
+  async delete(filename: string) {
+    const path = join(this.dir, filename);
+    await Deno.remove(path);
+  }
+}
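A usage sketch; the base URL and directory values are illustrative (in Ditto, `uploaderMiddleware` passes `Conf.mediaDomain` and `Conf.uploadsDir`):

```ts
import { DenoUploader } from '@/uploaders/DenoUploader.ts';

// Illustrative values; the middleware normally supplies these from Conf.
const uploader = new DenoUploader({ baseUrl: 'https://media.example.com', dir: 'data/uploads' });

const file = new File(['hello world'], 'hello.txt', { type: 'text/plain' });
const tags = await uploader.upload(file);

console.log(tags[0][1]); // e.g. https://media.example.com/<sha256>.txt
```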
@@ -0,0 +1,70 @@
+import { z } from 'zod';
+
+import { DittoUploader } from '@/interfaces/DittoUploader.ts';
+
+export interface IPFSUploaderOpts {
+  baseUrl: string;
+  apiUrl?: string;
+  fetch?: typeof fetch;
+}
+
+/**
+ * IPFS uploader. It expects an IPFS node up and running.
+ * It will try to connect to `http://localhost:5001` by default,
+ * and upload the file using the REST API.
+ */
+export class IPFSUploader implements DittoUploader {
+  private baseUrl: string;
+  private apiUrl: string;
+  private fetch: typeof fetch;
+
+  constructor(opts: IPFSUploaderOpts) {
+    this.baseUrl = opts.baseUrl;
+    this.apiUrl = opts.apiUrl ?? 'http://localhost:5001';
+    this.fetch = opts.fetch ?? globalThis.fetch;
+  }
+
+  async upload(file: File, opts?: { signal?: AbortSignal }): Promise<[['url', string], ...string[][]]> {
+    const url = new URL('/api/v0/add', this.apiUrl);
+
+    const formData = new FormData();
+    formData.append('file', file);
+
+    const response = await this.fetch(url, {
+      method: 'POST',
+      body: formData,
+      signal: opts?.signal,
+    });
+
+    const { Hash: cid } = IPFSUploader.schema().parse(await response.json());
+
+    return [
+      ['url', new URL(`/ipfs/${cid}`, this.baseUrl).toString()],
+      ['m', file.type],
+      ['cid', cid],
+      ['size', file.size.toString()],
+    ];
+  }
+
+  async delete(cid: string, opts?: { signal?: AbortSignal }): Promise<void> {
+    const url = new URL('/api/v0/pin/rm', this.apiUrl);
+
+    const query = new URLSearchParams();
+    query.set('arg', cid);
+    url.search = query.toString();
+
+    await this.fetch(url, {
+      method: 'POST',
+      signal: opts?.signal,
+    });
+  }
+
+  /** Response schema for POST `/api/v0/add`. */
+  private static schema() {
+    return z.object({
+      Name: z.string(),
+      Hash: z.string(),
+      Size: z.string(),
+    });
+  }
+}
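A usage sketch assuming a local IPFS daemon on the default API port; the base URL is illustrative:

```ts
import { IPFSUploader } from '@/uploaders/IPFSUploader.ts';

// apiUrl defaults to http://localhost:5001; baseUrl here is illustrative.
const uploader = new IPFSUploader({ baseUrl: 'https://media.example.com' });

const file = new File(['hello world'], 'hello.txt', { type: 'text/plain' });
const tags = await uploader.upload(file, { signal: AbortSignal.timeout(15_000) });

console.log(tags[0][1]); // e.g. https://media.example.com/ipfs/<cid>
```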
@@ -0,0 +1,69 @@
+import { z } from 'zod';
+
+import { DittoUploader } from '@/interfaces/DittoUploader.ts';
+
+export interface NostrBuildUploaderOpts {
+  endpoint?: string;
+  fetch?: typeof fetch;
+}
+
+/** Upload files to nostr.build or another compatible server. */
+export class NostrBuildUploader implements DittoUploader {
+  private endpoint: string;
+  private fetch: typeof fetch;
+
+  constructor(opts: NostrBuildUploaderOpts) {
+    this.endpoint = opts.endpoint ?? 'https://nostr.build/api/v2/upload/files';
+    this.fetch = opts.fetch ?? globalThis.fetch;
+  }
+
+  async upload(file: File, opts?: { signal?: AbortSignal }): Promise<[['url', string], ...string[][]]> {
+    const formData = new FormData();
+    formData.append('fileToUpload', file);
+
+    const response = await this.fetch(this.endpoint, {
+      method: 'POST',
+      body: formData,
+      signal: opts?.signal,
+    });
+
+    const json = await response.json();
+    const [data] = NostrBuildUploader.schema().parse(json).data;
+
+    const tags: [['url', string], ...string[][]] = [
+      ['url', data.url],
+      ['m', data.mime],
+      ['x', data.sha256],
+      ['ox', data.original_sha256],
+      ['size', data.size.toString()],
+    ];
+
+    if (data.dimensions) {
+      tags.push(['dim', `${data.dimensions.width}x${data.dimensions.height}`]);
+    }
+
+    if (data.blurhash) {
+      tags.push(['blurhash', data.blurhash]);
+    }
+
+    return tags;
+  }
+
+  /** nostr.build API response schema. */
+  private static schema() {
+    return z.object({
+      data: z.object({
+        url: z.string().url(),
+        blurhash: z.string().optional().catch(undefined),
+        sha256: z.string(),
+        original_sha256: z.string(),
+        mime: z.string(),
+        size: z.number(),
+        dimensions: z.object({
+          width: z.number(),
+          height: z.number(),
+        }).optional().catch(undefined),
+      }).array().min(1),
+    });
+  }
+}
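A usage sketch with the defaults; the endpoint falls back to nostr.build's v2 upload API when not configured:

```ts
import { NostrBuildUploader } from '@/uploaders/NostrBuildUploader.ts';

// endpoint defaults to https://nostr.build/api/v2/upload/files
const uploader = new NostrBuildUploader({});

const file = new File(['hello world'], 'hello.txt', { type: 'text/plain' });
const tags = await uploader.upload(file);

// Tags include ['url'], ['m'], ['x'], ['ox'] and ['size'], plus ['dim'] and
// ['blurhash'] when nostr.build reports dimensions and a blurhash.
console.log(tags);
```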
@@ -6,17 +6,34 @@ import { encodeHex } from '@std/encoding/hex';
 import { extensionsByType } from '@std/media-types';

 import { Conf } from '@/config.ts';
+import { DittoUploader } from '@/interfaces/DittoUploader.ts';

-import type { Uploader } from './types.ts';
+export interface S3UploaderOpts {
+  endPoint: string;
+  region: string;
+  accessKey?: string;
+  secretKey?: string;
+  bucket?: string;
+  pathStyle?: boolean;
+  port?: number;
+  sessionToken?: string;
+  useSSL?: boolean;
+}

 /** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */
-const s3Uploader: Uploader = {
-  async upload(file) {
+export class S3Uploader implements DittoUploader {
+  private client: S3Client;
+
+  constructor(opts: S3UploaderOpts) {
+    this.client = new S3Client(opts);
+  }
+
+  async upload(file: File): Promise<[['url', string], ...string[][]]> {
     const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
     const ext = extensionsByType(file.type)?.[0] ?? 'bin';
     const filename = `${sha256}.${ext}`;

-    await client().putObject(filename, file.stream(), {
+    await this.client.putObject(filename, file.stream(), {
       metadata: {
         'Content-Type': file.type,
         'x-amz-acl': 'public-read',

@@ -24,6 +41,7 @@
     });

     const { pathStyle, bucket } = Conf.s3;

     const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
     const url = new URL(path, Conf.mediaDomain).toString();
+

@@ -33,15 +51,9 @@
       ['x', sha256],
       ['size', file.size.toString()],
     ];
-  },
-  async delete(id) {
-    await client().deleteObject(id);
-  },
-};
-
-/** Build S3 client from config. */
-function client() {
-  return new S3Client({ ...Conf.s3 });
-}
-
-export { s3Uploader };
+  }
+
+  async delete(objectName: string) {
+    await this.client.deleteObject(objectName);
+  }
+}
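A construction sketch; all option values below are illustrative, and in Ditto they come from `Conf.s3` via `uploaderMiddleware`:

```ts
import { S3Uploader } from '@/uploaders/S3Uploader.ts';

// Illustrative endpoint and credentials only.
const uploader = new S3Uploader({
  endPoint: 's3.example.com',
  region: 'us-east-1',
  accessKey: 'EXAMPLE_ACCESS_KEY',
  secretKey: 'EXAMPLE_SECRET_KEY',
  bucket: 'media',
  pathStyle: true,
  useSSL: true,
});

const file = new File(['hello world'], 'hello.txt', { type: 'text/plain' });
const [[, url]] = await uploader.upload(file); // first tag is ['url', <public URL>]
console.log(url);
```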
@@ -1,36 +0,0 @@
-import { Conf } from '@/config.ts';
-
-import { ipfsUploader } from '@/uploaders/ipfs.ts';
-import { localUploader } from '@/uploaders/local.ts';
-import { nostrbuildUploader } from '@/uploaders/nostrbuild.ts';
-import { s3Uploader } from '@/uploaders/s3.ts';
-
-import type { Uploader } from './types.ts';
-
-/** Meta-uploader determined from configuration. */
-const configUploader: Uploader = {
-  upload(file, opts) {
-    return uploader().upload(file, opts);
-  },
-  async delete(id, opts) {
-    return await uploader().delete?.(id, opts);
-  },
-};
-
-/** Get the uploader module based on configuration. */
-function uploader() {
-  switch (Conf.uploader) {
-    case 's3':
-      return s3Uploader;
-    case 'ipfs':
-      return ipfsUploader;
-    case 'local':
-      return localUploader;
-    case 'nostrbuild':
-      return nostrbuildUploader;
-    default:
-      throw new Error('No `DITTO_UPLOADER` configured. Uploads are disabled.');
-  }
-}
-
-export { configUploader };
@@ -1,57 +0,0 @@
-import { z } from 'zod';
-
-import { Conf } from '@/config.ts';
-import { fetchWorker } from '@/workers/fetch.ts';
-
-import type { Uploader } from './types.ts';
-
-/** Response schema for POST `/api/v0/add`. */
-const ipfsAddResponseSchema = z.object({
-  Name: z.string(),
-  Hash: z.string(),
-  Size: z.string(),
-});
-
-/**
- * IPFS uploader. It expects an IPFS node up and running.
- * It will try to connect to `http://localhost:5001` by default,
- * and upload the file using the REST API.
- */
-const ipfsUploader: Uploader = {
-  async upload(file, opts) {
-    const url = new URL('/api/v0/add', Conf.ipfs.apiUrl);
-
-    const formData = new FormData();
-    formData.append('file', file);
-
-    const response = await fetchWorker(url, {
-      method: 'POST',
-      body: formData,
-      signal: opts?.signal,
-    });
-
-    const { Hash: cid } = ipfsAddResponseSchema.parse(await response.json());
-
-    return [
-      ['url', new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString()],
-      ['m', file.type],
-      ['cid', cid],
-      ['size', file.size.toString()],
-    ];
-  },
-  async delete(cid, opts) {
-    const url = new URL('/api/v0/pin/rm', Conf.ipfs.apiUrl);
-
-    const query = new URLSearchParams();
-    query.set('arg', cid);
-
-    url.search = query.toString();
-
-    await fetchWorker(url, {
-      method: 'POST',
-      signal: opts?.signal,
-    });
-  },
-};
-
-export { ipfsUploader };
@@ -1,37 +0,0 @@
-import { join } from 'node:path';
-
-import { crypto } from '@std/crypto';
-import { encodeHex } from '@std/encoding/hex';
-import { extensionsByType } from '@std/media-types';
-
-import { Conf } from '@/config.ts';
-
-import type { Uploader } from './types.ts';
-
-/** Local filesystem uploader. */
-const localUploader: Uploader = {
-  async upload(file) {
-    const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
-    const ext = extensionsByType(file.type)?.[0] ?? 'bin';
-    const filename = `${sha256}.${ext}`;
-
-    await Deno.mkdir(Conf.uploadsDir, { recursive: true });
-    await Deno.writeFile(join(Conf.uploadsDir, filename), file.stream());
-
-    const { mediaDomain } = Conf;
-    const url = new URL(mediaDomain);
-    const path = url.pathname === '/' ? filename : join(url.pathname, filename);
-
-    return [
-      ['url', new URL(path, url).toString()],
-      ['m', file.type],
-      ['x', sha256],
-      ['size', file.size.toString()],
-    ];
-  },
-  async delete(id) {
-    await Deno.remove(join(Conf.uploadsDir, id));
-  },
-};
-
-export { localUploader };
@@ -1,35 +0,0 @@
-import { Conf } from '@/config.ts';
-import { nostrbuildSchema } from '@/schemas/nostrbuild.ts';
-
-import type { Uploader } from './types.ts';
-
-/** nostr.build uploader. */
-export const nostrbuildUploader: Uploader = {
-  async upload(file) {
-    const formData = new FormData();
-    formData.append('fileToUpload', file);
-
-    const response = await fetch(Conf.nostrbuildEndpoint, {
-      method: 'POST',
-      body: formData,
-    });
-
-    const json = await response.json();
-    const [data] = nostrbuildSchema.parse(json).data;
-
-    const tags: [['url', string], ...string[][]] = [
-      ['url', data.url],
-      ['m', data.mime],
-      ['x', data.sha256],
-      ['ox', data.original_sha256],
-      ['size', file.size.toString()],
-      ['blurhash', data.blurhash],
-    ];
-
-    if (data.dimensions) {
-      tags.push(['dim', `${data.dimensions.width}x${data.dimensions.height}`]);
-    }
-
-    return tags;
-  },
-};
@@ -1,9 +0,0 @@
-/** Modular uploader interface, to support uploading to different backends. */
-interface Uploader {
-  /** Upload the file to the backend. */
-  upload(file: File, opts?: { signal?: AbortSignal }): Promise<[['url', string], ...string[][]]>;
-  /** Delete the file from the backend. */
-  delete?(cid: string, opts?: { signal?: AbortSignal }): Promise<void>;
-}
-
-export type { Uploader };
@@ -1,14 +1,26 @@
+import { AppContext } from '@/app.ts';
 import { Conf } from '@/config.ts';
 import { insertUnattachedMedia, UnattachedMedia } from '@/db/unattached-media.ts';
-import { configUploader as uploader } from '@/uploaders/config.ts';
+import { HTTPException } from 'hono';

 interface FileMeta {
   pubkey: string;
   description?: string;
 }

 /** Upload a file, track it in the database, and return the resulting media object. */
-async function uploadFile(file: File, meta: FileMeta, signal?: AbortSignal): Promise<UnattachedMedia> {
+export async function uploadFile(
+  c: AppContext,
+  file: File,
+  meta: FileMeta,
+  signal?: AbortSignal,
+): Promise<UnattachedMedia> {
+  const uploader = c.get('uploader');
+  if (!uploader) {
+    throw new HTTPException(500, {
+      res: c.json({ error: 'No uploader configured.' }),
+    });
+  }
+
   const { pubkey, description } = meta;
-
   if (file.size > Conf.maxUploadSize) {

@@ -30,5 +42,3 @@ async function uploadFile(file: File, meta: FileMeta, signal?: AbortSignal): Pro
     uploaded_at: Date.now(),
   });
 }
-
-export { uploadFile };