Merge remote-tracking branch 'origin/main' into refactor-trends

Alex Gleason 2024-05-21 16:45:58 -05:00
commit 4d21dd05a8
GPG Key ID: 7211D1F99744FBB7
65 changed files with 970 additions and 782 deletions

View File

@@ -1,4 +1,4 @@
-image: denoland/deno:1.41.3
+image: denoland/deno:1.43.3

 default:
   interruptible: true

View File

@@ -1 +1 @@
-deno 1.41.3
+deno 1.43.3

View File

@@ -11,7 +11,8 @@
     "nsec": "deno run scripts/nsec.ts",
     "admin:event": "deno run -A scripts/admin-event.ts",
     "admin:role": "deno run -A scripts/admin-role.ts",
-    "stats:recompute": "deno run -A scripts/stats-recompute.ts"
+    "stats:recompute": "deno run -A scripts/stats-recompute.ts",
+    "soapbox": "curl -O https://dl.soapbox.pub/main/soapbox.zip && mkdir -p public && mv soapbox.zip public/ && cd public/ && unzip soapbox.zip && rm soapbox.zip"
   },
   "unstable": ["ffi", "kv", "worker-options"],
   "exclude": ["./public"],
@@ -20,8 +21,9 @@
     "@bradenmacdonald/s3-lite-client": "jsr:@bradenmacdonald/s3-lite-client@^0.7.4",
     "@db/sqlite": "jsr:@db/sqlite@^0.11.1",
     "@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
+    "@lambdalisue/async": "jsr:@lambdalisue/async@^2.1.1",
     "@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
-    "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.19.1",
+    "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.22.0",
     "@sentry/deno": "https://deno.land/x/sentry@7.112.2/index.mjs",
     "@soapbox/kysely-deno-sqlite": "jsr:@soapbox/kysely-deno-sqlite@^2.1.0",
     "@soapbox/stickynotes": "jsr:@soapbox/stickynotes@^0.4.0",
@@ -31,15 +33,18 @@
     "@std/dotenv": "jsr:@std/dotenv@^0.224.0",
     "@std/encoding": "jsr:@std/encoding@^0.224.0",
     "@std/json": "jsr:@std/json@^0.223.0",
-    "@std/media-types": "jsr:@std/media-types@^0.224.0",
+    "@std/media-types": "jsr:@std/media-types@^0.224.1",
     "@std/streams": "jsr:@std/streams@^0.223.0",
     "comlink": "npm:comlink@^4.4.1",
-    "deno-safe-fetch": "https://gitlab.com/soapbox-pub/deno-safe-fetch/-/raw/v1.0.0/load.ts",
+    "deno-safe-fetch/load": "https://gitlab.com/soapbox-pub/deno-safe-fetch/-/raw/v1.0.0/load.ts",
+    "deno-sqlite": "https://raw.githubusercontent.com/alexgleason/deno-sqlite/325f66d8c395e7f6f5ee78ebfa42a0eeea4a942b/mod.ts",
+    "entities": "npm:entities@^4.5.0",
     "fast-stable-stringify": "npm:fast-stable-stringify@^1.0.0",
     "formdata-helper": "npm:formdata-helper@^0.3.0",
     "hono": "https://deno.land/x/hono@v3.10.1/mod.ts",
     "hono/middleware": "https://deno.land/x/hono@v3.10.1/middleware.ts",
     "iso-639-1": "npm:iso-639-1@2.1.15",
+    "isomorphic-dompurify": "npm:isomorphic-dompurify@^2.11.0",
     "kysely": "npm:kysely@^0.27.3",
     "kysely_deno_postgres": "https://deno.land/x/kysely_deno_postgres@v0.4.0/mod.ts",
     "linkify-plugin-hashtag": "npm:linkify-plugin-hashtag@^4.1.1",
@@ -53,7 +58,6 @@
     "tseep": "npm:tseep@^1.2.1",
     "type-fest": "npm:type-fest@^4.3.0",
     "unfurl.js": "npm:unfurl.js@^6.4.0",
-    "uuid62": "npm:uuid62@^1.0.2",
     "zod": "npm:zod@^3.23.5",
     "~/fixtures/": "./fixtures/"
   },

View File

@@ -0,0 +1,20 @@
{
"id": "1264cc4051db59af9a21f7fd001fdf5213424f558ea9ab16a1b014fca2250af5",
"pubkey": "6be38f8c63df7dbf84db7ec4a6e6fbbd8d19dca3b980efad18585c46f04b26f9",
"created_at": 1716306470,
"kind": 1,
"tags": [
[
"imeta",
"url https://image.nostr.build/258d978b91e7424cfa43b31f3cfc077d7172ae10b3b45ac956feff9e72175126.png",
"m image/png",
"x b1ceee58405ef05a41190a0946ca6b6511dff426c68013cdd165514c1ef301f9",
"ox 258d978b91e7424cfa43b31f3cfc077d7172ae10b3b45ac956feff9e72175126",
"size 114350",
"dim 1414x594",
"blurhash LDRfkC.8_4_N_3NGR*t8%gIVWBxt"
]
],
"content": "Today we were made aware of multiple Fediverse blog posts incorrectly attributing “vote Trump” spam on Bluesky to the Mostr.pub Bridge. \n\nThis spam is NOT coming from Mostr. From the screenshots used in these blogs, it's clear the spam is coming from an entirely different bridge called momostr.pink. This bridge is not affiliated with Mostr, and is not even a fork of Mostr. We appreciate that the authors of these posts responded quickly to us and have since corrected the blogs. \n\nMostr.pub uses stirfry policies for anti-spam filtering. This includes an anti-duplication policy that prevents spam like the recent “vote Trump” posts weve seen repeated over and over. \n\nIt is important to note WHY there are multiple bridges, though. \n\nWhen Mostr.pub launched, multiple major servers immediately blocked Mostr, including Mastodon.social. The moderators of Mastodon.social claimed that this was because Nostr was unregulated, and suggested to one user that if they want to bridge their account they should host their own bridge.\n\nThat is exactly what momostr.pink, the source of this spam, has done. \n\nThe obvious response to the censorship of the Mostr Bridge is to build more bridges. \n\nWhile we have opted for pro-social policies that aim to reduce spam and build better connections between decentralized platforms, other bridges built to get around censorship of the Mostr Bridge may not — as were already seeing.\n\nThere will inevitably be multiple bridges, and were working on creating solutions to the problems that arise from that. In the meantime, if the Fediverse could do itself a favor and chill with the censorship for two seconds, we might not have so many problems. \n\n\nhttps://image.nostr.build/258d978b91e7424cfa43b31f3cfc077d7172ae10b3b45ac956feff9e72175126.png",
"sig": "b950e6e2ff1dc786ef344e7dad3edf8aa315a1053ede146725bde181acf7c2c1a5fcf1e0c796552b743607d6ae161a3ff4eb3af5033ffbfd314e68213d315215"
}
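
The "imeta" tag in this fixture carries NIP-92/NIP-94 media metadata as space-delimited "key value" strings. As a quick aid for reading the fixture, a minimal TypeScript sketch that turns such a tag back into an object; the helper name is illustrative and not part of the codebase:

function parseImeta(tag: string[]): Record<string, string> {
  // Skip the leading 'imeta' marker, then split each entry on its first space.
  return Object.fromEntries(
    tag.slice(1).map((entry) => {
      const [key, ...rest] = entry.split(' ');
      return [key, rest.join(' ')] as [string, string];
    }),
  );
}

// parseImeta(['imeta', 'm image/png', 'dim 1414x594']).dim === '1414x594'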

View File

@@ -0,0 +1,34 @@
{
"status": "success",
"message": "Upload successful.",
"data": [
{
"input_name": "APIv2",
"name": "e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"sha256": "0a71f1c9dd982079bc52e96403368209cbf9507c5f6956134686f56e684b6377",
"original_sha256": "e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3",
"type": "picture",
"mime": "image/gif",
"size": 1796276,
"blurhash": "LGH-S^Vwm]x]04kX-qR-R]SL5FxZ",
"dimensions": {
"width": 360,
"height": 216
},
"dimensionsString": "360x216",
"url": "https://image.nostr.build/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"thumbnail": "https://image.nostr.build/thumb/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"responsive": {
"240p": "https://image.nostr.build/resp/240p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"360p": "https://image.nostr.build/resp/360p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"480p": "https://image.nostr.build/resp/480p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"720p": "https://image.nostr.build/resp/720p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
"1080p": "https://image.nostr.build/resp/1080p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif"
},
"metadata": {
"date:create": "2024-05-18T02:11:39+00:00",
"date:modify": "2024-05-18T02:11:39+00:00"
}
}
]
}

View File

@@ -0,0 +1,29 @@
{
"status": "success",
"message": "Upload successful.",
"data": [
{
"id": 0,
"input_name": "APIv2",
"name": "f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"url": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"thumbnail": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"responsive": {
"240p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"360p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"480p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"720p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
"1080p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3"
},
"blurhash": "",
"sha256": "f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725",
"original_sha256": "f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725",
"type": "video",
"mime": "audio/mpeg",
"size": 1519616,
"metadata": [],
"dimensions": [],
"dimensionsString": "0x0"
}
]
}

View File

@@ -1,8 +1,6 @@
 import { nip19 } from 'nostr-tools';
-import { DittoDB } from '@/db/DittoDB.ts';
-import { DittoTables } from '@/db/DittoTables.ts';
-import { Storages } from '@/storages.ts';
+import { refreshAuthorStats } from '@/stats.ts';

 let pubkey: string;

 try {
@@ -17,23 +15,4 @@ try {
   Deno.exit(1);
 }

-const store = await Storages.db();
-const kysely = await DittoDB.getInstance();
-
-const [followList] = await store.query([{ kinds: [3], authors: [pubkey], limit: 1 }]);
-
-const authorStats: DittoTables['author_stats'] = {
-  pubkey,
-  followers_count: (await store.count([{ kinds: [3], '#p': [pubkey] }])).count,
-  following_count: followList?.tags.filter(([name]) => name === 'p')?.length ?? 0,
-  notes_count: (await store.count([{ kinds: [1], authors: [pubkey] }])).count,
-};
-
-await kysely.insertInto('author_stats')
-  .values(authorStats)
-  .onConflict((oc) =>
-    oc
-      .column('pubkey')
-      .doUpdateSet(authorStats)
-  )
-  .execute();
+await refreshAuthorStats(pubkey);

View File

@@ -1,4 +1,4 @@
-import { NostrEvent, NostrSigner, NStore } from '@nostrify/nostrify';
+import { NostrEvent, NostrSigner, NStore, NUploader } from '@nostrify/nostrify';
 import Debug from '@soapbox/stickynotes/debug';
 import { type Context, Env as HonoEnv, type Handler, Hono, Input as HonoInput, type MiddlewareHandler } from 'hono';
 import { cors, logger, serveStatic } from 'hono/middleware';
@@ -7,7 +7,6 @@ import { Conf } from '@/config.ts';
 import { startFirehose } from '@/firehose.ts';
 import { Time } from '@/utils.ts';

-import { actorController } from '@/controllers/activitypub/actor.ts';
 import {
   accountController,
   accountLookupController,
@@ -77,10 +76,8 @@
 } from '@/controllers/api/timelines.ts';
 import { trendingTagsController } from '@/controllers/api/trends.ts';
 import { indexController } from '@/controllers/site.ts';
-import { hostMetaController } from '@/controllers/well-known/host-meta.ts';
 import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts';
 import { nostrController } from '@/controllers/well-known/nostr.ts';
-import { webfingerController } from '@/controllers/well-known/webfinger.ts';
 import { auth98Middleware, requireProof, requireRole } from '@/middleware/auth98Middleware.ts';
 import { cacheMiddleware } from '@/middleware/cacheMiddleware.ts';
 import { cspMiddleware } from '@/middleware/cspMiddleware.ts';
@@ -89,11 +86,14 @@ import { signerMiddleware } from '@/middleware/signerMiddleware.ts';
 import { storeMiddleware } from '@/middleware/storeMiddleware.ts';
 import { blockController } from '@/controllers/api/accounts.ts';
 import { unblockController } from '@/controllers/api/accounts.ts';
+import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';

 interface AppEnv extends HonoEnv {
   Variables: {
     /** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */
     signer?: NostrSigner;
+    /** Uploader for the user to upload files. */
+    uploader?: NUploader;
     /** NIP-98 signed event proving the pubkey is owned by the user. */
     proof?: NostrEvent;
     /** Store */
@@ -129,17 +129,14 @@ app.use(
   cspMiddleware(),
   cors({ origin: '*', exposeHeaders: ['link'] }),
   signerMiddleware,
+  uploaderMiddleware,
   auth98Middleware(),
   storeMiddleware,
 );

-app.get('/.well-known/webfinger', webfingerController);
-app.get('/.well-known/host-meta', hostMetaController);
 app.get('/.well-known/nodeinfo', nodeInfoController);
 app.get('/.well-known/nostr.json', nostrController);

-app.get('/users/:username', actorController);
-
 app.get('/nodeinfo/:version', nodeInfoSchemaController);

 app.get('/api/v1/instance', cacheMiddleware({ cacheName: 'web', expires: Time.minutes(5) }), instanceController);

View File

@@ -136,6 +136,14 @@ class Conf {
       return Deno.env.get('IPFS_API_URL') || 'http://localhost:5001';
     },
   };
+  /** nostr.build API endpoint when the `nostrbuild` uploader is used. */
+  static get nostrbuildEndpoint(): string {
+    return Deno.env.get('NOSTRBUILD_ENDPOINT') || 'https://nostr.build/api/v2/upload/files';
+  }
+  /** Default Blossom servers to use when the `blossom` uploader is set. */
+  static get blossomServers(): string[] {
+    return Deno.env.get('BLOSSOM_SERVERS')?.split(',') || ['https://blossom.primal.net/'];
+  }
   /** Module to upload files with. */
   static get uploader() {
     return Deno.env.get('DITTO_UPLOADER');
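
For reference, a minimal sketch of how the two getters added above resolve at runtime. Only the environment variable names come from the code; the example values are illustrative:

// Illustrative values; DITTO_UPLOADER picks the module, the other two feed the new getters.
Deno.env.set('DITTO_UPLOADER', 'blossom');
Deno.env.set('BLOSSOM_SERVERS', 'https://blossom.primal.net/,https://blossom.example.com/');

// Mirrors Conf.blossomServers: a comma-separated list, with a single-server default.
const blossomServers = Deno.env.get('BLOSSOM_SERVERS')?.split(',') || ['https://blossom.primal.net/'];
// -> ['https://blossom.primal.net/', 'https://blossom.example.com/']

// Mirrors Conf.nostrbuildEndpoint: unset here, so the default endpoint is returned.
const nostrbuildEndpoint = Deno.env.get('NOSTRBUILD_ENDPOINT') || 'https://nostr.build/api/v2/upload/files';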

View File

@@ -1,28 +0,0 @@
import { getAuthor } from '@/queries.ts';
import { activityJson } from '@/utils/api.ts';
import { renderActor } from '@/views/activitypub/actor.ts';
import { localNip05Lookup } from '@/utils/nip05.ts';
import type { AppContext, AppController } from '@/app.ts';
const actorController: AppController = async (c) => {
const username = c.req.param('username');
const { signal } = c.req.raw;
const pointer = await localNip05Lookup(c.get('store'), username);
if (!pointer) return notFound(c);
const event = await getAuthor(pointer.pubkey, { signal });
if (!event) return notFound(c);
const actor = await renderActor(event, username);
if (!actor) return notFound(c);
return activityJson(c, actor);
};
function notFound(c: AppContext) {
return c.json({ error: 'Not found' }, 404);
}
export { actorController };

View File

@@ -7,8 +7,7 @@ import { Conf } from '@/config.ts';
 import { getAuthor, getFollowedPubkeys } from '@/queries.ts';
 import { booleanParamSchema, fileSchema } from '@/schema.ts';
 import { Storages } from '@/storages.ts';
-import { addTag, deleteTag, findReplyTag, getTagSet } from '@/tags.ts';
-import { uploadFile } from '@/upload.ts';
+import { uploadFile } from '@/utils/upload.ts';
 import { nostrNow } from '@/utils.ts';
 import { createEvent, paginated, paginationSchema, parseBody, updateListEvent } from '@/utils/api.ts';
 import { lookupAccount } from '@/utils/lookup.ts';
@@ -18,6 +17,7 @@ import { renderRelationship } from '@/views/mastodon/relationships.ts';
 import { renderStatus } from '@/views/mastodon/statuses.ts';
 import { hydrateEvents } from '@/storages/hydrate.ts';
 import { bech32ToPubkey } from '@/utils.ts';
+import { addTag, deleteTag, findReplyTag, getTagSet } from '@/utils/tags.ts';

 const usernameSchema = z
   .string().min(1).max(30)
@@ -45,14 +45,32 @@ const createAccountController: AppController = async (c) => {
 };

 const verifyCredentialsController: AppController = async (c) => {
-  const pubkey = await c.get('signer')?.getPublicKey()!;
+  const signer = c.get('signer')!;
+  const pubkey = await signer.getPublicKey();

-  const event = await getAuthor(pubkey, { relations: ['author_stats'] });
-  if (event) {
-    return c.json(await renderAccount(event, { withSource: true }));
-  } else {
-    return c.json(await accountFromPubkey(pubkey, { withSource: true }));
+  const eventsDB = await Storages.db();
+
+  const [author, [settingsStore]] = await Promise.all([
+    getAuthor(pubkey, { signal: AbortSignal.timeout(5000) }),
+
+    eventsDB.query([{
+      authors: [pubkey],
+      kinds: [30078],
+      '#d': ['pub.ditto.pleroma_settings_store'],
+      limit: 1,
+    }]),
+  ]);
+
+  const account = author
+    ? await renderAccount(author, { withSource: true })
+    : await accountFromPubkey(pubkey, { withSource: true });
+
+  if (settingsStore) {
+    const data = await signer.nip44!.decrypt(pubkey, settingsStore.content);
+    account.pleroma.settings_store = JSON.parse(data);
   }
+
+  return c.json(account);
 };

 const accountController: AppController = async (c) => {
@@ -86,25 +104,35 @@ const accountLookupController: AppController = async (c) => {
   }
 };

-const accountSearchController: AppController = async (c) => {
-  const q = c.req.query('q');
+const accountSearchQuerySchema = z.object({
+  q: z.string().transform(decodeURIComponent),
+  resolve: booleanParamSchema.optional().transform(Boolean),
+  following: z.boolean().default(false),
+  limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
+});

-  if (!q) {
-    return c.json({ error: 'Missing `q` query parameter.' }, 422);
+const accountSearchController: AppController = async (c) => {
+  const result = accountSearchQuerySchema.safeParse(c.req.query());
+  const { signal } = c.req.raw;
+
+  if (!result.success) {
+    return c.json({ error: 'Bad request', schema: result.error }, 422);
   }

+  const { q, limit } = result.data;
+
   const query = decodeURIComponent(q);
   const store = await Storages.search();

   const [event, events] = await Promise.all([
     lookupAccount(query),
-    store.query([{ kinds: [0], search: query, limit: 20 }], { signal: c.req.raw.signal }),
+    store.query([{ kinds: [0], search: query, limit }], { signal }),
   ]);

   const results = await hydrateEvents({
     events: event ? [event, ...events] : events,
     store,
-    signal: c.req.raw.signal,
+    signal,
   });

   if ((results.length < 1) && query.match(/npub1\w+/)) {
@@ -198,10 +226,12 @@ const updateCredentialsSchema = z.object({
   bot: z.boolean().optional(),
   discoverable: z.boolean().optional(),
   nip05: z.string().optional(),
+  pleroma_settings_store: z.unknown().optional(),
 });

 const updateCredentialsController: AppController = async (c) => {
-  const pubkey = await c.get('signer')?.getPublicKey()!;
+  const signer = c.get('signer')!;
+  const pubkey = await signer.getPublicKey();
   const body = await parseBody(c.req.raw);
   const result = updateCredentialsSchema.safeParse(body);
@@ -221,8 +251,8 @@ const updateCredentialsController: AppController = async (c) => {
   } = result.data;

   const [avatar, header] = await Promise.all([
-    avatarFile ? uploadFile(avatarFile, { pubkey }) : undefined,
-    headerFile ? uploadFile(headerFile, { pubkey }) : undefined,
+    avatarFile ? uploadFile(c, avatarFile, { pubkey }) : undefined,
+    headerFile ? uploadFile(c, headerFile, { pubkey }) : undefined,
   ]);

   meta.name = display_name ?? meta.name;
@@ -238,6 +268,18 @@ const updateCredentialsController: AppController = async (c) => {
   }, c);

   const account = await renderAccount(event, { withSource: true });
+  const settingsStore = result.data.pleroma_settings_store;
+
+  if (settingsStore) {
+    await createEvent({
+      kind: 30078,
+      tags: [['d', 'pub.ditto.pleroma_settings_store']],
+      content: await signer.nip44!.encrypt(pubkey, JSON.stringify(settingsStore)),
+    }, c);
+  }
+
+  account.pleroma.settings_store = settingsStore;
+
   return c.json(account);
 };

View File

@@ -5,8 +5,8 @@ import { Conf } from '@/config.ts';
 import { DittoEvent } from '@/interfaces/DittoEvent.ts';
 import { booleanParamSchema } from '@/schema.ts';
 import { Storages } from '@/storages.ts';
-import { addTag } from '@/tags.ts';
 import { paginated, paginationSchema, parseBody, updateListAdminEvent } from '@/utils/api.ts';
+import { addTag } from '@/utils/tags.ts';
 import { renderAdminAccount } from '@/views/mastodon/admin-accounts.ts';

 const adminAccountQuerySchema = z.object({

View File

@@ -1,6 +1,6 @@
 import { type AppController } from '@/app.ts';
 import { Storages } from '@/storages.ts';
-import { getTagSet } from '@/tags.ts';
+import { getTagSet } from '@/utils/tags.ts';
 import { renderStatuses } from '@/views.ts';

 /** https://docs.joinmastodon.org/methods/bookmarks/#get */

View File

@@ -4,7 +4,7 @@ import { AppController } from '@/app.ts';
 import { fileSchema } from '@/schema.ts';
 import { parseBody } from '@/utils/api.ts';
 import { renderAttachment } from '@/views/mastodon/attachments.ts';
-import { uploadFile } from '@/upload.ts';
+import { uploadFile } from '@/utils/upload.ts';

 const mediaBodySchema = z.object({
   file: fileSchema,
@@ -24,7 +24,7 @@ const mediaController: AppController = async (c) => {
   try {
     const { file, description } = result.data;

-    const media = await uploadFile(file, { pubkey, description }, signal);
+    const media = await uploadFile(c, file, { pubkey, description }, signal);

     return c.json(renderAttachment(media));
   } catch (e) {
     console.error(e);

View File

@@ -1,6 +1,6 @@
 import { type AppController } from '@/app.ts';
 import { Storages } from '@/storages.ts';
-import { getTagSet } from '@/tags.ts';
+import { getTagSet } from '@/utils/tags.ts';
 import { renderAccounts } from '@/views.ts';

 /** https://docs.joinmastodon.org/methods/mutes/#get */

View File

@@ -1,9 +1,9 @@
 import { encodeBase64 } from '@std/encoding/base64';
+import { escape } from 'entities';
 import { nip19 } from 'nostr-tools';
 import { z } from 'zod';

 import { AppController } from '@/app.ts';
-import { lodash } from '@/deps.ts';
 import { nostrNow } from '@/utils.ts';
 import { parseBody } from '@/utils/api.ts';
 import { getClientConnectUri } from '@/utils/connect.ts';
@@ -100,11 +100,11 @@ const oauthController: AppController = async (c) => {
         <form id="oauth_form" action="/oauth/authorize" method="post">
           <input type="text" placeholder="npub1... or nsec1..." name="nip19" autocomplete="off">
           <input type="hidden" name="pubkey" id="pubkey" value="">
-          <input type="hidden" name="redirect_uri" id="redirect_uri" value="${lodash.escape(redirectUri)}">
+          <input type="hidden" name="redirect_uri" id="redirect_uri" value="${escape(redirectUri)}">
           <button type="submit">Authorize</button>
         </form>
         <br>
-        <a href="${lodash.escape(connectUri)}">Nostr Connect</a>
+        <a href="${escape(connectUri)}">Nostr Connect</a>
       </body>
     </html>
   `);

View File

@@ -1,20 +1,22 @@
 import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
 import ISO6391 from 'iso-639-1';
+import { nip19 } from 'nostr-tools';
 import { z } from 'zod';

 import { type AppController } from '@/app.ts';
 import { Conf } from '@/config.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
 import { getUnattachedMediaByIds } from '@/db/unattached-media.ts';
 import { getAncestors, getAuthor, getDescendants, getEvent } from '@/queries.ts';
-import { addTag, deleteTag } from '@/tags.ts';
-import { createEvent, paginationSchema, parseBody, updateListEvent } from '@/utils/api.ts';
 import { renderEventAccounts } from '@/views.ts';
 import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
-import { getLnurl } from '@/utils/lnurl.ts';
-import { asyncReplaceAll } from '@/utils/text.ts';
 import { Storages } from '@/storages.ts';
 import { hydrateEvents } from '@/storages/hydrate.ts';
+import { createEvent, paginationSchema, parseBody, updateListEvent } from '@/utils/api.ts';
+import { getLnurl } from '@/utils/lnurl.ts';
 import { lookupPubkey } from '@/utils/lookup.ts';
+import { addTag, deleteTag } from '@/utils/tags.ts';
+import { asyncReplaceAll } from '@/utils/text.ts';

 const createStatusSchema = z.object({
   in_reply_to_id: z.string().regex(/[0-9a-f]{64}/).nullish(),
@@ -56,6 +58,7 @@ const statusController: AppController = async (c) => {
 const createStatusController: AppController = async (c) => {
   const body = await parseBody(c.req.raw);
   const result = createStatusSchema.safeParse(body);
+  const kysely = await DittoDB.getInstance();

   if (!result.success) {
     return c.json({ error: 'Bad request', schema: result.error }, 400);
@@ -73,12 +76,21 @@ const createStatusController: AppController = async (c) => {
   const tags: string[][] = [];

-  if (data.quote_id) {
-    tags.push(['q', data.quote_id]);
+  if (data.in_reply_to_id) {
+    const ancestor = await getEvent(data.in_reply_to_id);
+
+    if (!ancestor) {
+      return c.json({ error: 'Original post not found.' }, 404);
+    }
+
+    const root = ancestor.tags.find((tag) => tag[0] === 'e' && tag[3] === 'root')?.[1] ?? ancestor.id;
+
+    tags.push(['e', root, 'root']);
+    tags.push(['e', data.in_reply_to_id, 'reply']);
   }

-  if (data.in_reply_to_id) {
-    tags.push(['e', data.in_reply_to_id, 'reply']);
+  if (data.quote_id) {
+    tags.push(['q', data.quote_id]);
   }

   if (data.sensitive && data.spoiler_text) {
@@ -89,15 +101,14 @@ const createStatusController: AppController = async (c) => {
     tags.push(['subject', data.spoiler_text]);
   }

-  const viewerPubkey = await c.get('signer')?.getPublicKey();
-
-  if (data.media_ids?.length) {
-    const media = await getUnattachedMediaByIds(data.media_ids)
-      .then((media) => media.filter(({ pubkey }) => pubkey === viewerPubkey))
-      .then((media) => media.map(({ url, data }) => ['media', url, data]));
-
-    tags.push(...media);
-  }
+  const media = data.media_ids?.length ? await getUnattachedMediaByIds(kysely, data.media_ids) : [];
+  const imeta: string[][] = media.map(({ data }) => {
+    const values: string[] = data.map((tag) => tag.join(' '));
+    return ['imeta', ...values];
+  });
+
+  tags.push(...imeta);

   const pubkeys = new Set<string>();
@@ -110,7 +121,11 @@ const createStatusController: AppController = async (c) => {
       pubkeys.add(pubkey);
     }

-    return `nostr:${pubkey}`;
+    try {
+      return `nostr:${nip19.npubEncode(pubkey)}`;
+    } catch {
+      return match;
+    }
   });

   // Explicit addressing
@@ -129,9 +144,15 @@ const createStatusController: AppController = async (c) => {
     tags.push(['t', match[1]]);
   }

+  const mediaUrls: string[] = media
+    .map(({ data }) => data.find(([name]) => name === 'url')?.[1])
+    .filter((url): url is string => Boolean(url));
+
+  const mediaCompat: string = mediaUrls.length ? ['', '', ...mediaUrls].join('\n') : '';
+
   const event = await createEvent({
     kind: 1,
-    content,
+    content: content + mediaCompat,
     tags,
   }, c);
@@ -261,21 +282,19 @@ const reblogStatusController: AppController = async (c) => {
 const unreblogStatusController: AppController = async (c) => {
   const eventId = c.req.param('id');
   const pubkey = await c.get('signer')?.getPublicKey()!;

-  const event = await getEvent(eventId, { kind: 1 });
-
-  if (!event) {
-    return c.json({ error: 'Event not found.' }, 404);
-  }
-
   const store = await Storages.db();

+  const [event] = await store.query([{ ids: [eventId], kinds: [1] }]);
+
+  if (!event) {
+    return c.json({ error: 'Record not found' }, 404);
+  }
+
   const [repostedEvent] = await store.query(
     [{ kinds: [6], authors: [pubkey], '#e': [event.id], limit: 1 }],
   );

   if (!repostedEvent) {
-    return c.json({ error: 'Event not found.' }, 404);
+    return c.json({ error: 'Record not found' }, 404);
   }

   await createEvent({
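
A worked example of the NIP-10 reply-threading logic introduced in createStatusController above, using illustrative event IDs:

// Replying to a post that is itself a reply: its 'root' marker is carried over.
const ancestor = {
  id: 'bbb…', // the post being replied to (illustrative)
  tags: [['e', 'aaa…', '', 'root'], ['e', 'zzz…', '', 'reply']],
};

const root = ancestor.tags.find((tag) => tag[0] === 'e' && tag[3] === 'root')?.[1] ?? ancestor.id;
console.log(root); // 'aaa…'

// Tags pushed onto the new reply:
//   ['e', 'aaa…', 'root']   (thread root, copied from the ancestor)
//   ['e', 'bbb…', 'reply']  (the direct parent)
// Had the ancestor carried no 'root' marker, its own id 'bbb…' would become the root.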

View File

@@ -2,8 +2,8 @@ import { NStore } from '@nostrify/nostrify';

 import { AppController } from '@/app.ts';
 import { Conf } from '@/config.ts';
-import { getTagSet } from '@/tags.ts';
 import { hydrateEvents } from '@/storages/hydrate.ts';
+import { getTagSet } from '@/utils/tags.ts';
 import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';

 export const suggestionsV1Controller: AppController = async (c) => {

View File

@@ -1,20 +0,0 @@
import { Conf } from '@/config.ts';
import type { AppController } from '@/app.ts';
/** https://datatracker.ietf.org/doc/html/rfc6415 */
const hostMetaController: AppController = (c) => {
const template = Conf.local('/.well-known/webfinger?resource={uri}');
c.header('content-type', 'application/xrd+xml');
return c.body(
`<?xml version="1.0" encoding="UTF-8"?>
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
<Link rel="lrdd" template="${template}" type="application/xrd+xml" />
</XRD>
`,
);
};
export { hostMetaController };

View File

@@ -1,97 +0,0 @@
import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { Conf } from '@/config.ts';
import { localNip05Lookup } from '@/utils/nip05.ts';
import type { AppContext, AppController } from '@/app.ts';
import type { Webfinger } from '@/schemas/webfinger.ts';
const webfingerQuerySchema = z.object({
resource: z.string().url(),
});
const webfingerController: AppController = (c) => {
const query = webfingerQuerySchema.safeParse(c.req.query());
if (!query.success) {
return c.json({ error: 'Bad request', schema: query.error }, 400);
}
const resource = new URL(query.data.resource);
switch (resource.protocol) {
case 'acct:': {
return handleAcct(c, resource);
}
default:
return c.json({ error: 'Unsupported URI scheme' }, 400);
}
};
/** Transforms the resource URI into a `[username, domain]` tuple. */
const acctSchema = z.custom<URL>((value) => value instanceof URL)
.transform((uri) => uri.pathname)
.pipe(z.string().email('Invalid acct'))
.transform((acct) => acct.split('@') as [username: string, host: string])
.refine(([_username, host]) => host === Conf.url.hostname, {
message: 'Host must be local',
path: ['resource', 'acct'],
});
async function handleAcct(c: AppContext, resource: URL): Promise<Response> {
const result = acctSchema.safeParse(resource);
if (!result.success) {
return c.json({ error: 'Invalid acct URI', schema: result.error }, 400);
}
const [username, host] = result.data;
const pointer = await localNip05Lookup(c.get('store'), username);
if (!pointer) {
return c.json({ error: 'Not found' }, 404);
}
const json = renderWebfinger({
pubkey: pointer.pubkey,
username,
subject: `acct:${username}@${host}`,
});
c.header('content-type', 'application/jrd+json');
return c.body(JSON.stringify(json));
}
interface RenderWebfingerOpts {
pubkey: string;
username: string;
subject: string;
}
/** Present Nostr user on Webfinger. */
function renderWebfinger({ pubkey, username, subject }: RenderWebfingerOpts): Webfinger {
const apId = Conf.local(`/users/${username}`);
return {
subject,
aliases: [apId],
links: [
{
rel: 'self',
type: 'application/activity+json',
href: apId,
},
{
rel: 'self',
type: 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
href: apId,
},
{
rel: 'self',
type: 'application/nostr+json',
href: `nostr:${nip19.npubEncode(pubkey)}`,
},
],
};
}
export { webfingerController };

View File

@@ -0,0 +1,19 @@
import { Kysely, sql } from 'kysely';
import { Conf } from '@/config.ts';
export async function up(db: Kysely<any>): Promise<void> {
if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
await db.schema.createTable('nostr_pgfts')
.ifNotExists()
.addColumn('event_id', 'text', (c) => c.primaryKey().references('nostr_events.id').onDelete('cascade'))
.addColumn('search_vec', sql`tsvector`, (c) => c.notNull())
.execute();
}
}
export async function down(db: Kysely<any>): Promise<void> {
if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
await db.schema.dropTable('nostr_pgfts').ifExists().execute();
}
}

View File

@@ -0,0 +1,21 @@
import { Kysely } from 'kysely';
import { Conf } from '@/config.ts';
export async function up(db: Kysely<any>): Promise<void> {
if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
await db.schema
.createIndex('nostr_pgfts_gin_search_vec')
.ifNotExists()
.on('nostr_pgfts')
.using('gin')
.column('search_vec')
.execute();
}
}
export async function down(db: Kysely<any>): Promise<void> {
if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
await db.schema.dropIndex('nostr_pgfts_gin_search_vec').ifExists().execute();
}
}
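
These two migrations only create the Postgres full-text-search table and its GIN index; the code that populates and queries search_vec is not part of this diff. As a rough, hypothetical sketch of how such a tsvector column is typically queried with Kysely (the helper and its typing are assumptions, not the project's actual search implementation):

import { Kysely, sql } from 'kysely';

// Hypothetical helper: match event IDs against the tsvector column via the GIN index.
async function searchEventIds(db: Kysely<any>, query: string): Promise<string[]> {
  const rows = await db
    .selectFrom('nostr_pgfts')
    .select('event_id')
    .where(sql<boolean>`search_vec @@ websearch_to_tsquery('simple', ${query})`)
    .execute();

  return rows.map((row) => row.event_id);
}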

View File

@@ -1,35 +1,29 @@
-import uuid62 from 'uuid62';
+import { Kysely } from 'kysely';

 import { DittoDB } from '@/db/DittoDB.ts';
-import { type MediaData } from '@/schemas/nostr.ts';
+import { DittoTables } from '@/db/DittoTables.ts';

 interface UnattachedMedia {
   id: string;
   pubkey: string;
   url: string;
-  data: MediaData;
+  /** NIP-94 tags. */
+  data: string[][];
   uploaded_at: number;
 }

 /** Add unattached media into the database. */
-async function insertUnattachedMedia(media: Omit<UnattachedMedia, 'id' | 'uploaded_at'>) {
-  const result = {
-    id: uuid62.v4(),
-    uploaded_at: Date.now(),
-    ...media,
-  };
-
+async function insertUnattachedMedia(media: UnattachedMedia) {
   const kysely = await DittoDB.getInstance();
   await kysely.insertInto('unattached_media')
-    .values({ ...result, data: JSON.stringify(media.data) })
+    .values({ ...media, data: JSON.stringify(media.data) })
     .execute();

-  return result;
+  return media;
 }

 /** Select query for unattached media. */
-async function selectUnattachedMediaQuery() {
-  const kysely = await DittoDB.getInstance();
+function selectUnattachedMediaQuery(kysely: Kysely<DittoTables>) {
   return kysely.selectFrom('unattached_media')
     .select([
       'unattached_media.id',
@@ -41,9 +35,8 @@ async function selectUnattachedMediaQuery() {
 }

 /** Find attachments that exist but aren't attached to any events. */
-async function getUnattachedMedia(until: Date) {
-  const query = await selectUnattachedMediaQuery();
-  return query
+function getUnattachedMedia(kysely: Kysely<DittoTables>, until: Date) {
+  return selectUnattachedMediaQuery(kysely)
     .leftJoin('nostr_tags', 'unattached_media.url', 'nostr_tags.value')
     .where('uploaded_at', '<', until.getTime())
     .execute();
@@ -58,12 +51,17 @@ async function deleteUnattachedMediaByUrl(url: string) {
 }

 /** Get unattached media by IDs. */
-async function getUnattachedMediaByIds(ids: string[]) {
+async function getUnattachedMediaByIds(kysely: Kysely<DittoTables>, ids: string[]): Promise<UnattachedMedia[]> {
   if (!ids.length) return [];
-  const query = await selectUnattachedMediaQuery();
-  return query
+
+  const results = await selectUnattachedMediaQuery(kysely)
     .where('id', 'in', ids)
     .execute();
+
+  return results.map((row) => ({
+    ...row,
+    data: JSON.parse(row.data),
+  }));
 }

 /** Delete rows as an event with media is being created. */

View File

@@ -1,18 +0,0 @@
import 'deno-safe-fetch';
// @deno-types="npm:@types/lodash@4.14.194"
export { default as lodash } from 'https://esm.sh/lodash@4.17.21';
// @deno-types="npm:@types/mime@3.0.0"
export { default as mime } from 'npm:mime@^3.0.0';
// @deno-types="npm:@types/sanitize-html@2.9.0"
export { default as sanitizeHtml } from 'npm:sanitize-html@^2.11.0';
export {
type ParsedSignature,
pemToPublicKey,
publicKeyToPem,
signRequest,
verifyRequest,
} from 'https://gitlab.com/soapbox-pub/fedisign/-/raw/v0.2.1/mod.ts';
export { generateSeededRsa } from 'https://gitlab.com/soapbox-pub/seeded-rsa/-/raw/v1.0.0/mod.ts';
export {
DB as Sqlite,
} from 'https://raw.githubusercontent.com/alexgleason/deno-sqlite/325f66d8c395e7f6f5ee78ebfa42a0eeea4a942b/mod.ts';

View File

@@ -0,0 +1,35 @@
import { BlossomUploader, NostrBuildUploader } from '@nostrify/nostrify/uploaders';
import { AppMiddleware } from '@/app.ts';
import { Conf } from '@/config.ts';
import { DenoUploader } from '@/uploaders/DenoUploader.ts';
import { IPFSUploader } from '@/uploaders/IPFSUploader.ts';
import { S3Uploader } from '@/uploaders/S3Uploader.ts';
import { fetchWorker } from '@/workers/fetch.ts';
/** Set an uploader for the user. */
export const uploaderMiddleware: AppMiddleware = async (c, next) => {
const signer = c.get('signer');
switch (Conf.uploader) {
case 's3':
c.set('uploader', new S3Uploader(Conf.s3));
break;
case 'ipfs':
c.set('uploader', new IPFSUploader({ baseUrl: Conf.mediaDomain, apiUrl: Conf.ipfs.apiUrl, fetch: fetchWorker }));
break;
case 'local':
c.set('uploader', new DenoUploader({ baseUrl: Conf.mediaDomain, dir: Conf.uploadsDir }));
break;
case 'nostrbuild':
c.set('uploader', new NostrBuildUploader({ endpoint: Conf.nostrbuildEndpoint, signer, fetch: fetchWorker }));
break;
case 'blossom':
if (signer) {
c.set('uploader', new BlossomUploader({ servers: Conf.blossomServers, signer, fetch: fetchWorker }));
}
break;
}
await next();
};

View File

@@ -13,7 +13,6 @@ import { RelayError } from '@/RelayError.ts';
 import { updateStats } from '@/stats.ts';
 import { hydrateEvents, purifyEvent } from '@/storages/hydrate.ts';
 import { Storages } from '@/storages.ts';
-import { getTagSet } from '@/tags.ts';
 import { eventAge, nostrDate, nostrNow, parseNip05, Time } from '@/utils.ts';
 import { fetchWorker } from '@/workers/fetch.ts';
 import { policyWorker } from '@/workers/policy.ts';
@@ -22,6 +21,7 @@ import { verifyEventWorker } from '@/workers/verify.ts';
 import { AdminSigner } from '@/signers/AdminSigner.ts';
 import { lnurlCache } from '@/utils/lnurl.ts';
 import { nip05Cache } from '@/utils/nip05.ts';
+import { getTagSet } from '@/utils/tags.ts';

 import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';

View File

@@ -1,6 +1,6 @@
 import { NostrEvent, NostrRelayOK, NPolicy, NStore } from '@nostrify/nostrify';

-import { getTagSet } from '@/tags.ts';
+import { getTagSet } from '@/utils/tags.ts';

 export class MuteListPolicy implements NPolicy {
   constructor(private pubkey: string, private store: NStore) {}

View File

@@ -5,8 +5,8 @@ import { Conf } from '@/config.ts';
 import { Storages } from '@/storages.ts';
 import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
 import { type DittoRelation } from '@/interfaces/DittoFilter.ts';
-import { findReplyTag, getTagSet } from '@/tags.ts';
 import { hydrateEvents } from '@/storages/hydrate.ts';
+import { findReplyTag, getTagSet } from '@/utils/tags.ts';

 const debug = Debug('ditto:queries');

@@ -88,7 +88,11 @@ async function getAncestors(event: NostrEvent, result: NostrEvent[] = []): Promi
 async function getDescendants(eventId: string, signal = AbortSignal.timeout(2000)): Promise<NostrEvent[]> {
   const store = await Storages.db();
-  const events = await store.query([{ kinds: [1], '#e': [eventId] }], { limit: 200, signal });
+
+  const events = await store
+    .query([{ kinds: [1], '#e': [eventId] }], { limit: 200, signal })
+    .then((events) => events.filter(({ tags }) => findReplyTag(tags)?.[1] === eventId));
+
   return hydrateEvents({ events, store, signal });
 }

View File

@@ -9,27 +9,12 @@ const signedEventSchema = n.event()
   .refine((event) => event.id === getEventHash(event), 'Event ID does not match hash')
   .refine(verifyEvent, 'Event signature is invalid');

-/** Media data schema from `"media"` tags. */
-const mediaDataSchema = z.object({
-  blurhash: z.string().optional().catch(undefined),
-  cid: z.string().optional().catch(undefined),
-  description: z.string().max(200).optional().catch(undefined),
-  height: z.number().int().positive().optional().catch(undefined),
-  mime: z.string().optional().catch(undefined),
-  name: z.string().optional().catch(undefined),
-  size: z.number().int().positive().optional().catch(undefined),
-  width: z.number().int().positive().optional().catch(undefined),
-});
-
 /** Kind 0 content schema for the Ditto server admin user. */
 const serverMetaSchema = n.metadata().and(z.object({
   tagline: z.string().optional().catch(undefined),
   email: z.string().optional().catch(undefined),
 }));

-/** Media data from `"media"` tags. */
-type MediaData = z.infer<typeof mediaDataSchema>;
-
 /** NIP-11 Relay Information Document. */
 const relayInfoDocSchema = z.object({
   name: z.string().transform((val) => val.slice(0, 30)).optional().catch(undefined),
@@ -47,12 +32,4 @@ const emojiTagSchema = z.tuple([z.literal('emoji'), z.string(), z.string().url()
 /** NIP-30 custom emoji tag. */
 type EmojiTag = z.infer<typeof emojiTagSchema>;

-export {
-  type EmojiTag,
-  emojiTagSchema,
-  type MediaData,
-  mediaDataSchema,
-  relayInfoDocSchema,
-  serverMetaSchema,
-  signedEventSchema,
-};
+export { type EmojiTag, emojiTagSchema, relayInfoDocSchema, serverMetaSchema, signedEventSchema };

View File

@@ -1,19 +0,0 @@
import { z } from 'zod';
const linkSchema = z.object({
rel: z.string().optional(),
type: z.string().optional(),
href: z.string().optional(),
template: z.string().optional(),
});
const webfingerSchema = z.object({
subject: z.string(),
aliases: z.array(z.string()).catch([]),
links: z.array(linkSchema),
});
type Webfinger = z.infer<typeof webfingerSchema>;
export { webfingerSchema };
export type { Webfinger };

View File

@@ -1,3 +1,5 @@
+import 'deno-safe-fetch/load';
+
 import '@/precheck.ts';
 import '@/sentry.ts';
 import '@/nostr-wasm.ts';

View File

@@ -1,11 +1,14 @@
-import { NKinds, NostrEvent } from '@nostrify/nostrify';
+import { Semaphore } from '@lambdalisue/async';
+import { NKinds, NostrEvent, NStore } from '@nostrify/nostrify';
 import Debug from '@soapbox/stickynotes/debug';
 import { InsertQueryBuilder, Kysely } from 'kysely';
+import { LRUCache } from 'lru-cache';
+import { SetRequired } from 'type-fest';

 import { DittoDB } from '@/db/DittoDB.ts';
 import { DittoTables } from '@/db/DittoTables.ts';
 import { Storages } from '@/storages.ts';
-import { findReplyTag } from '@/tags.ts';
+import { findReplyTag, getTagSet } from '@/utils/tags.ts';

 type AuthorStat = keyof Omit<DittoTables['author_stats'], 'pubkey'>;
 type EventStat = keyof Omit<DittoTables['event_stats'], 'event_id'>;
@@ -37,6 +40,8 @@ async function updateStats(event: NostrEvent) {
     debug(JSON.stringify({ id: event.id, pubkey: event.pubkey, kind: event.kind, tags: event.tags, statDiffs }));
   }

+  pubkeyDiffs.forEach(([_, pubkey]) => refreshAuthorStatsDebounced(pubkey));
+
   const kysely = await DittoDB.getInstance();

   if (pubkeyDiffs.length) queries.push(authorStatsQuery(kysely, pubkeyDiffs));
@@ -216,4 +221,53 @@ function getFollowDiff(event: NostrEvent, prev?: NostrEvent): AuthorStatDiff[] {
   ];
 }

-export { updateStats };
+/** Refresh the author's stats in the database. */
+async function refreshAuthorStats(pubkey: string): Promise<DittoTables['author_stats']> {
+  const store = await Storages.db();
+  const stats = await countAuthorStats(store, pubkey);
+
+  const kysely = await DittoDB.getInstance();
+
+  await kysely.insertInto('author_stats')
+    .values(stats)
+    .onConflict((oc) => oc.column('pubkey').doUpdateSet(stats))
+    .execute();
+
+  return stats;
+}
+
+/** Calculate author stats from the database. */
+async function countAuthorStats(
+  store: SetRequired<NStore, 'count'>,
+  pubkey: string,
+): Promise<DittoTables['author_stats']> {
+  const [{ count: followers_count }, { count: notes_count }, [followList]] = await Promise.all([
+    store.count([{ kinds: [3], '#p': [pubkey] }]),
+    store.count([{ kinds: [1], authors: [pubkey] }]),
+    store.query([{ kinds: [3], authors: [pubkey], limit: 1 }]),
+  ]);
+
+  return {
+    pubkey,
+    followers_count,
+    following_count: getTagSet(followList?.tags ?? [], 'p').size,
+    notes_count,
+  };
+}
+
+const authorStatsSemaphore = new Semaphore(10);
+const refreshedAuthors = new LRUCache<string, true>({ max: 1000 });
+
+/** Calls `refreshAuthorStats` only once per author. */
+function refreshAuthorStatsDebounced(pubkey: string): void {
+  if (refreshedAuthors.get(pubkey)) {
+    return;
+  }
+
+  refreshedAuthors.set(pubkey, true);
+  debug('refreshing author stats:', pubkey);
+
+  authorStatsSemaphore
+    .lock(() => refreshAuthorStats(pubkey).catch(() => {}));
+}
+
+export { refreshAuthorStats, refreshAuthorStatsDebounced, updateStats };

View File

@@ -9,10 +9,7 @@ import { DittoDB } from '@/db/DittoDB.ts';
 import { DittoTables } from '@/db/DittoTables.ts';
 import { RelayError } from '@/RelayError.ts';
 import { EventsDB } from '@/storages/EventsDB.ts';
-import { genEvent } from '@/test.ts';
-
-import event0 from '~/fixtures/events/event-0.json' with { type: 'json' };
-import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
+import { eventFixture, genEvent } from '@/test.ts';

 /** Create in-memory database for testing. */
 const createDB = async () => {
@@ -28,6 +25,7 @@ const createDB = async () => {
 Deno.test('count filters', async () => {
   const { eventsDB } = await createDB();
+  const event1 = await eventFixture('event-1');

   assertEquals((await eventsDB.count([{ kinds: [1] }])).count, 0);
   await eventsDB.event(event1);
@@ -37,6 +35,7 @@ Deno.test('count filters', async () => {
 Deno.test('insert and filter events', async () => {
   const { eventsDB } = await createDB();
+  const event1 = await eventFixture('event-1');

   await eventsDB.event(event1);
   assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]);
@@ -52,6 +51,7 @@ Deno.test('insert and filter events', async () => {
 Deno.test('query events with domain search filter', async () => {
   const { eventsDB, kysely } = await createDB();
+  const event1 = await eventFixture('event-1');

   await eventsDB.event(event1);
   assertEquals(await eventsDB.query([{}]), [event1]);
@@ -180,7 +180,7 @@ Deno.test('throws a RelayError when inserting an event deleted by a user', asyn
 Deno.test('inserting replaceable events', async () => {
   const { eventsDB } = await createDB();

-  const event = event0;
+  const event = await eventFixture('event-0');
   await eventsDB.event(event);

   const olderEvent = { ...event, id: '123', created_at: event.created_at - 1 };

View File

@@ -7,11 +7,11 @@ import { Kysely } from 'kysely';
 import { Conf } from '@/config.ts';
 import { DittoTables } from '@/db/DittoTables.ts';
 import { normalizeFilters } from '@/filter.ts';
+import { RelayError } from '@/RelayError.ts';
 import { purifyEvent } from '@/storages/hydrate.ts';
-import { getTagSet } from '@/tags.ts';
 import { isNostrId, isURL } from '@/utils.ts';
 import { abortError } from '@/utils/abort.ts';
-import { RelayError } from '@/RelayError.ts';
+import { getTagSet } from '@/utils/tags.ts';

 /** Function to decide whether or not to index a tag. */
 type TagCondition = ({ event, count, value }: {
@@ -42,8 +42,17 @@ class EventsDB implements NStore {
   };

   constructor(private kysely: Kysely<DittoTables>) {
+    let fts: 'sqlite' | 'postgres' | undefined;
+
+    if (Conf.databaseUrl.protocol === 'sqlite:') {
+      fts = 'sqlite';
+    }
+    if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
+      fts = 'postgres';
+    }
+
     this.store = new NDatabase(kysely, {
-      fts5: Conf.databaseUrl.protocol === 'sqlite:',
+      fts,
       indexTags: EventsDB.indexTags,
       searchText: EventsDB.searchText,
     });
@@ -171,8 +180,8 @@
   /** Build search content for a user. */
   static buildUserSearchContent(event: NostrEvent): string {
-    const { name, nip05, about } = n.json().pipe(n.metadata()).catch({}).parse(event.content);
-    return [name, nip05, about].filter(Boolean).join('\n');
+    const { name, nip05 } = n.json().pipe(n.metadata()).catch({}).parse(event.content);
+    return [name, nip05].filter(Boolean).join('\n');
   }

   /** Build search content from tag values. */

View File

@ -1,7 +1,7 @@
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify'; import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
import { DittoEvent } from '@/interfaces/DittoEvent.ts'; import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getTagSet } from '@/tags.ts'; import { getTagSet } from '@/utils/tags.ts';
export class UserStore implements NStore { export class UserStore implements NStore {
constructor(private pubkey: string, private store: NStore) {} constructor(private pubkey: string, private store: NStore) {}

View File

@ -5,6 +5,8 @@ import { DittoDB } from '@/db/DittoDB.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { DittoTables } from '@/db/DittoTables.ts'; import { DittoTables } from '@/db/DittoTables.ts';
import { Conf } from '@/config.ts'; import { Conf } from '@/config.ts';
import { refreshAuthorStatsDebounced } from '@/stats.ts';
import { findQuoteTag } from '@/utils/tags.ts';
interface HydrateOpts { interface HydrateOpts {
events: DittoEvent[]; events: DittoEvent[];
@ -55,6 +57,8 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
events: await gatherEventStats(cache), events: await gatherEventStats(cache),
}; };
refreshMissingAuthorStats(events, stats.authors);
// Dedupe events. // Dedupe events.
const results = [...new Map(cache.map((event) => [event.id, event])).values()]; const results = [...new Map(cache.map((event) => [event.id, event])).values()];
@ -78,7 +82,7 @@ function assembleEvents(
event.user = b.find((e) => matchFilter({ kinds: [30361], authors: [admin], '#d': [event.pubkey] }, e)); event.user = b.find((e) => matchFilter({ kinds: [30361], authors: [admin], '#d': [event.pubkey] }, e));
if (event.kind === 1) { if (event.kind === 1) {
const id = event.tags.find(([name]) => name === 'q')?.[1]; const id = findQuoteTag(event.tags)?.[1];
if (id) { if (id) {
event.quote = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e)); event.quote = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e));
} }
@ -166,7 +170,7 @@ function gatherQuotes({ events, store, signal }: HydrateOpts): Promise<DittoEven
for (const event of events) { for (const event of events) {
if (event.kind === 1) { if (event.kind === 1) {
const id = event.tags.find(([name]) => name === 'q')?.[1]; const id = findQuoteTag(event.tags)?.[1];
if (id) { if (id) {
ids.add(id); ids.add(id);
} }
@ -251,11 +255,35 @@ async function gatherAuthorStats(events: DittoEvent[]): Promise<DittoTables['aut
} }
const kysely = await DittoDB.getInstance(); const kysely = await DittoDB.getInstance();
return kysely
const rows = await kysely
.selectFrom('author_stats') .selectFrom('author_stats')
.selectAll() .selectAll()
.where('pubkey', 'in', [...pubkeys]) .where('pubkey', 'in', [...pubkeys])
.execute(); .execute();
return rows.map((row) => ({
pubkey: row.pubkey,
followers_count: Math.max(0, row.followers_count),
following_count: Math.max(0, row.following_count),
notes_count: Math.max(0, row.notes_count),
}));
}
function refreshMissingAuthorStats(events: NostrEvent[], stats: DittoTables['author_stats'][]) {
const pubkeys = new Set<string>(
events
.filter((event) => event.kind === 0)
.map((event) => event.pubkey),
);
const missing = pubkeys.difference(
new Set(stats.map((stat) => stat.pubkey)),
);
for (const pubkey of missing) {
refreshAuthorStatsDebounced(pubkey);
}
} }
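The `pubkeys.difference(...)` call above relies on `Set.prototype.difference`, which is only available in recent runtimes. For illustration, a small helper with the same behavior and no dependency on that method:

// Illustrative only: computes the same result as a.difference(b) without Set.prototype.difference.
function setDifference(a: Set<string>, b: Set<string>): Set<string> {
  return new Set([...a].filter((value) => !b.has(value)));
}

// Example: setDifference(new Set(['a', 'b']), new Set(['b'])) returns Set { 'a' }.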
/** Collect event stats from the events. */ /** Collect event stats from the events. */
@ -271,11 +299,19 @@ async function gatherEventStats(events: DittoEvent[]): Promise<DittoTables['even
} }
const kysely = await DittoDB.getInstance(); const kysely = await DittoDB.getInstance();
return kysely
const rows = await kysely
.selectFrom('event_stats') .selectFrom('event_stats')
.selectAll() .selectAll()
.where('event_id', 'in', [...ids]) .where('event_id', 'in', [...ids])
.execute(); .execute();
return rows.map((row) => ({
event_id: row.event_id,
reposts_count: Math.max(0, row.reposts_count),
reactions_count: Math.max(0, row.reactions_count),
replies_count: Math.max(0, row.replies_count),
}));
} }
/** Return a normalized event without any non-standard keys. */ /** Return a normalized event without any non-standard keys. */

View File

@ -1,25 +0,0 @@
import { assertEquals } from '@std/assert';
import { addTag, deleteTag, getTagSet } from './tags.ts';
Deno.test('getTagSet', () => {
assertEquals(getTagSet([], 'p'), new Set());
assertEquals(getTagSet([['p', '123']], 'p'), new Set(['123']));
assertEquals(getTagSet([['p', '123'], ['p', '456']], 'p'), new Set(['123', '456']));
assertEquals(getTagSet([['p', '123'], ['p', '456'], ['q', '789']], 'p'), new Set(['123', '456']));
});
Deno.test('addTag', () => {
assertEquals(addTag([], ['p', '123']), [['p', '123']]);
assertEquals(addTag([['p', '123']], ['p', '123']), [['p', '123']]);
assertEquals(addTag([['p', '123'], ['p', '456']], ['p', '123']), [['p', '123'], ['p', '456']]);
assertEquals(addTag([['p', '123'], ['p', '456']], ['p', '789']), [['p', '123'], ['p', '456'], ['p', '789']]);
});
Deno.test('deleteTag', () => {
assertEquals(deleteTag([], ['p', '123']), []);
assertEquals(deleteTag([['p', '123']], ['p', '123']), []);
assertEquals(deleteTag([['p', '123']], ['p', '456']), [['p', '123']]);
assertEquals(deleteTag([['p', '123'], ['p', '123']], ['p', '123']), []);
assertEquals(deleteTag([['p', '123'], ['p', '456']], ['p', '456']), [['p', '123']]);
});

View File

@ -1,42 +0,0 @@
/** Get the values for a tag in a `Set`. */
function getTagSet(tags: string[][], tagName: string): Set<string> {
const set = new Set<string>();
tags.forEach((tag) => {
if (tag[0] === tagName) {
set.add(tag[1]);
}
});
return set;
}
/** Check if the tag exists by its name and value. */
function hasTag(tags: string[][], tag: string[]): boolean {
return tags.some(([name, value]) => name === tag[0] && value === tag[1]);
}
/** Delete all occurrences of the tag by its name/value pair. */
function deleteTag(tags: readonly string[][], tag: string[]): string[][] {
return tags.filter(([name, value]) => !(name === tag[0] && value === tag[1]));
}
/** Add a tag to the list, replacing the name/value pair if it already exists. */
function addTag(tags: readonly string[][], tag: string[]): string[][] {
const tagIndex = tags.findIndex(([name, value]) => name === tag[0] && value === tag[1]);
if (tagIndex === -1) {
return [...tags, tag];
} else {
return [...tags.slice(0, tagIndex), tag, ...tags.slice(tagIndex + 1)];
}
}
const isReplyTag = (tag: string[]) => tag[0] === 'e' && tag[3] === 'reply';
const isRootTag = (tag: string[]) => tag[0] === 'e' && tag[3] === 'root';
const isLegacyReplyTag = (tag: string[]) => tag[0] === 'e' && !tag[3];
function findReplyTag(tags: string[][]) {
return tags.find(isReplyTag) || tags.find(isRootTag) || tags.findLast(isLegacyReplyTag);
}
export { addTag, deleteTag, findReplyTag, getTagSet, hasTag };

View File

@ -1,33 +0,0 @@
import { Conf } from '@/config.ts';
import { insertUnattachedMedia } from '@/db/unattached-media.ts';
import { configUploader as uploader } from '@/uploaders/config.ts';
interface FileMeta {
pubkey: string;
description?: string;
}
/** Upload a file, track it in the database, and return the resulting media object. */
async function uploadFile(file: File, meta: FileMeta, signal?: AbortSignal) {
const { name, type, size } = file;
const { pubkey, description } = meta;
if (file.size > Conf.maxUploadSize) {
throw new Error('File size is too large.');
}
const { url } = await uploader.upload(file, { signal });
return insertUnattachedMedia({
pubkey,
url,
data: {
name,
size,
description,
mime: type,
},
});
}
export { uploadFile };

View File

@ -0,0 +1,46 @@
import { join } from 'node:path';
import { NUploader } from '@nostrify/nostrify';
import { crypto } from '@std/crypto';
import { encodeHex } from '@std/encoding/hex';
import { extensionsByType } from '@std/media-types';
export interface DenoUploaderOpts {
baseUrl: string;
dir: string;
}
/** Local Deno filesystem uploader. */
export class DenoUploader implements NUploader {
baseUrl: string;
dir: string;
constructor(opts: DenoUploaderOpts) {
this.baseUrl = opts.baseUrl;
this.dir = opts.dir;
}
async upload(file: File): Promise<[['url', string], ...string[][]]> {
const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
const ext = extensionsByType(file.type)?.[0] ?? 'bin';
const filename = `${sha256}.${ext}`;
await Deno.mkdir(this.dir, { recursive: true });
await Deno.writeFile(join(this.dir, filename), file.stream());
const url = new URL(this.baseUrl);
const path = url.pathname === '/' ? filename : join(url.pathname, filename);
return [
['url', new URL(path, url).toString()],
['m', file.type],
['x', sha256],
['size', file.size.toString()],
];
}
async delete(filename: string) {
const path = join(this.dir, filename);
await Deno.remove(path);
}
}
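For illustration, a minimal usage sketch of the new filesystem uploader. The import path and the `baseUrl`/`dir` values are assumptions for the example, and running it needs write permission for the target directory:

import { DenoUploader } from '@/uploaders/DenoUploader.ts'; // module path assumed

const uploader = new DenoUploader({ baseUrl: 'https://media.example.com', dir: 'data/uploads' });

// A tiny PNG-typed file; extensionsByType('image/png') yields 'png' for the generated filename.
const file = new File([new Uint8Array([0x89, 0x50, 0x4e, 0x47])], 'cat.png', { type: 'image/png' });

const tags = await uploader.upload(file);
// tags => [['url', 'https://media.example.com/<sha256>.png'], ['m', 'image/png'], ['x', '<sha256>'], ['size', '4']]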

View File

@ -0,0 +1,69 @@
import { NUploader } from '@nostrify/nostrify';
import { z } from 'zod';
export interface IPFSUploaderOpts {
baseUrl: string;
apiUrl?: string;
fetch?: typeof fetch;
}
/**
 * IPFS uploader. It expects an IPFS node to be up and running.
* It will try to connect to `http://localhost:5001` by default,
* and upload the file using the REST API.
*/
export class IPFSUploader implements NUploader {
private baseUrl: string;
private apiUrl: string;
private fetch: typeof fetch;
constructor(opts: IPFSUploaderOpts) {
this.baseUrl = opts.baseUrl;
this.apiUrl = opts.apiUrl ?? 'http://localhost:5001';
this.fetch = opts.fetch ?? globalThis.fetch;
}
async upload(file: File, opts?: { signal?: AbortSignal }): Promise<[['url', string], ...string[][]]> {
const url = new URL('/api/v0/add', this.apiUrl);
const formData = new FormData();
formData.append('file', file);
const response = await this.fetch(url, {
method: 'POST',
body: formData,
signal: opts?.signal,
});
const { Hash: cid } = IPFSUploader.schema().parse(await response.json());
return [
['url', new URL(`/ipfs/${cid}`, this.baseUrl).toString()],
['m', file.type],
['cid', cid],
['size', file.size.toString()],
];
}
async delete(cid: string, opts?: { signal?: AbortSignal }): Promise<void> {
const url = new URL('/api/v0/pin/rm', this.apiUrl);
const query = new URLSearchParams();
query.set('arg', cid);
url.search = query.toString();
await this.fetch(url, {
method: 'POST',
signal: opts?.signal,
});
}
/** Response schema for POST `/api/v0/add`. */
private static schema() {
return z.object({
Name: z.string(),
Hash: z.string(),
Size: z.string(),
});
}
}
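A comparable sketch for the IPFS uploader, assuming the module path and a local IPFS daemon listening on the default API port:

import { IPFSUploader } from '@/uploaders/IPFSUploader.ts'; // module path assumed

const ipfs = new IPFSUploader({ baseUrl: 'https://media.example.com' });

const tags = await ipfs.upload(new File(['hello'], 'hello.txt', { type: 'text/plain' }));
// tags => [['url', 'https://media.example.com/ipfs/<cid>'], ['m', 'text/plain'], ['cid', '<cid>'], ['size', '5']]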

View File

@ -0,0 +1,59 @@
import { join } from 'node:path';
import { S3Client } from '@bradenmacdonald/s3-lite-client';
import { NUploader } from '@nostrify/nostrify';
import { crypto } from '@std/crypto';
import { encodeHex } from '@std/encoding/hex';
import { extensionsByType } from '@std/media-types';
import { Conf } from '@/config.ts';
export interface S3UploaderOpts {
endPoint: string;
region: string;
accessKey?: string;
secretKey?: string;
bucket?: string;
pathStyle?: boolean;
port?: number;
sessionToken?: string;
useSSL?: boolean;
}
/** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */
export class S3Uploader implements NUploader {
private client: S3Client;
constructor(opts: S3UploaderOpts) {
this.client = new S3Client(opts);
}
async upload(file: File): Promise<[['url', string], ...string[][]]> {
const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
const ext = extensionsByType(file.type)?.[0] ?? 'bin';
const filename = `${sha256}.${ext}`;
await this.client.putObject(filename, file.stream(), {
metadata: {
'Content-Type': file.type,
'x-amz-acl': 'public-read',
},
});
const { pathStyle, bucket } = Conf.s3;
const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
const url = new URL(path, Conf.mediaDomain).toString();
return [
['url', url],
['m', file.type],
['x', sha256],
['size', file.size.toString()],
];
}
async delete(objectName: string) {
await this.client.deleteObject(objectName);
}
}
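And a rough sketch for the S3 uploader, with placeholder credentials; the resulting URL is built from `Conf.mediaDomain` and `Conf.s3` as shown above:

import { S3Uploader } from '@/uploaders/S3Uploader.ts'; // module path assumed

const s3 = new S3Uploader({
  endPoint: 's3.example.com', // placeholder endpoint
  region: 'us-east-1',
  bucket: 'media',
  accessKey: 'placeholder',
  secretKey: 'placeholder',
});

const tags = await s3.upload(new File(['hi'], 'hi.txt', { type: 'text/plain' }));
// tags[0] is the ['url', ...] tag pointing at Conf.mediaDomain.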

View File

@ -1,33 +0,0 @@
import { Conf } from '@/config.ts';
import { ipfsUploader } from '@/uploaders/ipfs.ts';
import { localUploader } from '@/uploaders/local.ts';
import { s3Uploader } from '@/uploaders/s3.ts';
import type { Uploader } from './types.ts';
/** Meta-uploader determined from configuration. */
const configUploader: Uploader = {
upload(file, opts) {
return uploader().upload(file, opts);
},
delete(id, opts) {
return uploader().delete(id, opts);
},
};
/** Get the uploader module based on configuration. */
function uploader() {
switch (Conf.uploader) {
case 's3':
return s3Uploader;
case 'ipfs':
return ipfsUploader;
case 'local':
return localUploader;
default:
throw new Error('No `DITTO_UPLOADER` configured. Uploads are disabled.');
}
}
export { configUploader };

View File

@ -1,56 +0,0 @@
import { z } from 'zod';
import { Conf } from '@/config.ts';
import { fetchWorker } from '@/workers/fetch.ts';
import type { Uploader } from './types.ts';
/** Response schema for POST `/api/v0/add`. */
const ipfsAddResponseSchema = z.object({
Name: z.string(),
Hash: z.string(),
Size: z.string(),
});
/**
 * IPFS uploader. It expects an IPFS node to be up and running.
* It will try to connect to `http://localhost:5001` by default,
* and upload the file using the REST API.
*/
const ipfsUploader: Uploader = {
async upload(file, opts) {
const url = new URL('/api/v0/add', Conf.ipfs.apiUrl);
const formData = new FormData();
formData.append('file', file);
const response = await fetchWorker(url, {
method: 'POST',
body: formData,
signal: opts?.signal,
});
const { Hash: cid } = ipfsAddResponseSchema.parse(await response.json());
return {
id: cid,
cid,
url: new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString(),
};
},
async delete(cid, opts) {
const url = new URL('/api/v0/pin/rm', Conf.ipfs.apiUrl);
const query = new URLSearchParams();
query.set('arg', cid);
url.search = query.toString();
await fetchWorker(url, {
method: 'POST',
signal: opts?.signal,
});
},
};
export { ipfsUploader };

View File

@ -1,36 +0,0 @@
import { join } from 'node:path';
import { crypto } from '@std/crypto';
import { encodeHex } from '@std/encoding/hex';
import { extensionsByType } from '@std/media-types';
import { Conf } from '@/config.ts';
import type { Uploader } from './types.ts';
/** Local filesystem uploader. */
const localUploader: Uploader = {
async upload(file) {
const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
const ext = extensionsByType(file.type)?.[0] ?? 'bin';
const filename = `${sha256}.${ext}`;
await Deno.mkdir(Conf.uploadsDir, { recursive: true });
await Deno.writeFile(join(Conf.uploadsDir, filename), file.stream());
const { mediaDomain } = Conf;
const url = new URL(mediaDomain);
const path = url.pathname === '/' ? filename : join(url.pathname, filename);
return {
id: filename,
sha256,
url: new URL(path, url).toString(),
};
},
async delete(id) {
await Deno.remove(join(Conf.uploadsDir, id));
},
};
export { localUploader };

View File

@ -1,45 +0,0 @@
import { join } from 'node:path';
import { S3Client } from '@bradenmacdonald/s3-lite-client';
import { crypto } from '@std/crypto';
import { encodeHex } from '@std/encoding/hex';
import { extensionsByType } from '@std/media-types';
import { Conf } from '@/config.ts';
import type { Uploader } from './types.ts';
/** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */
const s3Uploader: Uploader = {
async upload(file) {
const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
const ext = extensionsByType(file.type)?.[0] ?? 'bin';
const filename = `${sha256}.${ext}`;
await client().putObject(filename, file.stream(), {
metadata: {
'Content-Type': file.type,
'x-amz-acl': 'public-read',
},
});
const { pathStyle, bucket } = Conf.s3;
const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
return {
id: filename,
sha256,
url: new URL(path, Conf.mediaDomain).toString(),
};
},
async delete(id) {
await client().deleteObject(id);
},
};
/** Build S3 client from config. */
function client() {
return new S3Client({ ...Conf.s3 });
}
export { s3Uploader };

View File

@ -1,21 +0,0 @@
/** Modular uploader interface, to support uploading to different backends. */
interface Uploader {
/** Upload the file to the backend. */
upload(file: File, opts?: { signal?: AbortSignal }): Promise<UploadResult>;
/** Delete the file from the backend. */
delete(cid: string, opts?: { signal?: AbortSignal }): Promise<void>;
}
/** Return value from the uploader after uploading a file. */
interface UploadResult {
/** File ID specific to the uploader, so it can later be referenced or deleted. */
id: string;
/** URL where the file can be accessed. */
url: string;
/** SHA-256 hash of the file. */
sha256?: string;
/** IPFS CID of the file. */
cid?: string;
}
export type { Uploader };

View File

@ -28,8 +28,12 @@ function getLnurl({ lud06, lud16 }: { lud06?: string; lud16?: string }, limit?:
if (lud16) { if (lud16) {
const [name, host] = lud16.split('@'); const [name, host] = lud16.split('@');
if (name && host) { if (name && host) {
const url = new URL(`/.well-known/lnurlp/${name}`, `https://${host}`); try {
return LNURL.encode(url, limit); const url = new URL(`/.well-known/lnurlp/${name}`, `https://${host}`);
return LNURL.encode(url, limit);
} catch {
return;
}
} }
} }
} }

17
src/utils/media.test.ts Normal file
View File

@ -0,0 +1,17 @@
import { assertEquals } from '@std/assert';
import { getUrlMediaType, isPermittedMediaType } from '@/utils/media.ts';
Deno.test('getUrlMediaType', () => {
assertEquals(getUrlMediaType('https://example.com/image.png'), 'image/png');
assertEquals(getUrlMediaType('https://example.com/index.html'), 'text/html');
assertEquals(getUrlMediaType('https://example.com/yolo'), undefined);
assertEquals(getUrlMediaType('https://example.com/'), undefined);
});
Deno.test('isPermittedMediaType', () => {
assertEquals(isPermittedMediaType('image/png', ['image', 'video']), true);
assertEquals(isPermittedMediaType('video/webm', ['image', 'video']), true);
assertEquals(isPermittedMediaType('audio/ogg', ['image', 'video']), false);
assertEquals(isPermittedMediaType('application/json', ['image', 'video']), false);
});

24
src/utils/media.ts Normal file
View File

@ -0,0 +1,24 @@
import { typeByExtension } from '@std/media-types';
/** Get media type of the filename in the URL by its extension, if any. */
export function getUrlMediaType(url: string): string | undefined {
try {
const { pathname } = new URL(url);
const ext = pathname.split('.').pop() ?? '';
return typeByExtension(ext);
} catch {
return undefined;
}
}
/**
* Check if the base type matches any of the permitted types.
*
* ```ts
* isPermittedMediaType('image/png', ['image', 'video']); // true
* ```
*/
export function isPermittedMediaType(mediaType: string, permitted: string[]): boolean {
const [baseType, _subType] = mediaType.split('/');
return permitted.includes(baseType);
}

35
src/utils/note.test.ts Normal file
View File

@ -0,0 +1,35 @@
import { assertEquals } from '@std/assert';
import { eventFixture } from '@/test.ts';
import { getMediaLinks, parseNoteContent, stripimeta } from '@/utils/note.ts';
Deno.test('parseNoteContent', () => {
const { html, links, firstUrl } = parseNoteContent('Hello, world!');
assertEquals(html, 'Hello, world!');
assertEquals(links, []);
assertEquals(firstUrl, undefined);
});
Deno.test('getMediaLinks', () => {
const links = [
{ href: 'https://example.com/image.png' },
{ href: 'https://example.com/index.html' },
{ href: 'https://example.com/yolo' },
{ href: 'https://example.com/' },
];
const mediaLinks = getMediaLinks(links);
assertEquals(mediaLinks, [[
['url', 'https://example.com/image.png'],
['m', 'image/png'],
]]);
});
Deno.test('stripimeta', async () => {
const { content, tags } = await eventFixture('event-imeta');
const stripped = stripimeta(content, tags);
const expected =
`Today we were made aware of multiple Fediverse blog posts incorrectly attributing “vote Trump” spam on Bluesky to the Mostr.pub Bridge. \n\nThis spam is NOT coming from Mostr. From the screenshots used in these blogs, it's clear the spam is coming from an entirely different bridge called momostr.pink. This bridge is not affiliated with Mostr, and is not even a fork of Mostr. We appreciate that the authors of these posts responded quickly to us and have since corrected the blogs. \n\nMostr.pub uses stirfry policies for anti-spam filtering. This includes an anti-duplication policy that prevents spam like the recent “vote Trump” posts we've seen repeated over and over. \n\nIt is important to note WHY there are multiple bridges, though. \n\nWhen Mostr.pub launched, multiple major servers immediately blocked Mostr, including Mastodon.social. The moderators of Mastodon.social claimed that this was because Nostr was unregulated, and suggested to one user that if they want to bridge their account they should host their own bridge.\n\nThat is exactly what momostr.pink, the source of this spam, has done. \n\nThe obvious response to the censorship of the Mostr Bridge is to build more bridges. \n\nWhile we have opted for pro-social policies that aim to reduce spam and build better connections between decentralized platforms, other bridges built to get around censorship of the Mostr Bridge may not — as we're already seeing.\n\nThere will inevitably be multiple bridges, and we're working on creating solutions to the problems that arise from that. In the meantime, if the Fediverse could do itself a favor and chill with the censorship for two seconds, we might not have so many problems. `;
assertEquals(stripped, expected);
});

View File

@ -4,8 +4,7 @@ import linkify from 'linkifyjs';
import { nip19, nip21 } from 'nostr-tools'; import { nip19, nip21 } from 'nostr-tools';
import { Conf } from '@/config.ts'; import { Conf } from '@/config.ts';
import { mime } from '@/deps.ts'; import { getUrlMediaType, isPermittedMediaType } from '@/utils/media.ts';
import { type DittoAttachment } from '@/views/mastodon/attachments.ts';
linkify.registerCustomProtocol('nostr', true); linkify.registerCustomProtocol('nostr', true);
linkify.registerCustomProtocol('wss'); linkify.registerCustomProtocol('wss');
@ -18,7 +17,7 @@ const linkifyOpts: linkify.Opts = {
return `<a class=\"mention hashtag\" href=\"${href}\" rel=\"tag\"><span>#</span>${tag}</a>`; return `<a class=\"mention hashtag\" href=\"${href}\" rel=\"tag\"><span>#</span>${tag}</a>`;
}, },
url: ({ content }) => { url: ({ content }) => {
if (nip21.test(content)) { try {
const { decoded } = nip21.parse(content); const { decoded } = nip21.parse(content);
const pubkey = getDecodedPubkey(decoded); const pubkey = getDecodedPubkey(decoded);
if (pubkey) { if (pubkey) {
@ -28,7 +27,7 @@ const linkifyOpts: linkify.Opts = {
} else { } else {
return ''; return '';
} }
} else { } catch {
return `<a href="${content}">${content}</a>`; return `<a href="${content}">${content}</a>`;
} }
}, },
@ -58,20 +57,42 @@ function parseNoteContent(content: string): ParsedNoteContent {
}; };
} }
function getMediaLinks(links: Link[]): DittoAttachment[] { /** Remove imeta links. */
return links.reduce<DittoAttachment[]>((acc, link) => { function stripimeta(content: string, tags: string[][]): string {
const mimeType = getUrlMimeType(link.href); const imeta = tags.filter(([name]) => name === 'imeta');
if (!mimeType) return acc;
const [baseType, _subType] = mimeType.split('/'); if (!imeta.length) {
return content;
}
if (['audio', 'image', 'video'].includes(baseType)) { const urls = new Set(
acc.push({ imeta.map(([, ...values]) => values.map((v) => v.split(' ')).find(([name]) => name === 'url')?.[1]),
url: link.href, );
data: {
mime: mimeType, const lines = content.split('\n').reverse();
},
}); for (const line of [...lines]) {
if (line === '' || urls.has(line)) {
lines.splice(0, 1);
} else {
break;
}
}
return lines.reverse().join('\n');
}
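To make the stripping behavior concrete, a small worked example (the URL is made up). Only blank lines and imeta URLs at the end of the content are removed:

import { stripimeta } from '@/utils/note.ts';

const tags = [['imeta', 'url https://example.com/cat.png', 'm image/png']];
const content = 'Look at my cat\n\nhttps://example.com/cat.png';

stripimeta(content, tags); // => 'Look at my cat'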
/** Returns a matrix of tags. Each item is a list of NIP-94 tags representing a file. */
function getMediaLinks(links: Pick<Link, 'href'>[]): string[][][] {
return links.reduce<string[][][]>((acc, link) => {
const mediaType = getUrlMediaType(link.href);
if (!mediaType) return acc;
if (isPermittedMediaType(mediaType, ['audio', 'image', 'video'])) {
acc.push([
['url', link.href],
['m', mediaType],
]);
} }
return acc; return acc;
@ -79,7 +100,7 @@ function getMediaLinks(links: Link[]): DittoAttachment[] {
} }
function isNonMediaLink({ href }: Link): boolean { function isNonMediaLink({ href }: Link): boolean {
return /^https?:\/\//.test(href) && !getUrlMimeType(href); return /^https?:\/\//.test(href) && !getUrlMediaType(href);
} }
/** Ensures the Link is a URL so it can be parsed. */ /** Ensures the Link is a URL so it can be parsed. */
@ -87,16 +108,6 @@ function isLinkURL(link: Link): boolean {
return link.type === 'url'; return link.type === 'url';
} }
/** `npm:mime` treats `.com` as a file extension, so parse the full URL to get its path first. */
function getUrlMimeType(url: string): string | undefined {
try {
const { pathname } = new URL(url);
return mime.getType(pathname) || undefined;
} catch (_e) {
return undefined;
}
}
/** Get pubkey from decoded bech32 entity, or undefined if not applicable. */ /** Get pubkey from decoded bech32 entity, or undefined if not applicable. */
function getDecodedPubkey(decoded: nip19.DecodeResult): string | undefined { function getDecodedPubkey(decoded: nip19.DecodeResult): string | undefined {
switch (decoded.type) { switch (decoded.type) {
@ -107,4 +118,4 @@ function getDecodedPubkey(decoded: nip19.DecodeResult): string | undefined {
} }
} }
export { getMediaLinks, parseNoteContent }; export { getMediaLinks, parseNoteContent, stripimeta };

View File

@ -1,32 +0,0 @@
import * as secp from '@noble/secp256k1';
import { LRUCache } from 'lru-cache';
import { Conf } from '@/config.ts';
import { generateSeededRsa, publicKeyToPem } from '@/deps.ts';
const opts = {
bits: 2048,
};
const rsaCache = new LRUCache<string, Promise<string>>({ max: 1000 });
async function buildSeed(pubkey: string): Promise<string> {
const key = await Conf.cryptoKey;
const data = new TextEncoder().encode(pubkey);
const signature = await window.crypto.subtle.sign('HMAC', key, data);
return secp.etc.bytesToHex(new Uint8Array(signature));
}
async function getPublicKeyPem(pubkey: string): Promise<string> {
const cached = await rsaCache.get(pubkey);
if (cached) return cached;
const seed = await buildSeed(pubkey);
const { publicKey } = await generateSeededRsa(seed, opts);
const promise = publicKeyToPem(publicKey);
rsaCache.set(pubkey, promise);
return promise;
}
export { getPublicKeyPem };

45
src/utils/tags.test.ts Normal file
View File

@ -0,0 +1,45 @@
import { assertEquals } from '@std/assert';
import { addTag, deleteTag, findQuoteTag, findReplyTag, getTagSet, hasTag } from './tags.ts';
Deno.test('addTag', () => {
const tags = [['p', 'alex']];
assertEquals(addTag(tags, ['p', 'alex']), [['p', 'alex']]);
assertEquals(addTag(tags, ['p', 'fiatjaf']), [['p', 'alex'], ['p', 'fiatjaf']]);
});
Deno.test('deleteTag', () => {
const tags = [['p', 'alex'], ['p', 'fiatjaf']];
assertEquals(deleteTag(tags, ['p', 'alex']), [['p', 'fiatjaf']]);
assertEquals(deleteTag(tags, ['p', 'fiatjaf']), [['p', 'alex']]);
});
Deno.test('findQuoteTag', () => {
assertEquals(findQuoteTag([['q', '123']]), ['q', '123']);
assertEquals(findQuoteTag([['e', '', '', 'mention', '456']]), ['e', '', '', 'mention', '456']);
assertEquals(findQuoteTag([['e', '', '', 'mention', '456'], ['q', '123']]), ['q', '123']);
assertEquals(findQuoteTag([['q', '123'], ['e', '', '', 'mention', '456']]), ['q', '123']);
});
Deno.test('findReplyTag', () => {
const root = ['e', '123', '', 'root'];
const reply = ['e', '456', '', 'reply'];
assertEquals(findReplyTag([root]), root);
assertEquals(findReplyTag([reply]), reply);
assertEquals(findReplyTag([root, reply]), reply);
assertEquals(findReplyTag([reply, root]), reply);
assertEquals(findReplyTag([['e', '321'], ['e', '789']]), ['e', '789']);
assertEquals(findReplyTag([reply, ['e', '789']]), reply);
});
Deno.test('getTagSet', () => {
const tags = [['p', 'alex'], ['p', 'fiatjaf'], ['p', 'alex']];
assertEquals(getTagSet(tags, 'p'), new Set(['alex', 'fiatjaf']));
});
Deno.test('hasTag', () => {
const tags = [['p', 'alex']];
assertEquals(hasTag(tags, ['p', 'alex']), true);
assertEquals(hasTag(tags, ['p', 'fiatjaf']), false);
});

71
src/utils/tags.ts Normal file
View File

@ -0,0 +1,71 @@
/** Get the values for a tag in a `Set`. */
function getTagSet(tags: string[][], tagName: string): Set<string> {
const set = new Set<string>();
tags.forEach((tag) => {
if (tag[0] === tagName) {
set.add(tag[1]);
}
});
return set;
}
/** Check if the tag exists by its name and value. */
function hasTag(tags: string[][], tag: string[]): boolean {
return tags.some(([name, value]) => name === tag[0] && value === tag[1]);
}
/** Delete all occurrences of the tag by its name/value pair. */
function deleteTag(tags: readonly string[][], tag: string[]): string[][] {
return tags.filter(([name, value]) => !(name === tag[0] && value === tag[1]));
}
/** Add a tag to the list, replacing the name/value pair if it already exists. */
function addTag(tags: readonly string[][], tag: string[]): string[][] {
const tagIndex = tags.findIndex(([name, value]) => name === tag[0] && value === tag[1]);
if (tagIndex === -1) {
return [...tags, tag];
} else {
return [...tags.slice(0, tagIndex), tag, ...tags.slice(tagIndex + 1)];
}
}
/** Tag is a NIP-10 root tag. */
function isRootTag(tag: string[]): tag is ['e', string, string, 'root', ...string[]] {
return tag[0] === 'e' && tag[3] === 'root';
}
/** Tag is a NIP-10 reply tag. */
function isReplyTag(tag: string[]): tag is ['e', string, string, 'reply', ...string[]] {
return tag[0] === 'e' && tag[3] === 'reply';
}
/** Tag is a legacy "e" tag with a "mention" marker. */
function isLegacyQuoteTag(tag: string[]): tag is ['e', string, string, 'mention', ...string[]] {
return tag[0] === 'e' && tag[3] === 'mention';
}
/** Tag is an "e" tag without a NIP-10 marker. */
function isLegacyReplyTag(tag: string[]): tag is ['e', string, string] {
return tag[0] === 'e' && !tag[3];
}
/** Tag is a "q" tag. */
function isQuoteTag(tag: string[]): tag is ['q', ...string[]] {
return tag[0] === 'q';
}
/** Get the "e" tag for the event being replied to, first according to the NIPs then falling back to the legacy way. */
function findReplyTag(tags: string[][]): ['e', ...string[]] | undefined {
return tags.find(isReplyTag) || tags.find(isRootTag) || tags.findLast(isLegacyReplyTag);
}
/** Get the "q" tag, falling back to the legacy "e" tag with a "mention" marker. */
function findQuoteTag(
tags: string[][],
): ['q', ...string[]] | ['e', string, string, 'mention', ...string[]] | undefined {
return tags.find(isQuoteTag) || tags.find(isLegacyQuoteTag);
}
export { addTag, deleteTag, findQuoteTag, findReplyTag, getTagSet, hasTag };

View File

@ -1,8 +1,8 @@
import TTLCache from '@isaacs/ttlcache'; import TTLCache from '@isaacs/ttlcache';
import Debug from '@soapbox/stickynotes/debug'; import Debug from '@soapbox/stickynotes/debug';
import DOMPurify from 'isomorphic-dompurify';
import { unfurl } from 'unfurl.js'; import { unfurl } from 'unfurl.js';
import { sanitizeHtml } from '@/deps.ts';
import { Time } from '@/utils/time.ts'; import { Time } from '@/utils/time.ts';
import { fetchWorker } from '@/workers/fetch.ts'; import { fetchWorker } from '@/workers/fetch.ts';
@ -44,11 +44,9 @@ async function unfurlCard(url: string, signal: AbortSignal): Promise<PreviewCard
provider_name: oEmbed?.provider_name || '', provider_name: oEmbed?.provider_name || '',
provider_url: oEmbed?.provider_url || '', provider_url: oEmbed?.provider_url || '',
// @ts-expect-error `html` does in fact exist on oEmbed. // @ts-expect-error `html` does in fact exist on oEmbed.
html: sanitizeHtml(oEmbed?.html || '', { html: DOMPurify.sanitize(oEmbed?.html || '', {
allowedTags: ['iframe'], ALLOWED_TAGS: ['iframe'],
allowedAttributes: { ALLOWED_ATTR: ['src', 'width', 'height', 'frameborder', 'allowfullscreen'],
iframe: ['width', 'height', 'src', 'frameborder', 'allowfullscreen'],
},
}), }),
width: ((oEmbed && oEmbed.type !== 'link') ? oEmbed.width : 0) || 0, width: ((oEmbed && oEmbed.type !== 'link') ? oEmbed.width : 0) || 0,
height: ((oEmbed && oEmbed.type !== 'link') ? oEmbed.height : 0) || 0, height: ((oEmbed && oEmbed.type !== 'link') ? oEmbed.height : 0) || 0,

44
src/utils/upload.ts Normal file
View File

@ -0,0 +1,44 @@
import { AppContext } from '@/app.ts';
import { Conf } from '@/config.ts';
import { insertUnattachedMedia, UnattachedMedia } from '@/db/unattached-media.ts';
import { HTTPException } from 'hono';
interface FileMeta {
pubkey: string;
description?: string;
}
/** Upload a file, track it in the database, and return the resulting media object. */
export async function uploadFile(
c: AppContext,
file: File,
meta: FileMeta,
signal?: AbortSignal,
): Promise<UnattachedMedia> {
const uploader = c.get('uploader');
if (!uploader) {
throw new HTTPException(500, {
res: c.json({ error: 'No uploader configured.' }),
});
}
const { pubkey, description } = meta;
if (file.size > Conf.maxUploadSize) {
throw new Error('File size is too large.');
}
const tags = await uploader.upload(file, { signal });
const url = tags[0][1];
if (description) {
tags.push(['alt', description]);
}
return insertUnattachedMedia({
id: crypto.randomUUID(),
pubkey,
url,
data: tags,
uploaded_at: Date.now(),
});
}
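A rough sketch of how a controller might call the new helper. The handler name, the form field name, and the `pubkey` context variable are assumptions for illustration:

import { AppContext } from '@/app.ts';
import { uploadFile } from '@/utils/upload.ts';

// Hypothetical controller: reads a multipart form field and stores it via the configured uploader.
async function hypotheticalUploadController(c: AppContext) {
  const formData = await c.req.raw.formData();
  const file = formData.get('file');

  if (!(file instanceof File)) {
    return c.json({ error: 'Missing file.' }, 422);
  }

  const media = await uploadFile(c, file, { pubkey: c.get('pubkey'), description: 'my upload' });
  return c.json({ id: media.id, url: media.url });
}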

View File

@ -5,6 +5,7 @@ import { renderAccount } from '@/views/mastodon/accounts.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts'; import { renderStatus } from '@/views/mastodon/statuses.ts';
import { paginated, paginationSchema } from '@/utils/api.ts'; import { paginated, paginationSchema } from '@/utils/api.ts';
import { hydrateEvents } from '@/storages/hydrate.ts'; import { hydrateEvents } from '@/storages/hydrate.ts';
import { accountFromPubkey } from '@/views/mastodon/accounts.ts';
/** Render account objects for the author of each event. */ /** Render account objects for the author of each event. */
async function renderEventAccounts(c: AppContext, filters: NostrFilter[], signal = AbortSignal.timeout(1000)) { async function renderEventAccounts(c: AppContext, filters: NostrFilter[], signal = AbortSignal.timeout(1000)) {
@ -24,7 +25,13 @@ async function renderEventAccounts(c: AppContext, filters: NostrFilter[], signal
.then((events) => hydrateEvents({ events, store, signal })); .then((events) => hydrateEvents({ events, store, signal }));
const accounts = await Promise.all( const accounts = await Promise.all(
authors.map((event) => renderAccount(event)), Array.from(pubkeys).map(async (pubkey) => {
const event = authors.find((event) => event.pubkey === pubkey);
if (event) {
return await renderAccount(event);
}
return await accountFromPubkey(pubkey);
}),
); );
return paginated(c, events, accounts); return paginated(c, events, accounts);
@ -39,7 +46,13 @@ async function renderAccounts(c: AppContext, authors: string[], signal = AbortSi
.then((events) => hydrateEvents({ events, store, signal })); .then((events) => hydrateEvents({ events, store, signal }));
const accounts = await Promise.all( const accounts = await Promise.all(
events.map((event) => renderAccount(event)), authors.map(async (pubkey) => {
const event = events.find((event) => event.pubkey === pubkey);
if (event) {
return await renderAccount(event);
}
return await accountFromPubkey(pubkey);
}),
); );
return paginated(c, events, accounts); return paginated(c, events, accounts);

View File

@ -1,48 +0,0 @@
import { NSchema as n } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
import { getPublicKeyPem } from '@/utils/rsa.ts';
import type { NostrEvent } from '@nostrify/nostrify';
import type { Actor } from '@/schemas/activitypub.ts';
/** Nostr metadata event to ActivityPub actor. */
async function renderActor(event: NostrEvent, username: string): Promise<Actor | undefined> {
const content = n.json().pipe(n.metadata()).catch({}).parse(event.content);
return {
type: 'Person',
id: Conf.local(`/users/${username}`),
name: content?.name || '',
preferredUsername: username,
inbox: Conf.local(`/users/${username}/inbox`),
followers: Conf.local(`/users/${username}/followers`),
following: Conf.local(`/users/${username}/following`),
outbox: Conf.local(`/users/${username}/outbox`),
icon: content.picture
? {
type: 'Image',
url: content.picture,
}
: undefined,
image: content.banner
? {
type: 'Image',
url: content.banner,
}
: undefined,
summary: content.about ?? '',
attachment: [],
tag: [],
publicKey: {
id: Conf.local(`/users/${username}#main-key`),
owner: Conf.local(`/users/${username}`),
publicKeyPem: await getPublicKeyPem(event.pubkey),
},
endpoints: {
sharedInbox: Conf.local('/inbox'),
},
};
}
export { renderActor };

View File

@ -1,8 +1,8 @@
import { NSchema as n } from '@nostrify/nostrify'; import { NSchema as n } from '@nostrify/nostrify';
import { escape } from 'entities';
import { nip19, UnsignedEvent } from 'nostr-tools'; import { nip19, UnsignedEvent } from 'nostr-tools';
import { Conf } from '@/config.ts'; import { Conf } from '@/config.ts';
import { lodash } from '@/deps.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getLnurl } from '@/utils/lnurl.ts'; import { getLnurl } from '@/utils/lnurl.ts';
import { nip05Cache } from '@/utils/nip05.ts'; import { nip05Cache } from '@/utils/nip05.ts';
@ -53,7 +53,7 @@ async function renderAccount(
header_static: banner, header_static: banner,
last_status_at: null, last_status_at: null,
locked: false, locked: false,
note: lodash.escape(about), note: about ? escape(about) : '',
roles: [], roles: [],
source: withSource source: withSource
? { ? {
@ -79,6 +79,7 @@ async function renderAccount(
is_admin: role === 'admin', is_admin: role === 'admin',
is_moderator: ['admin', 'moderator'].includes(role), is_moderator: ['admin', 'moderator'].includes(role),
is_local: parsed05?.domain === Conf.url.host, is_local: parsed05?.domain === Conf.url.host,
settings_store: undefined as unknown,
}, },
nostr: { nostr: {
pubkey, pubkey,

View File

@ -1,20 +1,41 @@
import * as TypeFest from 'type-fest'; import { getUrlMediaType } from '@/utils/media.ts';
import { UnattachedMedia } from '@/db/unattached-media.ts'; /** Render Mastodon media attachment. */
function renderAttachment(media: { id?: string; data: string[][] }) {
const { id, data: tags } = media;
type DittoAttachment = TypeFest.SetOptional<UnattachedMedia, 'id' | 'pubkey' | 'uploaded_at'>; const url = tags.find(([name]) => name === 'url')?.[1];
const m = tags.find(([name]) => name === 'm')?.[1] ?? getUrlMediaType(url!);
const alt = tags.find(([name]) => name === 'alt')?.[1];
const cid = tags.find(([name]) => name === 'cid')?.[1];
const dim = tags.find(([name]) => name === 'dim')?.[1];
const blurhash = tags.find(([name]) => name === 'blurhash')?.[1];
if (!url) return;
const [width, height] = dim?.split('x').map(Number) ?? [null, null];
const meta = (typeof width === 'number' && typeof height === 'number')
? {
original: {
width,
height,
aspect: width / height,
},
}
: undefined;
function renderAttachment(media: DittoAttachment) {
const { id, data, url } = media;
return { return {
id: id ?? url ?? data.cid, id: id ?? url,
type: getAttachmentType(data.mime ?? ''), type: getAttachmentType(m ?? ''),
url, url,
preview_url: url, preview_url: url,
remote_url: null, remote_url: null,
description: data.description ?? '', description: alt ?? '',
blurhash: data.blurhash || null, blurhash: blurhash || null,
cid: data.cid, meta,
cid: cid,
}; };
} }
@ -32,4 +53,4 @@ function getAttachmentType(mime: string): string {
} }
} }
export { type DittoAttachment, renderAttachment }; export { renderAttachment };
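For reference, a sketch of what the reworked renderer returns for an imeta-style tag list (the values are illustrative):

import { renderAttachment } from '@/views/mastodon/attachments.ts';

const attachment = renderAttachment({
  data: [
    ['url', 'https://example.com/cat.png'],
    ['m', 'image/png'],
    ['dim', '600x400'],
    ['alt', 'a cat'],
  ],
});

// attachment?.type is derived from the 'm' tag, description is 'a cat',
// and meta.original is { width: 600, height: 400, aspect: 1.5 }.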

View File

@ -1,5 +1,5 @@
import { Storages } from '@/storages.ts'; import { Storages } from '@/storages.ts';
import { hasTag } from '@/tags.ts'; import { hasTag } from '@/utils/tags.ts';
async function renderRelationship(sourcePubkey: string, targetPubkey: string) { async function renderRelationship(sourcePubkey: string, targetPubkey: string) {
const db = await Storages.db(); const db = await Storages.db();

View File

@ -1,18 +1,16 @@
import { NostrEvent, NSchema as n } from '@nostrify/nostrify'; import { NostrEvent } from '@nostrify/nostrify';
import { isCWTag } from 'https://gitlab.com/soapbox-pub/mostr/-/raw/c67064aee5ade5e01597c6d23e22e53c628ef0e2/src/nostr/tags.ts';
import { nip19 } from 'nostr-tools'; import { nip19 } from 'nostr-tools';
import { Conf } from '@/config.ts'; import { Conf } from '@/config.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts'; import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getMediaLinks, parseNoteContent } from '@/note.ts';
import { Storages } from '@/storages.ts'; import { Storages } from '@/storages.ts';
import { findReplyTag } from '@/tags.ts';
import { nostrDate } from '@/utils.ts'; import { nostrDate } from '@/utils.ts';
import { getMediaLinks, parseNoteContent, stripimeta } from '@/utils/note.ts';
import { findQuoteTag, findReplyTag } from '@/utils/tags.ts';
import { unfurlCardCached } from '@/utils/unfurl.ts'; import { unfurlCardCached } from '@/utils/unfurl.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts'; import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { DittoAttachment, renderAttachment } from '@/views/mastodon/attachments.ts'; import { renderAttachment } from '@/views/mastodon/attachments.ts';
import { renderEmojis } from '@/views/mastodon/emojis.ts'; import { renderEmojis } from '@/views/mastodon/emojis.ts';
import { mediaDataSchema } from '@/schemas/nostr.ts';
interface RenderStatusOpts { interface RenderStatusOpts {
viewerPubkey?: string; viewerPubkey?: string;
@ -31,6 +29,7 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
: await accountFromPubkey(event.pubkey); : await accountFromPubkey(event.pubkey);
const replyTag = findReplyTag(event.tags); const replyTag = findReplyTag(event.tags);
const quoteTag = findQuoteTag(event.tags);
const mentionedPubkeys = [ const mentionedPubkeys = [
...new Set( ...new Set(
@ -47,7 +46,7 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
[{ kinds: [0], authors: mentionedPubkeys, limit: mentionedPubkeys.length }], [{ kinds: [0], authors: mentionedPubkeys, limit: mentionedPubkeys.length }],
); );
const { html, links, firstUrl } = parseNoteContent(event.content); const { html, links, firstUrl } = parseNoteContent(stripimeta(event.content, event.tags));
const [mentions, card, relatedEvents] = await Promise const [mentions, card, relatedEvents] = await Promise
.all([ .all([
@ -74,16 +73,14 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
const content = buildInlineRecipients(mentions) + html; const content = buildInlineRecipients(mentions) + html;
const cw = event.tags.find(isCWTag); const cw = event.tags.find(([name]) => name === 'content-warning');
const subject = event.tags.find((tag) => tag[0] === 'subject'); const subject = event.tags.find(([name]) => name === 'subject');
const mediaLinks = getMediaLinks(links); const imeta: string[][][] = event.tags
.filter(([name]) => name === 'imeta')
.map(([_, ...entries]) => entries.map((entry) => entry.split(' ')));
const mediaTags: DittoAttachment[] = event.tags const media = imeta.length ? imeta : getMediaLinks(links);
.filter((tag) => tag[0] === 'media')
.map(([_, url, json]) => ({ url, data: n.json().pipe(mediaDataSchema).parse(json) }));
const media = [...mediaLinks, ...mediaTags];
return { return {
id: event.id, id: event.id,
@ -91,7 +88,7 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
card, card,
content, content,
created_at: nostrDate(event.created_at).toISOString(), created_at: nostrDate(event.created_at).toISOString(),
in_reply_to_id: replyTag ? replyTag[1] : null, in_reply_to_id: replyTag?.[1] ?? null,
in_reply_to_account_id: null, in_reply_to_account_id: null,
sensitive: !!cw, sensitive: !!cw,
spoiler_text: (cw ? cw[1] : subject?.[1]) || '', spoiler_text: (cw ? cw[1] : subject?.[1]) || '',
@ -107,13 +104,13 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
pinned: Boolean(pinEvent), pinned: Boolean(pinEvent),
reblog: null, reblog: null,
application: null, application: null,
media_attachments: media.map(renderAttachment), media_attachments: media.map((m) => renderAttachment({ data: m })).filter(Boolean),
mentions, mentions,
tags: [], tags: [],
emojis: renderEmojis(event), emojis: renderEmojis(event),
poll: null, poll: null,
quote: !event.quote ? null : await renderStatus(event.quote, { depth: depth + 1 }), quote: !event.quote ? null : await renderStatus(event.quote, { depth: depth + 1 }),
quote_id: event.tags.find(([name]) => name === 'q')?.[1] ?? null, quote_id: quoteTag?.[1] ?? null,
uri: Conf.external(note), uri: Conf.external(note),
url: Conf.external(note), url: Conf.external(note),
zapped: Boolean(zapEvent), zapped: Boolean(zapEvent),
@ -123,8 +120,6 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
async function renderReblog(event: DittoEvent, opts: RenderStatusOpts) { async function renderReblog(event: DittoEvent, opts: RenderStatusOpts) {
const { viewerPubkey } = opts; const { viewerPubkey } = opts;
if (!event.author) return;
const repostId = event.tags.find(([name]) => name === 'e')?.[1]; const repostId = event.tags.find(([name]) => name === 'e')?.[1];
if (!repostId) return; if (!repostId) return;
@ -134,7 +129,7 @@ async function renderReblog(event: DittoEvent, opts: RenderStatusOpts) {
return { return {
id: event.id, id: event.id,
account: await renderAccount(event.author), account: event.author ? await renderAccount(event.author) : await accountFromPubkey(event.pubkey),
reblogged: true, reblogged: true,
reblog, reblog,
}; };

View File

@ -1,4 +1,4 @@
import 'deno-safe-fetch'; import 'deno-safe-fetch/load';
import { NostrEvent, NostrRelayOK, NPolicy } from '@nostrify/nostrify'; import { NostrEvent, NostrRelayOK, NPolicy } from '@nostrify/nostrify';
import { ReadOnlyPolicy } from '@nostrify/nostrify/policies'; import { ReadOnlyPolicy } from '@nostrify/nostrify/policies';
import * as Comlink from 'comlink'; import * as Comlink from 'comlink';

View File

@ -1,5 +1,6 @@
import { NSchema } from '@nostrify/nostrify'; import { NSchema } from '@nostrify/nostrify';
import * as Comlink from 'comlink'; import * as Comlink from 'comlink';
import { DB as Sqlite } from 'deno-sqlite';
import { hashtagSchema } from '@/schema.ts'; import { hashtagSchema } from '@/schema.ts';
import { generateDateRange, Time } from '@/utils/time.ts'; import { generateDateRange, Time } from '@/utils/time.ts';