diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..0771aee
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,6 @@
+.env
+*.cpuprofile
+*.swp
+deno-test.xml
+
+/data
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 17f06fa..39dbfbb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,4 @@
.env
-*.cpuprofile
\ No newline at end of file
+*.cpuprofile
+*.swp
+deno-test.xml
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5cff577..b2140db 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,4 +1,4 @@
-image: denoland/deno:1.41.3
+image: denoland/deno:1.43.3
default:
interruptible: true
@@ -22,4 +22,10 @@ test:
stage: test
script: deno task test
variables:
- DITTO_NSEC: nsec1zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zygs4rm7hz
\ No newline at end of file
+ DITTO_NSEC: nsec1zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zygs4rm7hz
+ artifacts:
+ when: always
+ paths:
+ - deno-test.xml
+ reports:
+ junit: deno-test.xml
\ No newline at end of file
diff --git a/.hooks/pre-commit b/.hooks/pre-commit
new file mode 100755
index 0000000..c3451ed
--- /dev/null
+++ b/.hooks/pre-commit
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+. "$(dirname -- "$0")/_/hook.sh"
+
+deno run -A npm:lint-staged
diff --git a/.lintstagedrc b/.lintstagedrc
new file mode 100644
index 0000000..d3a3e55
--- /dev/null
+++ b/.lintstagedrc
@@ -0,0 +1,3 @@
+{
+ "*.{ts,tsx,md}": "deno fmt"
+}
\ No newline at end of file
diff --git a/.tool-versions b/.tool-versions
index a13fd5f..b3e19cd 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1 +1 @@
-deno 1.41.3
\ No newline at end of file
+deno 1.43.3
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..f8df815
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,8 @@
+FROM denoland/deno:1.43.3
+EXPOSE 4036
+WORKDIR /app
+RUN mkdir -p data && chown -R deno data
+USER deno
+COPY . .
+RUN deno cache src/server.ts
+CMD deno task start
diff --git a/README.md b/README.md
index 3e46067..6551f27 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,8 @@
Ditto is a Nostr server for building resilient communities online.
With Ditto, you can create your own social network that is decentralized, customizable, and free from ads and tracking.
+For more info see: https://docs.soapbox.pub/ditto/
+
⚠️ This software is a work in progress.
@@ -14,10 +16,11 @@ With Ditto, you can create your own social network that is decentralized, custom
- [x] Like and comment on posts
- [x] Share posts
- [x] Reposts
-- [ ] Notifications
+- [x] Notifications
- [x] Profiles
- [ ] Search
-- [ ] Moderation
+- [x] Moderation
+- [ ] Zaps
- [x] Customizable
- [x] Open source
- [x] Self-hosted
@@ -38,7 +41,7 @@ With Ditto, you can create your own social network that is decentralized, custom
## License
-© Alex Gleason & other Ditto contributors
+© Alex Gleason & other Ditto contributors
Ditto is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
diff --git a/deno.json b/deno.json
index c5681e3..5567cd2 100644
--- a/deno.json
+++ b/deno.json
@@ -4,27 +4,57 @@
"tasks": {
"start": "deno run -A src/server.ts",
"dev": "deno run -A --watch src/server.ts",
+ "hook": "deno run --allow-read --allow-run --allow-write https://deno.land/x/deno_hooks@0.1.1/mod.ts",
"debug": "deno run -A --inspect src/server.ts",
- "test": "DATABASE_URL=\"sqlite://:memory:\" deno test -A",
+ "test": "DATABASE_URL=\"sqlite://:memory:\" deno test -A --junit-path=./deno-test.xml",
"check": "deno check src/server.ts",
- "relays:sync": "deno run -A scripts/relays.ts sync",
"nsec": "deno run scripts/nsec.ts",
"admin:event": "deno run -A scripts/admin-event.ts",
- "admin:role": "deno run -A scripts/admin-role.ts"
+ "admin:role": "deno run -A scripts/admin-role.ts",
+ "stats:recompute": "deno run -A scripts/stats-recompute.ts"
},
- "unstable": ["ffi", "kv"],
+ "unstable": ["ffi", "kv", "worker-options"],
"exclude": ["./public"],
"imports": {
"@/": "./src/",
- "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.15.0",
+ "@bradenmacdonald/s3-lite-client": "jsr:@bradenmacdonald/s3-lite-client@^0.7.4",
+ "@db/sqlite": "jsr:@db/sqlite@^0.11.1",
+ "@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
+ "@lambdalisue/async": "jsr:@lambdalisue/async@^2.1.1",
+ "@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
+ "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.22.0",
+ "@sentry/deno": "https://deno.land/x/sentry@7.112.2/index.mjs",
+ "@soapbox/kysely-deno-sqlite": "jsr:@soapbox/kysely-deno-sqlite@^2.1.0",
+ "@soapbox/stickynotes": "jsr:@soapbox/stickynotes@^0.4.0",
+ "@std/assert": "jsr:@std/assert@^0.225.1",
"@std/cli": "jsr:@std/cli@^0.223.0",
+ "@std/crypto": "jsr:@std/crypto@^0.224.0",
+ "@std/dotenv": "jsr:@std/dotenv@^0.224.0",
+ "@std/encoding": "jsr:@std/encoding@^0.224.0",
"@std/json": "jsr:@std/json@^0.223.0",
+ "@std/media-types": "jsr:@std/media-types@^0.224.1",
"@std/streams": "jsr:@std/streams@^0.223.0",
+ "comlink": "npm:comlink@^4.4.1",
+ "deno-safe-fetch": "https://gitlab.com/soapbox-pub/deno-safe-fetch/-/raw/v1.0.0/load.ts",
+ "fast-stable-stringify": "npm:fast-stable-stringify@^1.0.0",
+ "formdata-helper": "npm:formdata-helper@^0.3.0",
"hono": "https://deno.land/x/hono@v3.10.1/mod.ts",
"hono/middleware": "https://deno.land/x/hono@v3.10.1/middleware.ts",
- "kysely": "npm:kysely@^0.26.3",
+ "iso-639-1": "npm:iso-639-1@2.1.15",
+ "kysely": "npm:kysely@^0.27.3",
"kysely_deno_postgres": "https://deno.land/x/kysely_deno_postgres@v0.4.0/mod.ts",
- "zod": "npm:zod@^3.23.4",
+ "linkify-plugin-hashtag": "npm:linkify-plugin-hashtag@^4.1.1",
+ "linkify-string": "npm:linkify-string@^4.1.1",
+ "linkifyjs": "npm:linkifyjs@^4.1.1",
+ "lru-cache": "npm:lru-cache@^10.2.2",
+ "nostr-relaypool": "npm:nostr-relaypool2@0.6.34",
+ "nostr-tools": "npm:nostr-tools@^2.5.1",
+ "nostr-wasm": "npm:nostr-wasm@^0.1.0",
+ "tldts": "npm:tldts@^6.0.14",
+ "tseep": "npm:tseep@^1.2.1",
+ "type-fest": "npm:type-fest@^4.3.0",
+ "unfurl.js": "npm:unfurl.js@^6.4.0",
+ "zod": "npm:zod@^3.23.5",
"~/fixtures/": "./fixtures/"
},
"lint": {
diff --git a/docs/events.md b/docs/events.md
index e850fcb..1674239 100644
--- a/docs/events.md
+++ b/docs/events.md
@@ -9,9 +9,7 @@ The Ditto server publishes kind `30361` events to represent users. These events
User events have the following tags:
- `d` - pubkey of the user.
-- `name` - NIP-05 username granted to the user, without the domain.
- `role` - one of `admin` or `user`.
-- `origin` - the origin of the user's NIP-05, at the time the event was published.
Example:
@@ -25,7 +23,6 @@ Example:
"tags": [
["d", "79c2cae114ea28a981e7559b4fe7854a473521a8d22a66bbab9fa248eb820ff6"],
["role", "user"],
- ["origin", "https://ditto.ngrok.app"],
["alt", "User's account was updated by the admins of ditto.ngrok.app"]
],
"sig": "fc12db77b1c8f8aa86c73b617f0cd4af1e6ba244239eaf3164a292de6d39363f32d6b817ffff796ace7a103d75e1d8e6a0fb7f618819b32d81a953b4a75d7507"
@@ -40,4 +37,4 @@ The sections below describe the `content` field. Some are encrypted and some are
### `pub.ditto.pleroma.config`
-NIP-04 encrypted JSON array of Pleroma ConfigDB objects. Pleroma admin API endpoints set this config, and Ditto reads from it.
\ No newline at end of file
+NIP-04 encrypted JSON array of Pleroma ConfigDB objects. Pleroma admin API endpoints set this config, and Ditto reads from it.
diff --git a/fixtures/events/kind-0-dictator.json b/fixtures/events/kind-0-dictator.json
new file mode 100644
index 0000000..a547332
--- /dev/null
+++ b/fixtures/events/kind-0-dictator.json
@@ -0,0 +1,9 @@
+{
+ "id": "2238893aee54bbe9188498a5aa124d62870d5757894bf52cdb362d1a0874ed18",
+ "pubkey": "c9f5508526e213c3bc5468161f1b738a86063a2ece540730f9412e7becd5f0b2",
+ "created_at": 1715517440,
+ "kind": 0,
+ "tags": [],
+ "content": "{\"name\":\"dictator\",\"about\":\"\",\"nip05\":\"\"}",
+ "sig": "a630ba158833eea10289fe077087ccad22c71ddfbe475153958cfc158ae94fb0a5f7b7626e62da6a3ef8bfbe67321e8f993517ed7f1578a45aff11bc2bec484c"
+}
diff --git a/fixtures/events/kind-0-george-orwell.json b/fixtures/events/kind-0-george-orwell.json
new file mode 100644
index 0000000..d835447
--- /dev/null
+++ b/fixtures/events/kind-0-george-orwell.json
@@ -0,0 +1,9 @@
+{
+ "id": "da4e1e727c6456cee2b0341a1d7a2356e4263523374a2570a7dd318ab5d73f93",
+ "pubkey": "e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700",
+ "created_at": 1715517565,
+ "kind": 0,
+ "tags": [],
+ "content": "{\"name\":\"george orwell\",\"about\":\"\",\"nip05\":\"\"}",
+ "sig": "cd375e2065cf452d3bfefa9951b04ab63018ab7c253803256cca1d89d03b38e454c71ed36fdd3c28a8ff2723cc19b21371ce0f9bbd39a92b1d1aa946137237bd"
+}
diff --git a/fixtures/events/kind-1-author-george-orwell.json b/fixtures/events/kind-1-author-george-orwell.json
new file mode 100644
index 0000000..d1bd4ab
--- /dev/null
+++ b/fixtures/events/kind-1-author-george-orwell.json
@@ -0,0 +1,9 @@
+{
+ "id": "44f19148f5af60b0f43ed8c737fbda31b165e05bb55562003c45d9a9f02e8228",
+ "pubkey": "e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700",
+ "created_at": 1715636249,
+ "kind": 1,
+ "tags": [],
+ "content": "I like free speech",
+ "sig": "6b50db9c1c02bd8b0e64512e71d53a0058569f44e8dcff65ad17fce544d6ae79f8f79fa0f9a615446fa8cbc2375709bf835751843b0cd10e62ae5d505fe106d4"
+}
diff --git a/fixtures/events/kind-1984-dictator-reports-george-orwell.json b/fixtures/events/kind-1984-dictator-reports-george-orwell.json
new file mode 100644
index 0000000..7280c59
--- /dev/null
+++ b/fixtures/events/kind-1984-dictator-reports-george-orwell.json
@@ -0,0 +1,24 @@
+{
+ "id": "129b2749330a7f1189d3e74c6764a955851f1e4017a818dfd51ab8e24192b0f3",
+ "pubkey": "c9f5508526e213c3bc5468161f1b738a86063a2ece540730f9412e7becd5f0b2",
+ "created_at": 1715636348,
+ "kind": 1984,
+ "tags": [
+ [
+ "p",
+ "e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700",
+ "other"
+ ],
+ [
+ "P",
+ "e724b1c1b90eab9cc0f5976b380b80dda050de1820dc143e62d9e4f27a9a0b2c"
+ ],
+ [
+ "e",
+ "44f19148f5af60b0f43ed8c737fbda31b165e05bb55562003c45d9a9f02e8228",
+ "other"
+ ]
+ ],
+ "content": "freedom of speech not freedom of reach",
+ "sig": "cd05a14749cdf0c7664d056e2c02518740000387732218dacd0c71de5b96c0c3c99a0b927b0cd0778f25a211525fa03b4ed4f4f537bb1221c73467780d4ee1bc"
+}
diff --git a/fixtures/nostrbuild-gif.json b/fixtures/nostrbuild-gif.json
new file mode 100644
index 0000000..49a969a
--- /dev/null
+++ b/fixtures/nostrbuild-gif.json
@@ -0,0 +1,34 @@
+{
+ "status": "success",
+ "message": "Upload successful.",
+ "data": [
+ {
+ "input_name": "APIv2",
+ "name": "e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "sha256": "0a71f1c9dd982079bc52e96403368209cbf9507c5f6956134686f56e684b6377",
+ "original_sha256": "e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3",
+ "type": "picture",
+ "mime": "image/gif",
+ "size": 1796276,
+ "blurhash": "LGH-S^Vwm]x]04kX-qR-R]SL5FxZ",
+ "dimensions": {
+ "width": 360,
+ "height": 216
+ },
+ "dimensionsString": "360x216",
+ "url": "https://image.nostr.build/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "thumbnail": "https://image.nostr.build/thumb/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "responsive": {
+ "240p": "https://image.nostr.build/resp/240p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "360p": "https://image.nostr.build/resp/360p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "480p": "https://image.nostr.build/resp/480p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "720p": "https://image.nostr.build/resp/720p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif",
+ "1080p": "https://image.nostr.build/resp/1080p/e5f6e0e380536780efa774e8d3c8a5a040e3f9f99dbb48910b261c32872ee3a3.gif"
+ },
+ "metadata": {
+ "date:create": "2024-05-18T02:11:39+00:00",
+ "date:modify": "2024-05-18T02:11:39+00:00"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/fixtures/nostrbuild-mp3.json b/fixtures/nostrbuild-mp3.json
new file mode 100644
index 0000000..42a60b4
--- /dev/null
+++ b/fixtures/nostrbuild-mp3.json
@@ -0,0 +1,29 @@
+{
+ "status": "success",
+ "message": "Upload successful.",
+ "data": [
+ {
+ "id": 0,
+ "input_name": "APIv2",
+ "name": "f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "url": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "thumbnail": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "responsive": {
+ "240p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "360p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "480p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "720p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3",
+ "1080p": "https://media.nostr.build/av/f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725.mp3"
+ },
+ "blurhash": "",
+ "sha256": "f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725",
+ "original_sha256": "f94665e6877741feb3fa3031342f95ae2ee00caae1cc651ce31ed6d524e05725",
+ "type": "video",
+ "mime": "audio/mpeg",
+ "size": 1519616,
+ "metadata": [],
+ "dimensions": [],
+ "dimensionsString": "0x0"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/installation/ditto.conf b/installation/ditto.conf
index 2a49173..afdf65c 100644
--- a/installation/ditto.conf
+++ b/installation/ditto.conf
@@ -3,7 +3,7 @@
# Edit this file to change occurences of "example.com" to your own domain.
upstream ditto {
- server 127.0.0.1:8000;
+ server 127.0.0.1:4036;
}
upstream ipfs_gateway {
diff --git a/scripts/admin-event.ts b/scripts/admin-event.ts
index a9939ad..ca94251 100644
--- a/scripts/admin-event.ts
+++ b/scripts/admin-event.ts
@@ -1,15 +1,16 @@
import { JsonParseStream } from '@std/json/json-parse-stream';
import { TextLineStream } from '@std/streams/text-line-stream';
-import { db } from '@/db.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { EventsDB } from '@/storages/events-db.ts';
+import { EventsDB } from '@/storages/EventsDB.ts';
import { type EventStub } from '@/utils/api.ts';
import { nostrNow } from '@/utils.ts';
const signer = new AdminSigner();
-const eventsDB = new EventsDB(db);
+const kysely = await DittoDB.getInstance();
+const eventsDB = new EventsDB(kysely);
const readable = Deno.stdin.readable
.pipeThrough(new TextDecoderStream())
diff --git a/scripts/admin-role.ts b/scripts/admin-role.ts
index 4fa212e..6e7bfc6 100644
--- a/scripts/admin-role.ts
+++ b/scripts/admin-role.ts
@@ -1,12 +1,13 @@
import { NSchema } from '@nostrify/nostrify';
-import { db } from '@/db.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
import { Conf } from '@/config.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { EventsDB } from '@/storages/events-db.ts';
+import { EventsDB } from '@/storages/EventsDB.ts';
import { nostrNow } from '@/utils.ts';
-const eventsDB = new EventsDB(db);
+const kysely = await DittoDB.getInstance();
+const eventsDB = new EventsDB(kysely);
const [pubkey, role] = Deno.args;
diff --git a/scripts/relays.ts b/scripts/relays.ts
deleted file mode 100644
index 84f8a7e..0000000
--- a/scripts/relays.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-import { addRelays } from '@/db/relays.ts';
-import { filteredArray } from '@/schema.ts';
-import { relaySchema } from '@/utils.ts';
-
-switch (Deno.args[0]) {
- case 'sync':
- await sync(Deno.args.slice(1));
- break;
- default:
- console.log('Usage: deno run -A scripts/relays.ts sync ');
-}
-
-async function sync([url]: string[]) {
- if (!url) {
- console.error('Error: please provide a URL');
- Deno.exit(1);
- }
- const response = await fetch(url);
- const data = await response.json();
- const values = filteredArray(relaySchema).parse(data) as `wss://${string}`[];
- await addRelays(values, { active: true });
- console.log(`Done: added ${values.length} relays.`);
-}
diff --git a/scripts/stats-recompute.ts b/scripts/stats-recompute.ts
new file mode 100644
index 0000000..4037a85
--- /dev/null
+++ b/scripts/stats-recompute.ts
@@ -0,0 +1,18 @@
+import { nip19 } from 'nostr-tools';
+
+import { refreshAuthorStats } from '@/stats.ts';
+
+let pubkey: string;
+try {
+ const result = nip19.decode(Deno.args[0]);
+ if (result.type === 'npub') {
+ pubkey = result.data;
+ } else {
+ throw new Error('Invalid npub');
+ }
+} catch {
+ console.error('Invalid npub');
+ Deno.exit(1);
+}
+
+await refreshAuthorStats(pubkey);
diff --git a/src/RelayError.ts b/src/RelayError.ts
new file mode 100644
index 0000000..0b01de3
--- /dev/null
+++ b/src/RelayError.ts
@@ -0,0 +1,24 @@
+import { NostrRelayOK } from '@nostrify/nostrify';
+
+export type RelayErrorPrefix = 'duplicate' | 'pow' | 'blocked' | 'rate-limited' | 'invalid' | 'error';
+
+/** Error thrown from a NIP-01 command result (`OK` message) with a machine-readable prefix. */
+export class RelayError extends Error {
+ constructor(prefix: RelayErrorPrefix, message: string) {
+ super(`${prefix}: ${message}`);
+ }
+
+ /** Construct a RelayError from the reason message. */
+ static fromReason(reason: string): RelayError {
+ const [prefix, ...rest] = reason.split(': ');
+ return new RelayError(prefix as RelayErrorPrefix, rest.join(': '));
+ }
+
+ /** Throw a new RelayError if the OK message is false. */
+ static assert(msg: NostrRelayOK): void {
+ const [, , ok, reason] = msg;
+ if (!ok) {
+ throw RelayError.fromReason(reason);
+ }
+ }
+}
diff --git a/src/app.ts b/src/app.ts
index abdec32..5300b48 100644
--- a/src/app.ts
+++ b/src/app.ts
@@ -1,12 +1,10 @@
-import { NostrEvent, NStore } from '@nostrify/nostrify';
+import { NostrEvent, NostrSigner, NStore, NUploader } from '@nostrify/nostrify';
+import Debug from '@soapbox/stickynotes/debug';
import { type Context, Env as HonoEnv, type Handler, Hono, Input as HonoInput, type MiddlewareHandler } from 'hono';
import { cors, logger, serveStatic } from 'hono/middleware';
import { Conf } from '@/config.ts';
-import '@/cron.ts';
-import { type User } from '@/db/users.ts';
-import { Debug, sentryMiddleware } from '@/deps.ts';
-import '@/firehose.ts';
+import { startFirehose } from '@/firehose.ts';
import { Time } from '@/utils.ts';
import { actorController } from '@/controllers/activitypub/actor.ts';
@@ -15,25 +13,28 @@ import {
accountLookupController,
accountSearchController,
accountStatusesController,
- blockController,
createAccountController,
favouritesController,
followController,
followersController,
followingController,
+ muteController,
relationshipsController,
- unblockController,
unfollowController,
+ unmuteController,
updateCredentialsController,
verifyCredentialsController,
} from '@/controllers/api/accounts.ts';
-import { adminAccountsController } from '@/controllers/api/admin.ts';
+import { adminAccountAction, adminAccountsController } from '@/controllers/api/admin.ts';
import { appCredentialsController, createAppController } from '@/controllers/api/apps.ts';
import { blocksController } from '@/controllers/api/blocks.ts';
import { bookmarksController } from '@/controllers/api/bookmarks.ts';
+import { adminRelaysController, adminSetRelaysController } from '@/controllers/api/ditto.ts';
import { emptyArrayController, emptyObjectController, notImplementedController } from '@/controllers/api/fallback.ts';
import { instanceController } from '@/controllers/api/instance.ts';
+import { markersController, updateMarkersController } from '@/controllers/api/markers.ts';
import { mediaController } from '@/controllers/api/media.ts';
+import { mutesController } from '@/controllers/api/mutes.ts';
import { notificationsController } from '@/controllers/api/notifications.ts';
import { createTokenController, oauthAuthorizeController, oauthController } from '@/controllers/api/oauth.ts';
import {
@@ -44,6 +45,12 @@ import {
} from '@/controllers/api/pleroma.ts';
import { preferencesController } from '@/controllers/api/preferences.ts';
import { relayController } from '@/controllers/nostr/relay.ts';
+import {
+ adminReportController,
+ adminReportResolveController,
+ adminReportsController,
+ reportController,
+} from '@/controllers/api/reports.ts';
import { searchController } from '@/controllers/api/search.ts';
import {
bookmarkController,
@@ -62,6 +69,7 @@ import {
zapController,
} from '@/controllers/api/statuses.ts';
import { streamingController } from '@/controllers/api/streaming.ts';
+import { suggestionsV1Controller, suggestionsV2Controller } from '@/controllers/api/suggestions.ts';
import {
hashtagTimelineController,
homeTimelineController,
@@ -73,25 +81,26 @@ import { hostMetaController } from '@/controllers/well-known/host-meta.ts';
import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts';
import { nostrController } from '@/controllers/well-known/nostr.ts';
import { webfingerController } from '@/controllers/well-known/webfinger.ts';
-import { auth19, requirePubkey } from '@/middleware/auth19.ts';
-import { auth98, requireProof, requireRole } from '@/middleware/auth98.ts';
-import { cache } from '@/middleware/cache.ts';
-import { csp } from '@/middleware/csp.ts';
-import { adminRelaysController } from '@/controllers/api/ditto.ts';
-import { storeMiddleware } from '@/middleware/store.ts';
+import { auth98Middleware, requireProof, requireRole } from '@/middleware/auth98Middleware.ts';
+import { cacheMiddleware } from '@/middleware/cacheMiddleware.ts';
+import { cspMiddleware } from '@/middleware/cspMiddleware.ts';
+import { requireSigner } from '@/middleware/requireSigner.ts';
+import { signerMiddleware } from '@/middleware/signerMiddleware.ts';
+import { storeMiddleware } from '@/middleware/storeMiddleware.ts';
+import { blockController } from '@/controllers/api/accounts.ts';
+import { unblockController } from '@/controllers/api/accounts.ts';
+import { uploaderMiddleware } from '@/middleware/uploaderMiddleware.ts';
interface AppEnv extends HonoEnv {
Variables: {
- /** Hex pubkey for the current user. If provided, the user is considered "logged in." */
- pubkey?: string;
- /** Hex secret key for the current user. Optional, but easiest way to use legacy Mastodon apps. */
- seckey?: Uint8Array;
+ /** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */
+ signer?: NostrSigner;
+ /** Uploader for the user to upload files. */
+ uploader?: NUploader;
/** NIP-98 signed event proving the pubkey is owned by the user. */
proof?: NostrEvent;
- /** User associated with the pubkey, if any. */
- user?: User;
/** Store */
- store?: NStore;
+ store: NStore;
};
}
@@ -101,13 +110,12 @@ type AppController = Handler();
-if (Conf.sentryDsn) {
- // @ts-ignore Mismatched hono types.
- app.use('*', sentryMiddleware({ dsn: Conf.sentryDsn }));
-}
-
const debug = Debug('ditto:http');
+if (Conf.firehoseEnabled) {
+ startFirehose();
+}
+
app.use('/api/*', logger(debug));
app.use('/relay/*', logger(debug));
app.use('/.well-known/*', logger(debug));
@@ -119,7 +127,15 @@ app.get('/api/v1/streaming', streamingController);
app.get('/api/v1/streaming/', streamingController);
app.get('/relay', relayController);
-app.use('*', csp(), cors({ origin: '*', exposeHeaders: ['link'] }), auth19, auth98());
+app.use(
+ '*',
+ cspMiddleware(),
+ cors({ origin: '*', exposeHeaders: ['link'] }),
+ signerMiddleware,
+ uploaderMiddleware,
+ auth98Middleware(),
+ storeMiddleware,
+);
app.get('/.well-known/webfinger', webfingerController);
app.get('/.well-known/host-meta', hostMetaController);
@@ -130,7 +146,7 @@ app.get('/users/:username', actorController);
app.get('/nodeinfo/:version', nodeInfoSchemaController);
-app.get('/api/v1/instance', cache({ cacheName: 'web', expires: Time.minutes(5) }), instanceController);
+app.get('/api/v1/instance', cacheMiddleware({ cacheName: 'web', expires: Time.minutes(5) }), instanceController);
app.get('/api/v1/apps/verify_credentials', appCredentialsController);
app.post('/api/v1/apps', createAppController);
@@ -141,15 +157,17 @@ app.post('/oauth/authorize', oauthAuthorizeController);
app.get('/oauth/authorize', oauthController);
app.post('/api/v1/accounts', requireProof({ pow: 20 }), createAccountController);
-app.get('/api/v1/accounts/verify_credentials', requirePubkey, verifyCredentialsController);
-app.patch('/api/v1/accounts/update_credentials', requirePubkey, updateCredentialsController);
+app.get('/api/v1/accounts/verify_credentials', requireSigner, verifyCredentialsController);
+app.patch('/api/v1/accounts/update_credentials', requireSigner, updateCredentialsController);
app.get('/api/v1/accounts/search', accountSearchController);
app.get('/api/v1/accounts/lookup', accountLookupController);
-app.get('/api/v1/accounts/relationships', requirePubkey, relationshipsController);
-app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', requirePubkey, blockController);
-app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', requirePubkey, unblockController);
-app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow', requirePubkey, followController);
-app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow', requirePubkey, unfollowController);
+app.get('/api/v1/accounts/relationships', requireSigner, relationshipsController);
+app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', requireSigner, blockController);
+app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', requireSigner, unblockController);
+app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', requireSigner, muteController);
+app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', requireSigner, unmuteController);
+app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow', requireSigner, followController);
+app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow', requireSigner, unfollowController);
app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/followers', followersController);
app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/following', followingController);
app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/statuses', accountStatusesController);
@@ -159,22 +177,22 @@ app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/favourited_by', favouritedByControll
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/reblogged_by', rebloggedByController);
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/context', contextController);
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}', statusController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', requirePubkey, favouriteController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', requirePubkey, bookmarkController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', requirePubkey, unbookmarkController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', requirePubkey, pinController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', requirePubkey, unpinController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/zap', requirePubkey, zapController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', requirePubkey, reblogStatusController);
-app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', requirePubkey, unreblogStatusController);
-app.post('/api/v1/statuses', requirePubkey, createStatusController);
-app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', requirePubkey, deleteStatusController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', requireSigner, favouriteController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', requireSigner, bookmarkController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', requireSigner, unbookmarkController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', requireSigner, pinController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', requireSigner, unpinController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/zap', requireSigner, zapController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', requireSigner, reblogStatusController);
+app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', requireSigner, unreblogStatusController);
+app.post('/api/v1/statuses', requireSigner, createStatusController);
+app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', requireSigner, deleteStatusController);
app.post('/api/v1/media', mediaController);
app.post('/api/v2/media', mediaController);
-app.get('/api/v1/timelines/home', requirePubkey, storeMiddleware, homeTimelineController);
-app.get('/api/v1/timelines/public', storeMiddleware, publicTimelineController);
+app.get('/api/v1/timelines/home', requireSigner, homeTimelineController);
+app.get('/api/v1/timelines/public', publicTimelineController);
app.get('/api/v1/timelines/tag/:hashtag', hashtagTimelineController);
app.get('/api/v1/preferences', preferencesController);
@@ -183,13 +201,24 @@ app.get('/api/v2/search', searchController);
app.get('/api/pleroma/frontend_configurations', frontendConfigController);
-app.get('/api/v1/trends/tags', cache({ cacheName: 'web', expires: Time.minutes(15) }), trendingTagsController);
-app.get('/api/v1/trends', cache({ cacheName: 'web', expires: Time.minutes(15) }), trendingTagsController);
+app.get(
+ '/api/v1/trends/tags',
+ cacheMiddleware({ cacheName: 'web', expires: Time.minutes(15) }),
+ trendingTagsController,
+);
+app.get('/api/v1/trends', cacheMiddleware({ cacheName: 'web', expires: Time.minutes(15) }), trendingTagsController);
-app.get('/api/v1/notifications', requirePubkey, notificationsController);
-app.get('/api/v1/favourites', requirePubkey, favouritesController);
-app.get('/api/v1/bookmarks', requirePubkey, bookmarksController);
-app.get('/api/v1/blocks', requirePubkey, blocksController);
+app.get('/api/v1/suggestions', suggestionsV1Controller);
+app.get('/api/v2/suggestions', suggestionsV2Controller);
+
+app.get('/api/v1/notifications', requireSigner, notificationsController);
+app.get('/api/v1/favourites', requireSigner, favouritesController);
+app.get('/api/v1/bookmarks', requireSigner, bookmarksController);
+app.get('/api/v1/blocks', requireSigner, blocksController);
+app.get('/api/v1/mutes', requireSigner, mutesController);
+
+app.get('/api/v1/markers', requireProof(), markersController);
+app.post('/api/v1/markers', requireProof(), updateMarkersController);
app.get('/api/v1/admin/accounts', requireRole('admin'), adminAccountsController);
app.get('/api/v1/pleroma/admin/config', requireRole('admin'), configController);
@@ -197,14 +226,24 @@ app.post('/api/v1/pleroma/admin/config', requireRole('admin'), updateConfigContr
app.delete('/api/v1/pleroma/admin/statuses/:id', requireRole('admin'), pleromaAdminDeleteStatusController);
app.get('/api/v1/admin/ditto/relays', requireRole('admin'), adminRelaysController);
-app.put('/api/v1/admin/ditto/relays', requireRole('admin'), adminRelaysController);
+app.put('/api/v1/admin/ditto/relays', requireRole('admin'), adminSetRelaysController);
+
+app.post('/api/v1/reports', requireSigner, reportController);
+app.get('/api/v1/admin/reports', requireSigner, requireRole('admin'), adminReportsController);
+app.get('/api/v1/admin/reports/:id{[0-9a-f]{64}}', requireSigner, requireRole('admin'), adminReportController);
+app.post(
+ '/api/v1/admin/reports/:id{[0-9a-f]{64}}/resolve',
+ requireSigner,
+ requireRole('admin'),
+ adminReportResolveController,
+);
+
+app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action', requireSigner, requireRole('admin'), adminAccountAction);
// Not (yet) implemented.
app.get('/api/v1/custom_emojis', emptyArrayController);
app.get('/api/v1/filters', emptyArrayController);
-app.get('/api/v1/mutes', emptyArrayController);
app.get('/api/v1/domain_blocks', emptyArrayController);
-app.get('/api/v1/markers', emptyObjectController);
app.get('/api/v1/conversations', emptyArrayController);
app.get('/api/v1/lists', emptyArrayController);
diff --git a/src/config.ts b/src/config.ts
index c4d6a9b..cc14998 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -1,7 +1,8 @@
import url from 'node:url';
-import { z } from 'zod';
-import { dotenv, getPublicKey, nip19 } from '@/deps.ts';
+import * as dotenv from '@std/dotenv';
+import { getPublicKey, nip19 } from 'nostr-tools';
+import { z } from 'zod';
/** Load environment config from `.env` */
await dotenv.load({
@@ -41,7 +42,7 @@ class Conf {
}
static get port() {
- return parseInt(Deno.env.get('PORT') || '8000');
+ return parseInt(Deno.env.get('PORT') || '4036');
}
static get relay(): `wss://${string}` | `ws://${string}` {
@@ -54,7 +55,7 @@ class Conf {
}
/** Origin of the Ditto server, including the protocol and port. */
static get localDomain() {
- return Deno.env.get('LOCAL_DOMAIN') || 'http://localhost:8000';
+ return Deno.env.get('LOCAL_DOMAIN') || `http://localhost:${Conf.port}`;
}
/** URL to an external Nostr viewer. */
static get externalDomain() {
@@ -135,10 +136,22 @@ class Conf {
return Deno.env.get('IPFS_API_URL') || 'http://localhost:5001';
},
};
+ /** nostr.build API endpoint when the `nostrbuild` uploader is used. */
+ static get nostrbuildEndpoint(): string {
+ return Deno.env.get('NOSTRBUILD_ENDPOINT') || 'https://nostr.build/api/v2/upload/files';
+ }
+ /** Default Blossom servers to use when the `blossom` uploader is set. */
+ static get blossomServers(): string[] {
+ return Deno.env.get('BLOSSOM_SERVERS')?.split(',') || ['https://blossom.primal.net/'];
+ }
/** Module to upload files with. */
static get uploader() {
return Deno.env.get('DITTO_UPLOADER');
}
+ /** Location to use for local uploads. */
+ static get uploadsDir() {
+ return Deno.env.get('UPLOADS_DIR') || 'data/uploads';
+ }
/** Media base URL for uploads. */
static get mediaDomain() {
const value = Deno.env.get('MEDIA_DOMAIN');
@@ -203,6 +216,21 @@ class Conf {
}
},
};
+ /** Postgres settings. */
+ static pg = {
+ /** Number of connections to use in the pool. */
+ get poolSize(): number {
+ return Number(Deno.env.get('PG_POOL_SIZE') ?? 10);
+ },
+ };
+ /** Whether to enable requesting events from known relays. */
+ static get firehoseEnabled(): boolean {
+ return optionalBooleanSchema.parse(Deno.env.get('FIREHOSE_ENABLED')) ?? true;
+ }
+ /** Path to the custom policy module. Must be an absolute path, https:, npm:, or jsr: URI. */
+ static get policy(): string {
+ return Deno.env.get('DITTO_POLICY') || new URL('../data/policy.ts', import.meta.url).pathname;
+ }
}
const optionalBooleanSchema = z
diff --git a/src/controllers/activitypub/actor.ts b/src/controllers/activitypub/actor.ts
index e82a88a..19f5f10 100644
--- a/src/controllers/activitypub/actor.ts
+++ b/src/controllers/activitypub/actor.ts
@@ -9,7 +9,7 @@ const actorController: AppController = async (c) => {
const username = c.req.param('username');
const { signal } = c.req.raw;
- const pointer = await localNip05Lookup(username);
+ const pointer = await localNip05Lookup(c.get('store'), username);
if (!pointer) return notFound(c);
const event = await getAuthor(pointer.pubkey, { signal });
diff --git a/src/controllers/api/accounts.ts b/src/controllers/api/accounts.ts
index 5f0840e..f717be3 100644
--- a/src/controllers/api/accounts.ts
+++ b/src/controllers/api/accounts.ts
@@ -1,15 +1,14 @@
-import { NostrFilter } from '@nostrify/nostrify';
+import { NostrFilter, NSchema as n } from '@nostrify/nostrify';
+import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
-import { nip19 } from '@/deps.ts';
import { getAuthor, getFollowedPubkeys } from '@/queries.ts';
import { booleanParamSchema, fileSchema } from '@/schema.ts';
-import { jsonMetaContentSchema } from '@/schemas/nostr.ts';
-import { eventsDB, searchStore } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { addTag, deleteTag, findReplyTag, getTagSet } from '@/tags.ts';
-import { uploadFile } from '@/upload.ts';
+import { uploadFile } from '@/utils/upload.ts';
import { nostrNow } from '@/utils.ts';
import { createEvent, paginated, paginationSchema, parseBody, updateListEvent } from '@/utils/api.ts';
import { lookupAccount } from '@/utils/lookup.ts';
@@ -18,7 +17,7 @@ import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
import { renderRelationship } from '@/views/mastodon/relationships.ts';
import { renderStatus } from '@/views/mastodon/statuses.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
-import { APISigner } from '@/signers/APISigner.ts';
+import { bech32ToPubkey } from '@/utils.ts';
const usernameSchema = z
.string().min(1).max(30)
@@ -30,7 +29,7 @@ const createAccountSchema = z.object({
});
const createAccountController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const result = createAccountSchema.safeParse(await c.req.json());
if (!result.success) {
@@ -46,28 +45,32 @@ const createAccountController: AppController = async (c) => {
};
const verifyCredentialsController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const signer = c.get('signer')!;
+ const pubkey = await signer.getPublicKey();
- const event = await getAuthor(pubkey, { relations: ['author_stats'] });
- if (event) {
- const account = await renderAccount(event, { withSource: true });
+ const eventsDB = await Storages.db();
- const [userPreferencesEvent] = await eventsDB.query([{
+ const [author, [settingsStore]] = await Promise.all([
+ getAuthor(pubkey, { signal: AbortSignal.timeout(5000) }),
+
+ eventsDB.query([{
authors: [pubkey],
kinds: [30078],
'#d': ['pub.ditto.pleroma_settings_store'],
limit: 1,
- }]);
- if (userPreferencesEvent) {
- const signer = new APISigner(c);
- const userPreference = JSON.parse(await signer.nip44.decrypt(pubkey, userPreferencesEvent.content));
- (account.pleroma as any).settings_store = userPreference;
- }
+ }]),
+ ]);
- return c.json(account);
- } else {
- return c.json(await accountFromPubkey(pubkey, { withSource: true }));
+ const account = author
+ ? await renderAccount(author, { withSource: true })
+ : await accountFromPubkey(pubkey, { withSource: true });
+
+ if (settingsStore) {
+ const data = await signer.nip44!.decrypt(pubkey, settingsStore.content);
+ account.pleroma.settings_store = JSON.parse(data);
}
+
+ return c.json(account);
};
const accountController: AppController = async (c) => {
@@ -92,28 +95,44 @@ const accountLookupController: AppController = async (c) => {
if (event) {
return c.json(await renderAccount(event));
}
-
- return c.json({ error: 'Could not find user.' }, 404);
+ try {
+ const pubkey = bech32ToPubkey(decodeURIComponent(acct)) as string;
+ return c.json(await accountFromPubkey(pubkey));
+ } catch (e) {
+ console.log(e);
+ return c.json({ error: 'Could not find user.' }, 404);
+ }
};
-const accountSearchController: AppController = async (c) => {
- const q = c.req.query('q');
+const accountSearchQuerySchema = z.object({
+ q: z.string().transform(decodeURIComponent),
+ resolve: booleanParamSchema.optional().transform(Boolean),
+ following: z.boolean().default(false),
+ limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
+});
- if (!q) {
- return c.json({ error: 'Missing `q` query parameter.' }, 422);
+const accountSearchController: AppController = async (c) => {
+ const result = accountSearchQuerySchema.safeParse(c.req.query());
+ const { signal } = c.req.raw;
+
+ if (!result.success) {
+ return c.json({ error: 'Bad request', schema: result.error }, 422);
}
+ const { q, limit } = result.data;
+
const query = decodeURIComponent(q);
+ const store = await Storages.search();
const [event, events] = await Promise.all([
lookupAccount(query),
- searchStore.query([{ kinds: [0], search: query, limit: 20 }], { signal: c.req.raw.signal }),
+ store.query([{ kinds: [0], search: query, limit }], { signal }),
]);
const results = await hydrateEvents({
events: event ? [event, ...events] : events,
- storage: eventsDB,
- signal: c.req.raw.signal,
+ store,
+ signal,
});
if ((results.length < 1) && query.match(/npub1\w+/)) {
@@ -132,7 +151,7 @@ const accountSearchController: AppController = async (c) => {
};
const relationshipsController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const ids = z.array(z.string()).safeParse(c.req.queries('id[]'));
if (!ids.success) {
@@ -157,8 +176,10 @@ const accountStatusesController: AppController = async (c) => {
const { pinned, limit, exclude_replies, tagged } = accountStatusesQuerySchema.parse(c.req.query());
const { signal } = c.req.raw;
+ const store = await Storages.db();
+
if (pinned) {
- const [pinEvent] = await eventsDB.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal });
+ const [pinEvent] = await store.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal });
if (pinEvent) {
const pinnedEventIds = getTagSet(pinEvent.tags, 'e');
return renderStatuses(c, [...pinnedEventIds].reverse());
@@ -179,8 +200,8 @@ const accountStatusesController: AppController = async (c) => {
filter['#t'] = [tagged];
}
- const events = await eventsDB.query([filter], { signal })
- .then((events) => hydrateEvents({ events, storage: eventsDB, signal }))
+ const events = await store.query([filter], { signal })
+ .then((events) => hydrateEvents({ events, store, signal }))
.then((events) => {
if (exclude_replies) {
return events.filter((event) => !findReplyTag(event.tags));
@@ -188,7 +209,11 @@ const accountStatusesController: AppController = async (c) => {
return events;
});
- const statuses = await Promise.all(events.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })));
+ const viewerPubkey = await c.get('signer')?.getPublicKey();
+
+ const statuses = await Promise.all(
+ events.map((event) => renderStatus(event, { viewerPubkey })),
+ );
return paginated(c, events, statuses);
};
@@ -201,11 +226,12 @@ const updateCredentialsSchema = z.object({
bot: z.boolean().optional(),
discoverable: z.boolean().optional(),
nip05: z.string().optional(),
- pleroma_settings_store: z.object({ soapbox_fe: z.record(z.string(), z.unknown()) }).optional(),
+ pleroma_settings_store: z.unknown().optional(),
});
const updateCredentialsController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const signer = c.get('signer')!;
+ const pubkey = await signer.getPublicKey();
const body = await parseBody(c.req.raw);
const result = updateCredentialsSchema.safeParse(body);
@@ -214,7 +240,7 @@ const updateCredentialsController: AppController = async (c) => {
}
const author = await getAuthor(pubkey);
- const meta = author ? jsonMetaContentSchema.parse(author.content) : {};
+ const meta = author ? n.json().pipe(n.metadata()).catch({}).parse(author.content) : {};
const {
avatar: avatarFile,
@@ -225,8 +251,8 @@ const updateCredentialsController: AppController = async (c) => {
} = result.data;
const [avatar, header] = await Promise.all([
- avatarFile ? uploadFile(avatarFile, { pubkey }) : undefined,
- headerFile ? uploadFile(headerFile, { pubkey }) : undefined,
+ avatarFile ? uploadFile(c, avatarFile, { pubkey }) : undefined,
+ headerFile ? uploadFile(c, headerFile, { pubkey }) : undefined,
]);
meta.name = display_name ?? meta.name;
@@ -241,55 +267,46 @@ const updateCredentialsController: AppController = async (c) => {
tags: [],
}, c);
- const pleroma_frontend = result.data.pleroma_settings_store;
- if (pleroma_frontend) {
- const signer = new APISigner(c);
+ const account = await renderAccount(event, { withSource: true });
+ const settingsStore = result.data.pleroma_settings_store;
+
+ if (settingsStore) {
await createEvent({
kind: 30078,
tags: [['d', 'pub.ditto.pleroma_settings_store']],
- content: await signer.nip44.encrypt(pubkey, JSON.stringify(pleroma_frontend)),
+ content: await signer.nip44!.encrypt(pubkey, JSON.stringify(settingsStore)),
}, c);
}
- const account = await renderAccount(event, { withSource: true });
-
- const [userPreferencesEvent] = await eventsDB.query([{
- authors: [pubkey],
- kinds: [30078],
- '#d': ['pub.ditto.pleroma_settings_store'],
- limit: 1,
- }]);
- if (userPreferencesEvent) {
- const signer = new APISigner(c);
- const userPreference = JSON.parse(await signer.nip44.decrypt(pubkey, userPreferencesEvent.content));
- (account.pleroma as any).settings_store = userPreference;
- }
+ account.pleroma.settings_store = settingsStore;
return c.json(account);
};
/** https://docs.joinmastodon.org/methods/accounts/#follow */
const followController: AppController = async (c) => {
- const sourcePubkey = c.get('pubkey')!;
+ const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
- { kinds: [3], authors: [sourcePubkey] },
+ { kinds: [3], authors: [sourcePubkey], limit: 1 },
(tags) => addTag(tags, ['p', targetPubkey]),
c,
);
const relationship = await renderRelationship(sourcePubkey, targetPubkey);
+ relationship.following = true;
+
return c.json(relationship);
};
/** https://docs.joinmastodon.org/methods/accounts/#unfollow */
const unfollowController: AppController = async (c) => {
- const sourcePubkey = c.get('pubkey')!;
+ const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
- { kinds: [3], authors: [sourcePubkey] },
+ { kinds: [3], authors: [sourcePubkey], limit: 1 },
(tags) => deleteTag(tags, ['p', targetPubkey]),
c,
);
@@ -311,12 +328,22 @@ const followingController: AppController = async (c) => {
};
/** https://docs.joinmastodon.org/methods/accounts/#block */
-const blockController: AppController = async (c) => {
- const sourcePubkey = c.get('pubkey')!;
+const blockController: AppController = (c) => {
+ return c.json({ error: 'Blocking is not supported by Nostr' }, 422);
+};
+
+/** https://docs.joinmastodon.org/methods/accounts/#unblock */
+const unblockController: AppController = (c) => {
+ return c.json({ error: 'Blocking is not supported by Nostr' }, 422);
+};
+
+/** https://docs.joinmastodon.org/methods/accounts/#mute */
+const muteController: AppController = async (c) => {
+ const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
- { kinds: [10000], authors: [sourcePubkey] },
+ { kinds: [10000], authors: [sourcePubkey], limit: 1 },
(tags) => addTag(tags, ['p', targetPubkey]),
c,
);
@@ -325,13 +352,13 @@ const blockController: AppController = async (c) => {
return c.json(relationship);
};
-/** https://docs.joinmastodon.org/methods/accounts/#unblock */
-const unblockController: AppController = async (c) => {
- const sourcePubkey = c.get('pubkey')!;
+/** https://docs.joinmastodon.org/methods/accounts/#unmute */
+const unmuteController: AppController = async (c) => {
+ const sourcePubkey = await c.get('signer')?.getPublicKey()!;
const targetPubkey = c.req.param('pubkey');
await updateListEvent(
- { kinds: [10000], authors: [sourcePubkey] },
+ { kinds: [10000], authors: [sourcePubkey], limit: 1 },
(tags) => deleteTag(tags, ['p', targetPubkey]),
c,
);
@@ -341,11 +368,13 @@ const unblockController: AppController = async (c) => {
};
const favouritesController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const params = paginationSchema.parse(c.req.query());
const { signal } = c.req.raw;
- const events7 = await eventsDB.query(
+ const store = await Storages.db();
+
+ const events7 = await store.query(
[{ kinds: [7], authors: [pubkey], ...params }],
{ signal },
);
@@ -354,10 +383,14 @@ const favouritesController: AppController = async (c) => {
.map((event) => event.tags.find((tag) => tag[0] === 'e')?.[1])
.filter((id): id is string => !!id);
- const events1 = await eventsDB.query([{ kinds: [1], ids }], { signal })
- .then((events) => hydrateEvents({ events, storage: eventsDB, signal }));
+ const events1 = await store.query([{ kinds: [1], ids }], { signal })
+ .then((events) => hydrateEvents({ events, store, signal }));
- const statuses = await Promise.all(events1.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })));
+ const viewerPubkey = await c.get('signer')?.getPublicKey();
+
+ const statuses = await Promise.all(
+ events1.map((event) => renderStatus(event, { viewerPubkey })),
+ );
return paginated(c, events1, statuses);
};
@@ -372,9 +405,11 @@ export {
followController,
followersController,
followingController,
+ muteController,
relationshipsController,
unblockController,
unfollowController,
+ unmuteController,
updateCredentialsController,
verifyCredentialsController,
};
diff --git a/src/controllers/api/admin.ts b/src/controllers/api/admin.ts
index 990c0fc..77571aa 100644
--- a/src/controllers/api/admin.ts
+++ b/src/controllers/api/admin.ts
@@ -2,10 +2,12 @@ import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
+import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { booleanParamSchema } from '@/schema.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
+import { addTag } from '@/tags.ts';
+import { paginated, paginationSchema, parseBody, updateListAdminEvent } from '@/utils/api.ts';
import { renderAdminAccount } from '@/views/mastodon/admin-accounts.ts';
-import { paginated, paginationSchema } from '@/utils/api.ts';
const adminAccountQuerySchema = z.object({
local: booleanParamSchema.optional(),
@@ -38,16 +40,17 @@ const adminAccountsController: AppController = async (c) => {
return c.json([]);
}
+ const store = await Storages.db();
const { since, until, limit } = paginationSchema.parse(c.req.query());
const { signal } = c.req.raw;
- const events = await eventsDB.query([{ kinds: [30361], authors: [Conf.pubkey], since, until, limit }], { signal });
+ const events = await store.query([{ kinds: [30361], authors: [Conf.pubkey], since, until, limit }], { signal });
const pubkeys = events.map((event) => event.tags.find(([name]) => name === 'd')?.[1]!);
- const authors = await eventsDB.query([{ kinds: [0], authors: pubkeys }], { signal });
+ const authors = await store.query([{ kinds: [0], authors: pubkeys }], { signal });
for (const event of events) {
const d = event.tags.find(([name]) => name === 'd')?.[1];
- event.d_author = authors.find((author) => author.pubkey === d);
+ (event as DittoEvent).d_author = authors.find((author) => author.pubkey === d);
}
const accounts = await Promise.all(
@@ -57,4 +60,32 @@ const adminAccountsController: AppController = async (c) => {
return paginated(c, events, accounts);
};
-export { adminAccountsController };
+const adminAccountActionSchema = z.object({
+ type: z.enum(['none', 'sensitive', 'disable', 'silence', 'suspend']),
+});
+
+const adminAccountAction: AppController = async (c) => {
+ const body = await parseBody(c.req.raw);
+ const result = adminAccountActionSchema.safeParse(body);
+ const authorId = c.req.param('id');
+
+ if (!result.success) {
+ return c.json({ error: 'This action is not allowed' }, 403);
+ }
+
+ const { data } = result;
+
+ if (data.type !== 'disable') {
+ return c.json({ error: 'Record invalid' }, 422);
+ }
+
+ await updateListAdminEvent(
+ { kinds: [10000], authors: [Conf.pubkey], limit: 1 },
+ (tags) => addTag(tags, ['p', authorId]),
+ c,
+ );
+
+ return c.json({}, 200);
+};
+
+export { adminAccountAction, adminAccountsController };
diff --git a/src/controllers/api/blocks.ts b/src/controllers/api/blocks.ts
index d54773a..b006a1d 100644
--- a/src/controllers/api/blocks.ts
+++ b/src/controllers/api/blocks.ts
@@ -1,24 +1,6 @@
-import { type AppController } from '@/app.ts';
-import { eventsDB } from '@/storages.ts';
-import { getTagSet } from '@/tags.ts';
-import { renderAccounts } from '@/views.ts';
+import { AppController } from '@/app.ts';
/** https://docs.joinmastodon.org/methods/blocks/#get */
-const blocksController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
- const { signal } = c.req.raw;
-
- const [event10000] = await eventsDB.query(
- [{ kinds: [10000], authors: [pubkey], limit: 1 }],
- { signal },
- );
-
- if (event10000) {
- const pubkeys = getTagSet(event10000.tags, 'p');
- return renderAccounts(c, [...pubkeys].reverse());
- } else {
- return c.json([]);
- }
+export const blocksController: AppController = (c) => {
+ return c.json({ error: 'Blocking is not supported by Nostr' }, 422);
};
-
-export { blocksController };
diff --git a/src/controllers/api/bookmarks.ts b/src/controllers/api/bookmarks.ts
index 16e87e7..7655182 100644
--- a/src/controllers/api/bookmarks.ts
+++ b/src/controllers/api/bookmarks.ts
@@ -1,14 +1,15 @@
import { type AppController } from '@/app.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { getTagSet } from '@/tags.ts';
import { renderStatuses } from '@/views.ts';
/** https://docs.joinmastodon.org/methods/bookmarks/#get */
const bookmarksController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const store = await Storages.db();
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const { signal } = c.req.raw;
- const [event10003] = await eventsDB.query(
+ const [event10003] = await store.query(
[{ kinds: [10003], authors: [pubkey], limit: 1 }],
{ signal },
);
diff --git a/src/controllers/api/ditto.ts b/src/controllers/api/ditto.ts
index 425dcfb..df4f210 100644
--- a/src/controllers/api/ditto.ts
+++ b/src/controllers/api/ditto.ts
@@ -3,19 +3,22 @@ import { z } from 'zod';
import { AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
+const markerSchema = z.enum(['read', 'write']);
+
const relaySchema = z.object({
url: z.string().url(),
- read: z.boolean(),
- write: z.boolean(),
+ marker: markerSchema.optional(),
});
type RelayEntity = z.infer<typeof relaySchema>;
export const adminRelaysController: AppController = async (c) => {
- const [event] = await eventsDB.query([
+ const store = await Storages.db();
+
+ const [event] = await store.query([
{ kinds: [10002], authors: [Conf.pubkey], limit: 1 },
]);
@@ -27,16 +30,17 @@ export const adminRelaysController: AppController = async (c) => {
};
export const adminSetRelaysController: AppController = async (c) => {
+ const store = await Storages.db();
const relays = relaySchema.array().parse(await c.req.json());
const event = await new AdminSigner().signEvent({
kind: 10002,
- tags: relays.map(({ url, read, write }) => ['r', url, read && write ? '' : read ? 'read' : 'write']),
+ tags: relays.map(({ url, marker }) => marker ? ['r', url, marker] : ['r', url]),
content: '',
created_at: Math.floor(Date.now() / 1000),
});
- await eventsDB.event(event);
+ await store.event(event);
return c.json(renderRelays(event));
};
@@ -47,8 +51,7 @@ function renderRelays(event: NostrEvent): RelayEntity[] {
if (name === 'r') {
const relay: RelayEntity = {
url,
- read: !marker || marker === 'read',
- write: !marker || marker === 'write',
+ marker: markerSchema.safeParse(marker).success ? marker as 'read' | 'write' : undefined,
};
acc.push(relay);
}
diff --git a/src/controllers/api/instance.ts b/src/controllers/api/instance.ts
index 1355330..5f949b0 100644
--- a/src/controllers/api/instance.ts
+++ b/src/controllers/api/instance.ts
@@ -1,23 +1,20 @@
-import { type AppController } from '@/app.ts';
+import { AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
-import { jsonServerMetaSchema } from '@/schemas/nostr.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
+import { getInstanceMetadata } from '@/utils/instance.ts';
const instanceController: AppController = async (c) => {
const { host, protocol } = Conf.url;
- const { signal } = c.req.raw;
-
- const [event] = await eventsDB.query([{ kinds: [0], authors: [Conf.pubkey], limit: 1 }], { signal });
- const meta = jsonServerMetaSchema.parse(event?.content);
+ const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
/** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
return c.json({
uri: host,
- title: meta.name ?? 'Ditto',
- description: meta.about ?? 'Nostr and the Fediverse',
- short_description: meta.tagline ?? meta.about ?? 'Nostr and the Fediverse',
+ title: meta.name,
+ description: meta.about,
+ short_description: meta.tagline,
registrations: true,
max_toot_chars: Conf.postCharLimit,
configuration: {
@@ -43,6 +40,7 @@ const instanceController: AppController = async (c) => {
'mastodon_api_streaming',
'exposable_reactions',
'quote_posting',
+ 'v2_suggestions',
],
},
},
@@ -56,7 +54,7 @@ const instanceController: AppController = async (c) => {
streaming_api: `${wsProtocol}//${host}`,
},
version: '0.0.0 (compatible; Ditto 0.0.1)',
- email: meta.email ?? `postmaster@${host}`,
+ email: meta.email,
nostr: {
pubkey: Conf.pubkey,
relay: `${wsProtocol}//${host}/relay`,
diff --git a/src/controllers/api/markers.ts b/src/controllers/api/markers.ts
new file mode 100644
index 0000000..005ebbe
--- /dev/null
+++ b/src/controllers/api/markers.ts
@@ -0,0 +1,64 @@
+import { z } from 'zod';
+
+import { AppController } from '@/app.ts';
+import { parseBody } from '@/utils/api.ts';
+
+const kv = await Deno.openKv();
+
+type Timeline = 'home' | 'notifications';
+
+interface Marker {
+ last_read_id: string;
+ version: number;
+ updated_at: string;
+}
+
+export const markersController: AppController = async (c) => {
+ const pubkey = await c.get('signer')?.getPublicKey()!;
+ const timelines = c.req.queries('timeline[]') ?? [];
+
+  const results = await kv.getMany<Marker[]>(
+ timelines.map((timeline) => ['markers', pubkey, timeline]),
+ );
+
+  const marker = results.reduce<Record<string, Marker>>((acc, { key, value }) => {
+ if (value) {
+ const timeline = key[key.length - 1] as string;
+ acc[timeline] = value;
+ }
+ return acc;
+ }, {});
+
+ return c.json(marker);
+};
+
+const markerDataSchema = z.object({
+ last_read_id: z.string(),
+});
+
+export const updateMarkersController: AppController = async (c) => {
+ const pubkey = await c.get('signer')?.getPublicKey()!;
+ const record = z.record(z.enum(['home', 'notifications']), markerDataSchema).parse(await parseBody(c.req.raw));
+ const timelines = Object.keys(record) as Timeline[];
+
+  const markers: Record<string, Marker> = {};
+
+  const entries = await kv.getMany<Marker[]>(
+ timelines.map((timeline) => ['markers', pubkey, timeline]),
+ );
+
+ for (const timeline of timelines) {
+ const last = entries.find(({ key }) => key[key.length - 1] === timeline);
+
+ const marker: Marker = {
+ last_read_id: record[timeline]!.last_read_id,
+ version: last?.value ? last.value.version + 1 : 1,
+ updated_at: new Date().toISOString(),
+ };
+
+ await kv.set(['markers', pubkey, timeline], marker);
+ markers[timeline] = marker;
+ }
+
+ return c.json(markers);
+};
diff --git a/src/controllers/api/media.ts b/src/controllers/api/media.ts
index dd36a53..71b3e78 100644
--- a/src/controllers/api/media.ts
+++ b/src/controllers/api/media.ts
@@ -4,7 +4,7 @@ import { AppController } from '@/app.ts';
import { fileSchema } from '@/schema.ts';
import { parseBody } from '@/utils/api.ts';
import { renderAttachment } from '@/views/mastodon/attachments.ts';
-import { uploadFile } from '@/upload.ts';
+import { uploadFile } from '@/utils/upload.ts';
const mediaBodySchema = z.object({
file: fileSchema,
@@ -14,7 +14,7 @@ const mediaBodySchema = z.object({
});
const mediaController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const result = mediaBodySchema.safeParse(await parseBody(c.req.raw));
const { signal } = c.req.raw;
@@ -24,7 +24,7 @@ const mediaController: AppController = async (c) => {
try {
const { file, description } = result.data;
- const media = await uploadFile(file, { pubkey, description }, signal);
+ const media = await uploadFile(c, file, { pubkey, description }, signal);
return c.json(renderAttachment(media));
} catch (e) {
console.error(e);
diff --git a/src/controllers/api/mutes.ts b/src/controllers/api/mutes.ts
new file mode 100644
index 0000000..4afb6c4
--- /dev/null
+++ b/src/controllers/api/mutes.ts
@@ -0,0 +1,25 @@
+import { type AppController } from '@/app.ts';
+import { Storages } from '@/storages.ts';
+import { getTagSet } from '@/tags.ts';
+import { renderAccounts } from '@/views.ts';
+
+/** https://docs.joinmastodon.org/methods/mutes/#get */
+const mutesController: AppController = async (c) => {
+ const store = await Storages.db();
+ const pubkey = await c.get('signer')?.getPublicKey()!;
+ const { signal } = c.req.raw;
+
+ const [event10000] = await store.query(
+ [{ kinds: [10000], authors: [pubkey], limit: 1 }],
+ { signal },
+ );
+
+ if (event10000) {
+ const pubkeys = getTagSet(event10000.tags, 'p');
+ return renderAccounts(c, [...pubkeys].reverse());
+ } else {
+ return c.json([]);
+ }
+};
+
+export { mutesController };
diff --git a/src/controllers/api/notifications.ts b/src/controllers/api/notifications.ts
index 703e79f..ba15bd0 100644
--- a/src/controllers/api/notifications.ts
+++ b/src/controllers/api/notifications.ts
@@ -1,20 +1,40 @@
-import { type AppController } from '@/app.ts';
-import { eventsDB } from '@/storages.ts';
+import { NostrFilter } from '@nostrify/nostrify';
+
+import { AppContext, AppController } from '@/app.ts';
+import { hydrateEvents } from '@/storages/hydrate.ts';
import { paginated, paginationSchema } from '@/utils/api.ts';
import { renderNotification } from '@/views/mastodon/notifications.ts';
const notificationsController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const { since, until } = paginationSchema.parse(c.req.query());
- const { signal } = c.req.raw;
- const events = await eventsDB.query(
- [{ kinds: [1], '#p': [pubkey], since, until }],
- { signal },
- );
-
- const statuses = await Promise.all(events.map((event) => renderNotification(event, pubkey)));
- return paginated(c, events, statuses);
+ return renderNotifications(c, [{ kinds: [1, 6, 7], '#p': [pubkey], since, until }]);
};
+async function renderNotifications(c: AppContext, filters: NostrFilter[]) {
+ const store = c.get('store');
+ const pubkey = await c.get('signer')?.getPublicKey()!;
+ const { signal } = c.req.raw;
+
+ const events = await store
+ .query(filters, { signal })
+ .then((events) => events.filter((event) => event.pubkey !== pubkey))
+ .then((events) => hydrateEvents({ events, store, signal }));
+
+ if (!events.length) {
+ return c.json([]);
+ }
+
+ const notifications = (await Promise
+ .all(events.map((event) => renderNotification(event, { viewerPubkey: pubkey }))))
+ .filter(Boolean);
+
+ if (!notifications.length) {
+ return c.json([]);
+ }
+
+ return paginated(c, events, notifications);
+}
+
export { notificationsController };
diff --git a/src/controllers/api/oauth.ts b/src/controllers/api/oauth.ts
index 7ada2a4..a755a4d 100644
--- a/src/controllers/api/oauth.ts
+++ b/src/controllers/api/oauth.ts
@@ -1,9 +1,12 @@
+import { encodeBase64 } from '@std/encoding/base64';
+import { nip19 } from 'nostr-tools';
import { z } from 'zod';
-import { lodash, nip19 } from '@/deps.ts';
import { AppController } from '@/app.ts';
+import { lodash } from '@/deps.ts';
import { nostrNow } from '@/utils.ts';
import { parseBody } from '@/utils/api.ts';
+import { getClientConnectUri } from '@/utils/connect.ts';
const passwordGrantSchema = z.object({
grant_type: z.literal('password'),
@@ -59,25 +62,16 @@ const createTokenController: AppController = async (c) => {
};
/** Display the OAuth form. */
-const oauthController: AppController = (c) => {
+const oauthController: AppController = async (c) => {
const encodedUri = c.req.query('redirect_uri');
if (!encodedUri) {
return c.text('Missing `redirect_uri` query param.', 422);
}
const redirectUri = maybeDecodeUri(encodedUri);
+ const connectUri = await getClientConnectUri(c.req.raw.signal);
- c.res.headers.set(
- 'content-security-policy',
- "default-src 'self' 'sha256-m2qD6rbE2Ixbo2Bjy2dgQebcotRIAawW7zbmXItIYAM='",
- );
-
- return c.html(`
-
-
- Log in with Ditto
-
-
+ `;
+
+ const hash = encodeBase64(await crypto.subtle.digest('SHA-256', new TextEncoder().encode(script)));
+
+ c.res.headers.set(
+ 'content-security-policy',
+ `default-src 'self' 'sha256-${hash}'`,
+ );
+
+ return c.html(`
+
+
+ Log in with Ditto
+
+
+
+ Nostr Connect
`);
diff --git a/src/controllers/api/pleroma.ts b/src/controllers/api/pleroma.ts
index 64984d7..3bbdd70 100644
--- a/src/controllers/api/pleroma.ts
+++ b/src/controllers/api/pleroma.ts
@@ -1,15 +1,16 @@
+import { NSchema as n, NStore } from '@nostrify/nostrify';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
import { configSchema, elixirTupleSchema, type PleromaConfig } from '@/schemas/pleroma-api.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { createAdminEvent } from '@/utils/api.ts';
-import { jsonSchema } from '@/schema.ts';
const frontendConfigController: AppController = async (c) => {
- const configs = await getConfigs(c.req.raw.signal);
+ const store = await Storages.db();
+ const configs = await getConfigs(store, c.req.raw.signal);
const frontendConfig = configs.find(({ group, key }) => group === ':pleroma' && key === ':frontend_configurations');
if (frontendConfig) {
@@ -25,7 +26,8 @@ const frontendConfigController: AppController = async (c) => {
};
const configController: AppController = async (c) => {
- const configs = await getConfigs(c.req.raw.signal);
+ const store = await Storages.db();
+ const configs = await getConfigs(store, c.req.raw.signal);
return c.json({ configs, need_reboot: false });
};
@@ -33,7 +35,8 @@ const configController: AppController = async (c) => {
const updateConfigController: AppController = async (c) => {
const { pubkey } = Conf;
- const configs = await getConfigs(c.req.raw.signal);
+ const store = await Storages.db();
+ const configs = await getConfigs(store, c.req.raw.signal);
const { configs: newConfigs } = z.object({ configs: z.array(configSchema) }).parse(await c.req.json());
for (const { group, key, value } of newConfigs) {
@@ -63,10 +66,10 @@ const pleromaAdminDeleteStatusController: AppController = async (c) => {
return c.json({});
};
-async function getConfigs(signal: AbortSignal): Promise {
+async function getConfigs(store: NStore, signal: AbortSignal): Promise {
const { pubkey } = Conf;
- const [event] = await eventsDB.query([{
+ const [event] = await store.query([{
kinds: [30078],
authors: [pubkey],
'#d': ['pub.ditto.pleroma.config'],
@@ -75,7 +78,7 @@ async function getConfigs(signal: AbortSignal): Promise {
try {
const decrypted = await new AdminSigner().nip44.decrypt(Conf.pubkey, event.content);
- return jsonSchema.pipe(configSchema.array()).catch([]).parse(decrypted);
+ return n.json().pipe(configSchema.array()).catch([]).parse(decrypted);
} catch (_e) {
return [];
}
diff --git a/src/controllers/api/reports.ts b/src/controllers/api/reports.ts
new file mode 100644
index 0000000..9cb2627
--- /dev/null
+++ b/src/controllers/api/reports.ts
@@ -0,0 +1,121 @@
+import { NSchema as n } from '@nostrify/nostrify';
+import { z } from 'zod';
+
+import { type AppController } from '@/app.ts';
+import { Conf } from '@/config.ts';
+import { createAdminEvent, createEvent, parseBody } from '@/utils/api.ts';
+import { hydrateEvents } from '@/storages/hydrate.ts';
+import { renderAdminReport } from '@/views/mastodon/reports.ts';
+import { renderReport } from '@/views/mastodon/reports.ts';
+
+const reportSchema = z.object({
+ account_id: n.id(),
+ status_ids: n.id().array().default([]),
+ comment: z.string().max(1000).default(''),
+ category: z.string().default('other'),
+  // TODO: rule_ids[] is not implemented
+});
+
+/** https://docs.joinmastodon.org/methods/reports/#post */
+const reportController: AppController = async (c) => {
+ const store = c.get('store');
+ const body = await parseBody(c.req.raw);
+ const result = reportSchema.safeParse(body);
+
+ if (!result.success) {
+ return c.json(result.error, 422);
+ }
+
+ const {
+ account_id,
+ status_ids,
+ comment,
+ category,
+ } = result.data;
+
+ const tags = [
+ ['p', account_id, category],
+ ['P', Conf.pubkey],
+ ];
+
+ for (const status of status_ids) {
+ tags.push(['e', status, category]);
+ }
+
+ const event = await createEvent({
+ kind: 1984,
+ content: comment,
+ tags,
+ }, c);
+
+ await hydrateEvents({ events: [event], store });
+ return c.json(await renderReport(event));
+};
+
+/** https://docs.joinmastodon.org/methods/admin/reports/#get */
+const adminReportsController: AppController = async (c) => {
+ const store = c.get('store');
+ const viewerPubkey = await c.get('signer')?.getPublicKey();
+
+ const reports = await store.query([{ kinds: [1984], '#P': [Conf.pubkey] }])
+ .then((events) => hydrateEvents({ store, events: events, signal: c.req.raw.signal }))
+ .then((events) =>
+ Promise.all(
+ events.map((event) => renderAdminReport(event, { viewerPubkey })),
+ )
+ );
+
+ return c.json(reports);
+};
+
+/** https://docs.joinmastodon.org/methods/admin/reports/#get-one */
+const adminReportController: AppController = async (c) => {
+ const eventId = c.req.param('id');
+ const { signal } = c.req.raw;
+ const store = c.get('store');
+ const pubkey = await c.get('signer')?.getPublicKey();
+
+ const [event] = await store.query([{
+ kinds: [1984],
+ ids: [eventId],
+ limit: 1,
+ }], { signal });
+
+ if (!event) {
+ return c.json({ error: 'This action is not allowed' }, 403);
+ }
+
+ await hydrateEvents({ events: [event], store, signal });
+
+ return c.json(await renderAdminReport(event, { viewerPubkey: pubkey }));
+};
+
+/** https://docs.joinmastodon.org/methods/admin/reports/#resolve */
+const adminReportResolveController: AppController = async (c) => {
+ const eventId = c.req.param('id');
+ const { signal } = c.req.raw;
+ const store = c.get('store');
+ const pubkey = await c.get('signer')?.getPublicKey();
+
+ const [event] = await store.query([{
+ kinds: [1984],
+ ids: [eventId],
+ limit: 1,
+ }], { signal });
+
+ if (!event) {
+ return c.json({ error: 'This action is not allowed' }, 403);
+ }
+
+ await hydrateEvents({ events: [event], store, signal });
+
+ await createAdminEvent({
+ kind: 5,
+ tags: [['e', event.id]],
+ content: 'Report closed.',
+ }, c);
+
+ return c.json(await renderAdminReport(event, { viewerPubkey: pubkey, actionTaken: true }));
+};
+
+export { adminReportController, adminReportResolveController, adminReportsController, reportController };
diff --git a/src/controllers/api/search.ts b/src/controllers/api/search.ts
index 2facd80..0151f7d 100644
--- a/src/controllers/api/search.ts
+++ b/src/controllers/api/search.ts
@@ -1,11 +1,10 @@
-import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
+import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
+import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { AppController } from '@/app.ts';
-import { nip19 } from '@/deps.ts';
import { booleanParamSchema } from '@/schema.ts';
-import { nostrIdSchema } from '@/schemas/nostr.ts';
-import { searchStore } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { dedupeEvents } from '@/utils.ts';
import { nip05Cache } from '@/utils/nip05.ts';
import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
@@ -20,7 +19,7 @@ const searchQuerySchema = z.object({
type: z.enum(['accounts', 'statuses', 'hashtags']).optional(),
resolve: booleanParamSchema.optional().transform(Boolean),
following: z.boolean().default(false),
- account_id: nostrIdSchema.optional(),
+ account_id: n.id().optional(),
limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
});
@@ -44,6 +43,7 @@ const searchController: AppController = async (c) => {
}
const results = dedupeEvents(events);
+ const viewerPubkey = await c.get('signer')?.getPublicKey();
const [accounts, statuses] = await Promise.all([
Promise.all(
@@ -55,7 +55,7 @@ const searchController: AppController = async (c) => {
Promise.all(
results
.filter((event) => event.kind === 1)
- .map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') }))
+ .map((event) => renderStatus(event, { viewerPubkey }))
.filter(Boolean),
),
]);
@@ -78,7 +78,7 @@ const searchController: AppController = async (c) => {
};
/** Get events for the search params. */
-function searchEvents({ q, type, limit, account_id }: SearchQuery, signal: AbortSignal): Promise {
+async function searchEvents({ q, type, limit, account_id }: SearchQuery, signal: AbortSignal): Promise {
if (type === 'hashtags') return Promise.resolve([]);
const filter: NostrFilter = {
@@ -91,8 +91,10 @@ function searchEvents({ q, type, limit, account_id }: SearchQuery, signal: Abort
filter.authors = [account_id];
}
- return searchStore.query([filter], { signal })
- .then((events) => hydrateEvents({ events, storage: searchStore, signal }));
+ const store = await Storages.search();
+
+ return store.query([filter], { signal })
+ .then((events) => hydrateEvents({ events, store, signal }));
}
/** Get event kinds to search from `type` query param. */
@@ -110,9 +112,10 @@ function typeToKinds(type: SearchQuery['type']): number[] {
/** Resolve a searched value into an event, if applicable. */
async function lookupEvent(query: SearchQuery, signal: AbortSignal): Promise {
const filters = await getLookupFilters(query, signal);
+ const store = await Storages.search();
- return searchStore.query(filters, { limit: 1, signal })
- .then((events) => hydrateEvents({ events, storage: searchStore, signal }))
+ return store.query(filters, { limit: 1, signal })
+ .then((events) => hydrateEvents({ events, store, signal }))
.then(([event]) => event);
}
diff --git a/src/controllers/api/statuses.ts b/src/controllers/api/statuses.ts
index d257d98..291d970 100644
--- a/src/controllers/api/statuses.ts
+++ b/src/controllers/api/statuses.ts
@@ -1,21 +1,22 @@
-import { NIP05, NostrEvent, NostrFilter } from '@nostrify/nostrify';
+import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
+import ISO6391 from 'iso-639-1';
+import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
import { getUnattachedMediaByIds } from '@/db/unattached-media.ts';
-import { ISO6391, nip19 } from '@/deps.ts';
import { getAncestors, getAuthor, getDescendants, getEvent } from '@/queries.ts';
-import { jsonMetaContentSchema } from '@/schemas/nostr.ts';
import { addTag, deleteTag } from '@/tags.ts';
import { createEvent, paginationSchema, parseBody, updateListEvent } from '@/utils/api.ts';
import { renderEventAccounts } from '@/views.ts';
import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
import { getLnurl } from '@/utils/lnurl.ts';
-import { nip05Cache } from '@/utils/nip05.ts';
import { asyncReplaceAll } from '@/utils/text.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
+import { lookupPubkey } from '@/utils/lookup.ts';
const createStatusSchema = z.object({
in_reply_to_id: z.string().regex(/[0-9a-f]{64}/).nullish(),
@@ -31,6 +32,7 @@ const createStatusSchema = z.object({
sensitive: z.boolean().nullish(),
spoiler_text: z.string().nullish(),
status: z.string().nullish(),
+ to: z.string().array().nullish(),
visibility: z.enum(['public', 'unlisted', 'private', 'direct']).nullish(),
quote_id: z.string().nullish(),
}).refine(
@@ -47,7 +49,7 @@ const statusController: AppController = async (c) => {
});
if (event) {
- return c.json(await renderStatus(event, { viewerPubkey: c.get('pubkey') }));
+ return c.json(await renderStatus(event, { viewerPubkey: await c.get('signer')?.getPublicKey() }));
}
return c.json({ error: 'Event not found.' }, 404);
@@ -56,6 +58,7 @@ const statusController: AppController = async (c) => {
const createStatusController: AppController = async (c) => {
const body = await parseBody(c.req.raw);
const result = createStatusSchema.safeParse(body);
+ const kysely = await DittoDB.getInstance();
if (!result.success) {
return c.json({ error: 'Bad request', schema: result.error }, 400);
@@ -89,45 +92,58 @@ const createStatusController: AppController = async (c) => {
tags.push(['subject', data.spoiler_text]);
}
- if (data.media_ids?.length) {
- const media = await getUnattachedMediaByIds(data.media_ids)
- .then((media) => media.filter(({ pubkey }) => pubkey === c.get('pubkey')))
- .then((media) => media.map(({ url, data }) => ['media', url, data]));
+ const media = data.media_ids?.length ? await getUnattachedMediaByIds(kysely, data.media_ids) : [];
- tags.push(...media);
- }
+ const imeta: string[][] = media.map(({ data }) => {
+ const values: string[] = data.map((tag) => tag.join(' '));
+ return ['imeta', ...values];
+ });
+
+ tags.push(...imeta);
+
+ const pubkeys = new Set();
const content = await asyncReplaceAll(data.status ?? '', /@([\w@+._]+)/g, async (match, username) => {
+ const pubkey = await lookupPubkey(username);
+ if (!pubkey) return match;
+
+ // Content addressing (default)
+ if (!data.to) {
+ pubkeys.add(pubkey);
+ }
+
try {
- const result = nip19.decode(username);
- if (result.type === 'npub') {
- tags.push(['p', result.data]);
- return `nostr:${username}`;
- } else {
- return match;
- }
- } catch (_e) {
- // do nothing
+ return `nostr:${nip19.npubEncode(pubkey)}`;
+ } catch {
+ return match;
}
-
- if (NIP05.regex().test(username)) {
- const pointer = await nip05Cache.fetch(username);
- if (pointer) {
- tags.push(['p', pointer.pubkey]);
- return `nostr:${nip19.npubEncode(pointer.pubkey)}`;
- }
- }
-
- return match;
});
+ // Explicit addressing
+ for (const to of data.to ?? []) {
+ const pubkey = await lookupPubkey(to);
+ if (pubkey) {
+ pubkeys.add(pubkey);
+ }
+ }
+
+ for (const pubkey of pubkeys) {
+ tags.push(['p', pubkey]);
+ }
+
for (const match of content.matchAll(/#(\w+)/g)) {
tags.push(['t', match[1]]);
}
+ const mediaUrls: string[] = media
+ .map(({ data }) => data.find(([name]) => name === 'url')?.[1])
+ .filter((url): url is string => Boolean(url));
+
+ const mediaCompat: string = mediaUrls.length ? ['', '', ...mediaUrls].join('\n') : '';
+
const event = await createEvent({
kind: 1,
- content,
+ content: content + mediaCompat,
tags,
}, c);
@@ -136,17 +152,17 @@ const createStatusController: AppController = async (c) => {
if (data.quote_id) {
await hydrateEvents({
events: [event],
- storage: eventsDB,
+ store: await Storages.db(),
signal: c.req.raw.signal,
});
}
- return c.json(await renderStatus({ ...event, author }, { viewerPubkey: c.get('pubkey') }));
+ return c.json(await renderStatus({ ...event, author }, { viewerPubkey: await c.get('signer')?.getPublicKey() }));
};
const deleteStatusController: AppController = async (c) => {
const id = c.req.param('id');
- const pubkey = c.get('pubkey');
+ const pubkey = await c.get('signer')?.getPublicKey();
const event = await getEvent(id, { signal: c.req.raw.signal });
@@ -170,9 +186,12 @@ const deleteStatusController: AppController = async (c) => {
const contextController: AppController = async (c) => {
const id = c.req.param('id');
const event = await getEvent(id, { kind: 1, relations: ['author', 'event_stats', 'author_stats'] });
+ const viewerPubkey = await c.get('signer')?.getPublicKey();
async function renderStatuses(events: NostrEvent[]) {
- const statuses = await Promise.all(events.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })));
+ const statuses = await Promise.all(
+ events.map((event) => renderStatus(event, { viewerPubkey })),
+ );
return statuses.filter(Boolean);
}
@@ -202,7 +221,7 @@ const favouriteController: AppController = async (c) => {
],
}, c);
- const status = await renderStatus(target, { viewerPubkey: c.get('pubkey') });
+ const status = await renderStatus(target, { viewerPubkey: await c.get('signer')?.getPublicKey() });
if (status) {
status.favourited = true;
@@ -241,11 +260,11 @@ const reblogStatusController: AppController = async (c) => {
await hydrateEvents({
events: [reblogEvent],
- storage: eventsDB,
+ store: await Storages.db(),
signal: signal,
});
- const status = await renderReblog(reblogEvent, { viewerPubkey: c.get('pubkey') });
+ const status = await renderReblog(reblogEvent, { viewerPubkey: await c.get('signer')?.getPublicKey() });
return c.json(status);
};
@@ -253,23 +272,28 @@ const reblogStatusController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unreblog */
const unreblogStatusController: AppController = async (c) => {
const eventId = c.req.param('id');
- const pubkey = c.get('pubkey') as string;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
+ const store = await Storages.db();
- const event = await getEvent(eventId, {
- kind: 1,
- });
- if (!event) return c.json({ error: 'Event not found.' }, 404);
+ const [event] = await store.query([{ ids: [eventId], kinds: [1] }]);
+ if (!event) {
+ return c.json({ error: 'Record not found' }, 404);
+ }
- const filters: NostrFilter[] = [{ kinds: [6], authors: [pubkey], '#e': [event.id] }];
- const [repostedEvent] = await eventsDB.query(filters, { limit: 1 });
- if (!repostedEvent) return c.json({ error: 'Event not found.' }, 404);
+ const [repostedEvent] = await store.query(
+ [{ kinds: [6], authors: [pubkey], '#e': [event.id], limit: 1 }],
+ );
+
+ if (!repostedEvent) {
+ return c.json({ error: 'Record not found' }, 404);
+ }
await createEvent({
kind: 5,
tags: [['e', repostedEvent.id]],
}, c);
- return c.json(await renderStatus(event, {}));
+ return c.json(await renderStatus(event, { viewerPubkey: pubkey }));
};
const rebloggedByController: AppController = (c) => {
@@ -280,7 +304,7 @@ const rebloggedByController: AppController = (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#bookmark */
const bookmarkController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
@@ -290,7 +314,7 @@ const bookmarkController: AppController = async (c) => {
if (event) {
await updateListEvent(
- { kinds: [10003], authors: [pubkey] },
+ { kinds: [10003], authors: [pubkey], limit: 1 },
(tags) => addTag(tags, ['e', eventId]),
c,
);
@@ -307,7 +331,7 @@ const bookmarkController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unbookmark */
const unbookmarkController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
@@ -317,7 +341,7 @@ const unbookmarkController: AppController = async (c) => {
if (event) {
await updateListEvent(
- { kinds: [10003], authors: [pubkey] },
+ { kinds: [10003], authors: [pubkey], limit: 1 },
(tags) => deleteTag(tags, ['e', eventId]),
c,
);
@@ -334,7 +358,7 @@ const unbookmarkController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#pin */
const pinController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const eventId = c.req.param('id');
const event = await getEvent(eventId, {
@@ -344,7 +368,7 @@ const pinController: AppController = async (c) => {
if (event) {
await updateListEvent(
- { kinds: [10001], authors: [pubkey] },
+ { kinds: [10001], authors: [pubkey], limit: 1 },
(tags) => addTag(tags, ['e', eventId]),
c,
);
@@ -361,7 +385,7 @@ const pinController: AppController = async (c) => {
/** https://docs.joinmastodon.org/methods/statuses/#unpin */
const unpinController: AppController = async (c) => {
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const eventId = c.req.param('id');
const { signal } = c.req.raw;
@@ -373,7 +397,7 @@ const unpinController: AppController = async (c) => {
if (event) {
await updateListEvent(
- { kinds: [10001], authors: [pubkey] },
+ { kinds: [10001], authors: [pubkey], limit: 1 },
(tags) => deleteTag(tags, ['e', eventId]),
c,
);
@@ -405,7 +429,7 @@ const zapController: AppController = async (c) => {
const target = await getEvent(id, { kind: 1, relations: ['author', 'event_stats', 'author_stats'], signal });
const author = target?.author;
- const meta = jsonMetaContentSchema.parse(author?.content);
+ const meta = n.json().pipe(n.metadata()).catch({}).parse(author?.content);
const lnurl = getLnurl(meta);
if (target && lnurl) {
@@ -421,7 +445,7 @@ const zapController: AppController = async (c) => {
],
}, c);
- const status = await renderStatus(target, { viewerPubkey: c.get('pubkey') });
+ const status = await renderStatus(target, { viewerPubkey: await c.get('signer')?.getPublicKey() });
status.zapped = true;
return c.json(status);
diff --git a/src/controllers/api/streaming.ts b/src/controllers/api/streaming.ts
index 668218d..e3852d9 100644
--- a/src/controllers/api/streaming.ts
+++ b/src/controllers/api/streaming.ts
@@ -1,15 +1,15 @@
import { NostrFilter } from '@nostrify/nostrify';
+import Debug from '@soapbox/stickynotes/debug';
import { z } from 'zod';
import { type AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
-import { Debug } from '@/deps.ts';
+import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';
import { getFeedPubkeys } from '@/queries.ts';
+import { hydrateEvents } from '@/storages/hydrate.ts';
+import { Storages } from '@/storages.ts';
import { bech32ToPubkey } from '@/utils.ts';
import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
-import { hydrateEvents } from '@/storages/hydrate.ts';
-import { eventsDB } from '@/storages.ts';
-import { Storages } from '@/storages.ts';
const debug = Debug('ditto:streaming');
@@ -69,11 +69,24 @@ const streamingController: AppController = (c) => {
if (!filter) return;
try {
- for await (const msg of Storages.pubsub.req([filter], { signal: controller.signal })) {
+ const pubsub = await Storages.pubsub();
+ const optimizer = await Storages.optimizer();
+
+ for await (const msg of pubsub.req([filter], { signal: controller.signal })) {
if (msg[0] === 'EVENT') {
- const [event] = await hydrateEvents({
- events: [msg[2]],
- storage: eventsDB,
+ const event = msg[2];
+
+ if (pubkey) {
+ const policy = new MuteListPolicy(pubkey, await Storages.admin());
+ const [, , ok] = await policy.call(event);
+ if (!ok) {
+ continue;
+ }
+ }
+
+ await hydrateEvents({
+ events: [event],
+ store: optimizer,
signal: AbortSignal.timeout(1000),
});
diff --git a/src/controllers/api/suggestions.ts b/src/controllers/api/suggestions.ts
new file mode 100644
index 0000000..6377bd4
--- /dev/null
+++ b/src/controllers/api/suggestions.ts
@@ -0,0 +1,51 @@
+import { NStore } from '@nostrify/nostrify';
+
+import { AppController } from '@/app.ts';
+import { Conf } from '@/config.ts';
+import { getTagSet } from '@/tags.ts';
+import { hydrateEvents } from '@/storages/hydrate.ts';
+import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
+
+export const suggestionsV1Controller: AppController = async (c) => {
+ const store = c.get('store');
+ const signal = c.req.raw.signal;
+ const accounts = await renderSuggestedAccounts(store, signal);
+
+ return c.json(accounts);
+};
+
+export const suggestionsV2Controller: AppController = async (c) => {
+ const store = c.get('store');
+ const signal = c.req.raw.signal;
+ const accounts = await renderSuggestedAccounts(store, signal);
+
+ const suggestions = accounts.map((account) => ({
+ source: 'staff',
+ account,
+ }));
+
+ return c.json(suggestions);
+};
+
+async function renderSuggestedAccounts(store: NStore, signal?: AbortSignal) {
+ const [follows] = await store.query(
+ [{ kinds: [3], authors: [Conf.pubkey], limit: 1 }],
+ { signal },
+ );
+
+ // TODO: pagination
+ const pubkeys = [...getTagSet(follows?.tags ?? [], 'p')].slice(0, 20);
+
+ const profiles = await store.query(
+ [{ kinds: [0], authors: pubkeys, limit: pubkeys.length }],
+ { signal },
+ )
+ .then((events) => hydrateEvents({ events, store, signal }));
+
+ const accounts = await Promise.all(pubkeys.map((pubkey) => {
+ const profile = profiles.find((event) => event.pubkey === pubkey);
+ return profile ? renderAccount(profile) : accountFromPubkey(pubkey);
+ }));
+
+ return accounts.filter(Boolean);
+}
diff --git a/src/controllers/api/timelines.ts b/src/controllers/api/timelines.ts
index 191fce7..8ea66ba 100644
--- a/src/controllers/api/timelines.ts
+++ b/src/controllers/api/timelines.ts
@@ -1,4 +1,4 @@
-import { NostrFilter, NStore } from '@nostrify/nostrify';
+import { NostrFilter } from '@nostrify/nostrify';
import { z } from 'zod';
import { type AppContext, type AppController } from '@/app.ts';
@@ -11,7 +11,7 @@ import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
const homeTimelineController: AppController = async (c) => {
const params = paginationSchema.parse(c.req.query());
- const pubkey = c.get('pubkey')!;
+ const pubkey = await c.get('signer')?.getPublicKey()!;
const authors = await getFeedPubkeys(pubkey);
return renderStatuses(c, [{ authors, kinds: [1, 6], ...params }]);
};
@@ -37,7 +37,7 @@ const publicTimelineController: AppController = (c) => {
};
const hashtagTimelineController: AppController = (c) => {
- const hashtag = c.req.param('hashtag')!;
+ const hashtag = c.req.param('hashtag')!.toLowerCase();
const params = paginationSchema.parse(c.req.query());
return renderStatuses(c, [{ kinds: [1], '#t': [hashtag], ...params }]);
};
@@ -45,28 +45,24 @@ const hashtagTimelineController: AppController = (c) => {
/** Render statuses for timelines. */
async function renderStatuses(c: AppContext, filters: NostrFilter[]) {
const { signal } = c.req.raw;
- const store = c.get('store') as NStore;
+ const store = c.get('store');
const events = await store
.query(filters, { signal })
- .then((events) =>
- hydrateEvents({
- events,
- storage: store,
- signal,
- })
- );
+ .then((events) => hydrateEvents({ events, store, signal }));
if (!events.length) {
return c.json([]);
}
+ const viewerPubkey = await c.get('signer')?.getPublicKey();
+
const statuses = (await Promise.all(events.map((event) => {
if (event.kind === 6) {
- return renderReblog(event, { viewerPubkey: c.get('pubkey') });
+ return renderReblog(event, { viewerPubkey });
}
- return renderStatus(event, { viewerPubkey: c.get('pubkey') });
- }))).filter((boolean) => boolean);
+ return renderStatus(event, { viewerPubkey });
+ }))).filter(Boolean);
if (!statuses.length) {
return c.json([]);
diff --git a/src/controllers/nostr/relay-info.ts b/src/controllers/nostr/relay-info.ts
index 9d24644..bbce7d3 100644
--- a/src/controllers/nostr/relay-info.ts
+++ b/src/controllers/nostr/relay-info.ts
@@ -1,18 +1,17 @@
import { AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
-import { jsonServerMetaSchema } from '@/schemas/nostr.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
+import { getInstanceMetadata } from '@/utils/instance.ts';
const relayInfoController: AppController = async (c) => {
- const { signal } = c.req.raw;
- const [event] = await eventsDB.query([{ kinds: [0], authors: [Conf.pubkey], limit: 1 }], { signal });
- const meta = jsonServerMetaSchema.parse(event?.content);
+ const store = await Storages.db();
+ const meta = await getInstanceMetadata(store, c.req.raw.signal);
return c.json({
- name: meta.name ?? 'Ditto',
- description: meta.about ?? 'Nostr and the Fediverse.',
+ name: meta.name,
+ description: meta.about,
pubkey: Conf.pubkey,
- contact: `mailto:${meta.email ?? `postmaster@${Conf.url.host}`}`,
+ contact: meta.email,
supported_nips: [1, 5, 9, 11, 16, 45, 50, 46, 98],
software: 'Ditto',
version: '0.0.0',
diff --git a/src/controllers/nostr/relay.ts b/src/controllers/nostr/relay.ts
index 3db72c3..259f5e9 100644
--- a/src/controllers/nostr/relay.ts
+++ b/src/controllers/nostr/relay.ts
@@ -1,15 +1,16 @@
-import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
-import { relayInfoController } from '@/controllers/nostr/relay-info.ts';
-import { eventsDB } from '@/storages.ts';
-import * as pipeline from '@/pipeline.ts';
import {
- type ClientCLOSE,
- type ClientCOUNT,
- type ClientEVENT,
- type ClientMsg,
- clientMsgSchema,
- type ClientREQ,
-} from '@/schemas/nostr.ts';
+ NostrClientCLOSE,
+ NostrClientCOUNT,
+ NostrClientEVENT,
+ NostrClientMsg,
+ NostrClientREQ,
+ NostrEvent,
+ NostrFilter,
+ NSchema as n,
+} from '@nostrify/nostrify';
+import { relayInfoController } from '@/controllers/nostr/relay-info.ts';
+import * as pipeline from '@/pipeline.ts';
+import { RelayError } from '@/RelayError.ts';
import { Storages } from '@/storages.ts';
import type { AppController } from '@/app.ts';
@@ -31,7 +32,7 @@ function connectStream(socket: WebSocket) {
const controllers = new Map();
socket.onmessage = (e) => {
- const result = n.json().pipe(clientMsgSchema).safeParse(e.data);
+ const result = n.json().pipe(n.clientMsg()).safeParse(e.data);
if (result.success) {
handleMsg(result.data);
} else {
@@ -46,7 +47,7 @@ function connectStream(socket: WebSocket) {
};
/** Handle client message. */
- function handleMsg(msg: ClientMsg) {
+ function handleMsg(msg: NostrClientMsg) {
switch (msg[0]) {
case 'REQ':
handleReq(msg);
@@ -64,21 +65,24 @@ function connectStream(socket: WebSocket) {
}
/** Handle REQ. Start a subscription. */
- async function handleReq([_, subId, ...rest]: ClientREQ): Promise {
+ async function handleReq([_, subId, ...rest]: NostrClientREQ): Promise {
const filters = prepareFilters(rest);
const controller = new AbortController();
controllers.get(subId)?.abort();
controllers.set(subId, controller);
- for (const event of await eventsDB.query(filters, { limit: FILTER_LIMIT })) {
+ const db = await Storages.db();
+ const pubsub = await Storages.pubsub();
+
+ for (const event of await db.query(filters, { limit: FILTER_LIMIT })) {
send(['EVENT', subId, event]);
}
send(['EOSE', subId]);
try {
- for await (const msg of Storages.pubsub.req(filters, { signal: controller.signal })) {
+ for await (const msg of pubsub.req(filters, { signal: controller.signal })) {
if (msg[0] === 'EVENT') {
send(['EVENT', subId, msg[2]]);
}
@@ -89,13 +93,13 @@ function connectStream(socket: WebSocket) {
}
/** Handle EVENT. Store the event. */
- async function handleEvent([_, event]: ClientEVENT): Promise {
+ async function handleEvent([_, event]: NostrClientEVENT): Promise {
try {
// This will store it (if eligible) and run other side-effects.
await pipeline.handleEvent(event, AbortSignal.timeout(1000));
send(['OK', event.id, true, '']);
} catch (e) {
- if (e instanceof pipeline.RelayError) {
+ if (e instanceof RelayError) {
send(['OK', event.id, false, e.message]);
} else {
send(['OK', event.id, false, 'error: something went wrong']);
@@ -105,7 +109,7 @@ function connectStream(socket: WebSocket) {
}
/** Handle CLOSE. Close the subscription. */
- function handleClose([_, subId]: ClientCLOSE): void {
+ function handleClose([_, subId]: NostrClientCLOSE): void {
const controller = controllers.get(subId);
if (controller) {
controller.abort();
@@ -114,8 +118,9 @@ function connectStream(socket: WebSocket) {
}
/** Handle COUNT. Return the number of events matching the filters. */
- async function handleCount([_, subId, ...rest]: ClientCOUNT): Promise {
- const { count } = await eventsDB.count(prepareFilters(rest));
+ async function handleCount([_, subId, ...rest]: NostrClientCOUNT): Promise {
+ const store = await Storages.db();
+ const { count } = await store.count(prepareFilters(rest));
send(['COUNT', subId, { count, approximate: false }]);
}
@@ -128,7 +133,7 @@ function connectStream(socket: WebSocket) {
}
/** Enforce the filters with certain criteria. */
-function prepareFilters(filters: ClientREQ[2][]): NostrFilter[] {
+function prepareFilters(filters: NostrClientREQ[2][]): NostrFilter[] {
return filters.map((filter) => {
const narrow = Boolean(filter.ids?.length || filter.authors?.length);
const search = narrow ? filter.search : `domain:${Conf.url.host} ${filter.search ?? ''}`;
diff --git a/src/controllers/well-known/nostr.ts b/src/controllers/well-known/nostr.ts
index f1ebb6b..0669888 100644
--- a/src/controllers/well-known/nostr.ts
+++ b/src/controllers/well-known/nostr.ts
@@ -12,7 +12,7 @@ const nameSchema = z.string().min(1).regex(/^\w+$/);
const nostrController: AppController = async (c) => {
const result = nameSchema.safeParse(c.req.query('name'));
const name = result.success ? result.data : undefined;
- const pointer = name ? await localNip05Lookup(name) : undefined;
+ const pointer = name ? await localNip05Lookup(c.get('store'), name) : undefined;
if (!name || !pointer) {
return c.json({ names: {}, relays: {} });
diff --git a/src/controllers/well-known/webfinger.ts b/src/controllers/well-known/webfinger.ts
index 9a24576..c1c8b81 100644
--- a/src/controllers/well-known/webfinger.ts
+++ b/src/controllers/well-known/webfinger.ts
@@ -1,7 +1,7 @@
+import { nip19 } from 'nostr-tools';
import { z } from 'zod';
import { Conf } from '@/config.ts';
-import { nip19 } from '@/deps.ts';
import { localNip05Lookup } from '@/utils/nip05.ts';
import type { AppContext, AppController } from '@/app.ts';
@@ -45,7 +45,7 @@ async function handleAcct(c: AppContext, resource: URL): Promise {
}
const [username, host] = result.data;
- const pointer = await localNip05Lookup(username);
+ const pointer = await localNip05Lookup(c.get('store'), username);
if (!pointer) {
return c.json({ error: 'Not found' }, 404);
diff --git a/src/cron.ts b/src/cron.ts
deleted file mode 100644
index bfaf773..0000000
--- a/src/cron.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { deleteUnattachedMediaByUrl, getUnattachedMedia } from '@/db/unattached-media.ts';
-import { cron } from '@/deps.ts';
-import { Time } from '@/utils/time.ts';
-import { configUploader as uploader } from '@/uploaders/config.ts';
-import { cidFromUrl } from '@/utils/ipfs.ts';
-
-/** Delete files that aren't attached to any events. */
-async function cleanupMedia() {
- console.info('Deleting orphaned media files...');
-
- const until = new Date(Date.now() - Time.minutes(15));
- const media = await getUnattachedMedia(until);
-
- for (const { url } of media) {
- const cid = cidFromUrl(new URL(url))!;
- try {
- await uploader.delete(cid);
- await deleteUnattachedMediaByUrl(url);
- } catch (e) {
- console.error(`Failed to delete file ${url}`);
- console.error(e);
- }
- }
-
- console.info(`Removed ${media?.length ?? 0} orphaned media files.`);
-}
-
-await cleanupMedia();
-cron.every15Minute(cleanupMedia);
diff --git a/src/db.ts b/src/db.ts
deleted file mode 100644
index 7125f13..0000000
--- a/src/db.ts
+++ /dev/null
@@ -1,40 +0,0 @@
-import fs from 'node:fs/promises';
-import path from 'node:path';
-
-import { DittoDB } from '@/db/DittoDB.ts';
-import { FileMigrationProvider, Migrator } from '@/deps.ts';
-
-const db = await DittoDB.getInstance();
-
-const migrator = new Migrator({
- db,
- provider: new FileMigrationProvider({
- fs,
- path,
- migrationFolder: new URL(import.meta.resolve('./db/migrations')).pathname,
- }),
-});
-
-/** Migrate the database to the latest version. */
-async function migrate() {
- console.info('Running migrations...');
- const results = await migrator.migrateToLatest();
-
- if (results.error) {
- console.error(results.error);
- Deno.exit(1);
- } else {
- if (!results.results?.length) {
- console.info('Everything up-to-date.');
- } else {
- console.info('Migrations finished!');
- for (const { migrationName, status } of results.results!) {
- console.info(` - ${migrationName}: ${status}`);
- }
- }
- }
-}
-
-await migrate();
-
-export { db };
diff --git a/src/db/DittoDB.ts b/src/db/DittoDB.ts
index 8ebe5e6..68fdc62 100644
--- a/src/db/DittoDB.ts
+++ b/src/db/DittoDB.ts
@@ -1,21 +1,71 @@
+import fs from 'node:fs/promises';
+import path from 'node:path';
+
+import { FileMigrationProvider, Kysely, Migrator } from 'kysely';
+
import { Conf } from '@/config.ts';
import { DittoPostgres } from '@/db/adapters/DittoPostgres.ts';
import { DittoSQLite } from '@/db/adapters/DittoSQLite.ts';
import { DittoTables } from '@/db/DittoTables.ts';
-import { Kysely } from '@/deps.ts';
export class DittoDB {
+ private static kysely: Promise> | undefined;
+
static getInstance(): Promise> {
+ if (!this.kysely) {
+ this.kysely = this._getInstance();
+ }
+ return this.kysely;
+ }
+
+ static async _getInstance(): Promise> {
const { databaseUrl } = Conf;
+ let kysely: Kysely;
+
switch (databaseUrl.protocol) {
case 'sqlite:':
- return DittoSQLite.getInstance();
+ kysely = await DittoSQLite.getInstance();
+ break;
case 'postgres:':
case 'postgresql:':
- return DittoPostgres.getInstance();
+ kysely = await DittoPostgres.getInstance();
+ break;
default:
throw new Error('Unsupported database URL.');
}
+
+ await this.migrate(kysely);
+
+ return kysely;
+ }
+
+ /** Migrate the database to the latest version. */
+ static async migrate(kysely: Kysely) {
+ const migrator = new Migrator({
+ db: kysely,
+ provider: new FileMigrationProvider({
+ fs,
+ path,
+ migrationFolder: new URL(import.meta.resolve('../db/migrations')).pathname,
+ }),
+ });
+
+ console.info('Running migrations...');
+ const results = await migrator.migrateToLatest();
+
+ if (results.error) {
+ console.error(results.error);
+ Deno.exit(1);
+ } else {
+ if (!results.results?.length) {
+ console.info('Everything up-to-date.');
+ } else {
+ console.info('Migrations finished!');
+ for (const { migrationName, status } of results.results!) {
+ console.info(` - ${migrationName}: ${status}`);
+ }
+ }
+ }
}
}
diff --git a/src/db/DittoTables.ts b/src/db/DittoTables.ts
index 79fec5d..42d39ea 100644
--- a/src/db/DittoTables.ts
+++ b/src/db/DittoTables.ts
@@ -1,8 +1,7 @@
export interface DittoTables {
- events: EventRow;
- events_fts: EventFTSRow;
- tags: TagRow;
- relays: RelayRow;
+ nostr_events: EventRow;
+ nostr_tags: TagRow;
+ nostr_fts5: EventFTSRow;
unattached_media: UnattachedMediaRow;
author_stats: AuthorStatsRow;
event_stats: EventStatsRow;
@@ -31,24 +30,17 @@ interface EventRow {
created_at: number;
tags: string;
sig: string;
- deleted_at: number | null;
}
interface EventFTSRow {
- id: string;
+ event_id: string;
content: string;
}
interface TagRow {
- tag: string;
- value: string;
event_id: string;
-}
-
-interface RelayRow {
- url: string;
- domain: string;
- active: boolean;
+ name: string;
+ value: string;
}
interface UnattachedMediaRow {
diff --git a/src/db/KyselyLogger.ts b/src/db/KyselyLogger.ts
new file mode 100644
index 0000000..e39cbd0
--- /dev/null
+++ b/src/db/KyselyLogger.ts
@@ -0,0 +1,18 @@
+import { Stickynotes } from '@soapbox/stickynotes';
+import { Logger } from 'kysely';
+
+/** Log the SQL for queries. */
+export const KyselyLogger: Logger = (event) => {
+ if (event.level === 'query') {
+ const console = new Stickynotes('ditto:sql');
+
+ const { query, queryDurationMillis } = event;
+ const { sql, parameters } = query;
+
+ console.debug(
+ sql,
+ JSON.stringify(parameters),
+ `\x1b[90m(${(queryDurationMillis / 1000).toFixed(2)}s)\x1b[0m`,
+ );
+ }
+};
diff --git a/src/db/adapters/DittoPostgres.ts b/src/db/adapters/DittoPostgres.ts
index f8a5112..d0abbf9 100644
--- a/src/db/adapters/DittoPostgres.ts
+++ b/src/db/adapters/DittoPostgres.ts
@@ -1,7 +1,9 @@
import { Kysely, PostgresAdapter, PostgresIntrospector, PostgresQueryCompiler } from 'kysely';
import { PostgreSQLDriver } from 'kysely_deno_postgres';
+import { Conf } from '@/config.ts';
import { DittoTables } from '@/db/DittoTables.ts';
+import { KyselyLogger } from '@/db/KyselyLogger.ts';
export class DittoPostgres {
static db: Kysely | undefined;
@@ -16,9 +18,10 @@ export class DittoPostgres {
},
// @ts-ignore mismatched kysely versions probably
createDriver() {
- return new PostgreSQLDriver({
- connectionString: Deno.env.get('DATABASE_URL'),
- });
+ return new PostgreSQLDriver(
+ { connectionString: Deno.env.get('DATABASE_URL') },
+ Conf.pg.poolSize,
+ );
},
createIntrospector(db: Kysely) {
return new PostgresIntrospector(db);
@@ -27,6 +30,7 @@ export class DittoPostgres {
return new PostgresQueryCompiler();
},
},
+ log: KyselyLogger,
});
}
diff --git a/src/db/adapters/DittoSQLite.ts b/src/db/adapters/DittoSQLite.ts
index c91407a..fe225a2 100644
--- a/src/db/adapters/DittoSQLite.ts
+++ b/src/db/adapters/DittoSQLite.ts
@@ -1,6 +1,9 @@
+import { PolySqliteDialect } from '@soapbox/kysely-deno-sqlite';
+import { Kysely, sql } from 'kysely';
+
import { Conf } from '@/config.ts';
import { DittoTables } from '@/db/DittoTables.ts';
-import { Kysely, PolySqliteDialect, sql } from '@/deps.ts';
+import { KyselyLogger } from '@/db/KyselyLogger.ts';
import SqliteWorker from '@/workers/sqlite.ts';
export class DittoSQLite {
@@ -15,6 +18,7 @@ export class DittoSQLite {
dialect: new PolySqliteDialect({
database: sqliteWorker,
}),
+ log: KyselyLogger,
});
// Set PRAGMA values.
diff --git a/src/db/migrations/000_create_events.ts b/src/db/migrations/000_create_events.ts
index 158551b..f08a614 100644
--- a/src/db/migrations/000_create_events.ts
+++ b/src/db/migrations/000_create_events.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/001_add_relays.ts b/src/db/migrations/001_add_relays.ts
index 1415f5f..11c6884 100644
--- a/src/db/migrations/001_add_relays.ts
+++ b/src/db/migrations/001_add_relays.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/002_events_fts.ts b/src/db/migrations/002_events_fts.ts
index 9324195..ffaf5fb 100644
--- a/src/db/migrations/002_events_fts.ts
+++ b/src/db/migrations/002_events_fts.ts
@@ -1,5 +1,6 @@
+import { Kysely, sql } from 'kysely';
+
import { Conf } from '@/config.ts';
-import { Kysely, sql } from '@/deps.ts';
export async function up(db: Kysely): Promise {
if (Conf.databaseUrl.protocol === 'sqlite:') {
diff --git a/src/db/migrations/003_events_admin.ts b/src/db/migrations/003_events_admin.ts
index 8469fc2..388a3a4 100644
--- a/src/db/migrations/003_events_admin.ts
+++ b/src/db/migrations/003_events_admin.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(_db: Kysely): Promise {
}
diff --git a/src/db/migrations/004_add_user_indexes.ts b/src/db/migrations/004_add_user_indexes.ts
index 929181c..fca9c5f 100644
--- a/src/db/migrations/004_add_user_indexes.ts
+++ b/src/db/migrations/004_add_user_indexes.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(_db: Kysely): Promise {
}
diff --git a/src/db/migrations/005_rework_tags.ts b/src/db/migrations/005_rework_tags.ts
index f274670..1f95810 100644
--- a/src/db/migrations/005_rework_tags.ts
+++ b/src/db/migrations/005_rework_tags.ts
@@ -1,4 +1,4 @@
-import { Kysely, sql } from '@/deps.ts';
+import { Kysely, sql } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/006_pragma.ts b/src/db/migrations/006_pragma.ts
index 2639e81..f20ee9b 100644
--- a/src/db/migrations/006_pragma.ts
+++ b/src/db/migrations/006_pragma.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(_db: Kysely): Promise {
}
diff --git a/src/db/migrations/007_unattached_media.ts b/src/db/migrations/007_unattached_media.ts
index 1887111..a36c5d3 100644
--- a/src/db/migrations/007_unattached_media.ts
+++ b/src/db/migrations/007_unattached_media.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/008_wal.ts b/src/db/migrations/008_wal.ts
index 2639e81..f20ee9b 100644
--- a/src/db/migrations/008_wal.ts
+++ b/src/db/migrations/008_wal.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(_db: Kysely): Promise {
}
diff --git a/src/db/migrations/009_add_stats.ts b/src/db/migrations/009_add_stats.ts
index 60d9447..ef1c443 100644
--- a/src/db/migrations/009_add_stats.ts
+++ b/src/db/migrations/009_add_stats.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/010_drop_users.ts b/src/db/migrations/010_drop_users.ts
index 6cd83c0..c36f2fa 100644
--- a/src/db/migrations/010_drop_users.ts
+++ b/src/db/migrations/010_drop_users.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema.dropTable('users').ifExists().execute();
diff --git a/src/db/migrations/011_kind_author_index.ts b/src/db/migrations/011_kind_author_index.ts
index da21988..c41910b 100644
--- a/src/db/migrations/011_kind_author_index.ts
+++ b/src/db/migrations/011_kind_author_index.ts
@@ -1,10 +1,10 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
.createIndex('idx_events_kind_pubkey_created_at')
.on('events')
- .columns(['kind', 'pubkey', 'created_at'])
+ .columns(['kind', 'pubkey', 'created_at desc'])
.execute();
}
diff --git a/src/db/migrations/012_tags_composite_index.ts b/src/db/migrations/012_tags_composite_index.ts
index 8769289..412fa59 100644
--- a/src/db/migrations/012_tags_composite_index.ts
+++ b/src/db/migrations/012_tags_composite_index.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema.dropIndex('idx_tags_tag').execute();
diff --git a/src/db/migrations/013_soft_deletion.ts b/src/db/migrations/013_soft_deletion.ts
index 3856ca0..df19da5 100644
--- a/src/db/migrations/013_soft_deletion.ts
+++ b/src/db/migrations/013_soft_deletion.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema.alterTable('events').addColumn('deleted_at', 'integer').execute();
diff --git a/src/db/migrations/014_stats_indexes.ts.ts b/src/db/migrations/014_stats_indexes.ts.ts
index d9071c6..0f27a7f 100644
--- a/src/db/migrations/014_stats_indexes.ts.ts
+++ b/src/db/migrations/014_stats_indexes.ts.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema.createIndex('idx_author_stats_pubkey').on('author_stats').column('pubkey').execute();
diff --git a/src/db/migrations/015_add_pubkey_domains.ts b/src/db/migrations/015_add_pubkey_domains.ts
index 0b5fe29..4b7e23c 100644
--- a/src/db/migrations/015_add_pubkey_domains.ts
+++ b/src/db/migrations/015_add_pubkey_domains.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/016_pubkey_domains_updated_at.ts b/src/db/migrations/016_pubkey_domains_updated_at.ts
index 3a000c1..8b1f75d 100644
--- a/src/db/migrations/016_pubkey_domains_updated_at.ts
+++ b/src/db/migrations/016_pubkey_domains_updated_at.ts
@@ -1,4 +1,4 @@
-import { Kysely } from '@/deps.ts';
+import { Kysely } from 'kysely';
export async function up(db: Kysely): Promise {
await db.schema
diff --git a/src/db/migrations/017_rm_relays.ts b/src/db/migrations/017_rm_relays.ts
new file mode 100644
index 0000000..70a274d
--- /dev/null
+++ b/src/db/migrations/017_rm_relays.ts
@@ -0,0 +1,14 @@
+import { Kysely } from 'kysely';
+
+export async function up(db: Kysely): Promise {
+ await db.schema.dropTable('relays').execute();
+}
+
+export async function down(db: Kysely): Promise {
+ await db.schema
+ .createTable('relays')
+ .addColumn('url', 'text', (col) => col.primaryKey())
+ .addColumn('domain', 'text', (col) => col.notNull())
+ .addColumn('active', 'boolean', (col) => col.notNull())
+ .execute();
+}
diff --git a/src/db/migrations/018_events_created_at_kind_index.ts b/src/db/migrations/018_events_created_at_kind_index.ts
new file mode 100644
index 0000000..8e6c67c
--- /dev/null
+++ b/src/db/migrations/018_events_created_at_kind_index.ts
@@ -0,0 +1,14 @@
+import { Kysely } from 'kysely';
+
+export async function up(db: Kysely): Promise {
+ await db.schema
+ .createIndex('idx_events_created_at_kind')
+ .on('events')
+ .columns(['created_at desc', 'kind'])
+ .ifNotExists()
+ .execute();
+}
+
+export async function down(db: Kysely): Promise {
+ await db.schema.dropIndex('idx_events_created_at_kind').ifExists().execute();
+}
diff --git a/src/db/migrations/019_ndatabase_schema.ts b/src/db/migrations/019_ndatabase_schema.ts
new file mode 100644
index 0000000..94378f0
--- /dev/null
+++ b/src/db/migrations/019_ndatabase_schema.ts
@@ -0,0 +1,25 @@
+import { Kysely, sql } from 'kysely';
+
+import { Conf } from '@/config.ts';
+
+export async function up(db: Kysely): Promise {
+ await db.schema.alterTable('events').renameTo('nostr_events').execute();
+ await db.schema.alterTable('tags').renameTo('nostr_tags').execute();
+ await db.schema.alterTable('nostr_tags').renameColumn('tag', 'name').execute();
+
+ if (Conf.databaseUrl.protocol === 'sqlite:') {
+ await db.schema.dropTable('events_fts').execute();
+ await sql`CREATE VIRTUAL TABLE nostr_fts5 USING fts5(event_id, content)`.execute(db);
+ }
+}
+
+export async function down(db: Kysely): Promise {
+ await db.schema.alterTable('nostr_events').renameTo('events').execute();
+ await db.schema.alterTable('nostr_tags').renameTo('tags').execute();
+ await db.schema.alterTable('tags').renameColumn('name', 'tag').execute();
+
+ if (Conf.databaseUrl.protocol === 'sqlite:') {
+ await db.schema.dropTable('nostr_fts5').execute();
+ await sql`CREATE VIRTUAL TABLE events_fts USING fts5(id, content)`.execute(db);
+ }
+}
diff --git a/src/db/migrations/020_drop_deleted_at.ts b/src/db/migrations/020_drop_deleted_at.ts
new file mode 100644
index 0000000..4894b9f
--- /dev/null
+++ b/src/db/migrations/020_drop_deleted_at.ts
@@ -0,0 +1,10 @@
+import { Kysely } from 'kysely';
+
+export async function up(db: Kysely): Promise {
+ await db.deleteFrom('nostr_events').where('deleted_at', 'is not', null).execute();
+ await db.schema.alterTable('nostr_events').dropColumn('deleted_at').execute();
+}
+
+export async function down(db: Kysely): Promise {
+ await db.schema.alterTable('nostr_events').addColumn('deleted_at', 'integer').execute();
+}
diff --git a/src/db/migrations/020_pgfts.ts b/src/db/migrations/020_pgfts.ts
new file mode 100644
index 0000000..8b3cfa0
--- /dev/null
+++ b/src/db/migrations/020_pgfts.ts
@@ -0,0 +1,19 @@
+import { Kysely, sql } from 'kysely';
+
+import { Conf } from '@/config.ts';
+
+export async function up(db: Kysely): Promise {
+ if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
+ await db.schema.createTable('nostr_pgfts')
+ .ifNotExists()
+ .addColumn('event_id', 'text', (c) => c.primaryKey().references('nostr_events.id').onDelete('cascade'))
+ .addColumn('search_vec', sql`tsvector`, (c) => c.notNull())
+ .execute();
+ }
+}
+
+export async function down(db: Kysely): Promise {
+ if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
+ await db.schema.dropTable('nostr_pgfts').ifExists().execute();
+ }
+}
diff --git a/src/db/relays.ts b/src/db/relays.ts
deleted file mode 100644
index 836f520..0000000
--- a/src/db/relays.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { tldts } from '@/deps.ts';
-import { db } from '@/db.ts';
-
-interface AddRelaysOpts {
- active?: boolean;
-}
-
-/** Inserts relays into the database, skipping duplicates. */
-function addRelays(relays: `wss://${string}`[], opts: AddRelaysOpts = {}) {
- if (!relays.length) return Promise.resolve();
- const { active = false } = opts;
-
- const values = relays.map((url) => ({
- url: new URL(url).toString(),
- domain: tldts.getDomain(url)!,
- active,
- }));
-
- return db.insertInto('relays')
- .values(values)
- .onConflict((oc) => oc.column('url').doNothing())
- .execute();
-}
-
-/** Get a list of all known active relay URLs. */
-async function getActiveRelays(): Promise {
- const rows = await db
- .selectFrom('relays')
- .select('relays.url')
- .where('relays.active', '=', true)
- .execute();
-
- return rows.map((row) => row.url);
-}
-
-export { addRelays, getActiveRelays };
diff --git a/src/db/unattached-media.ts b/src/db/unattached-media.ts
index 3761947..0e0aeea 100644
--- a/src/db/unattached-media.ts
+++ b/src/db/unattached-media.ts
@@ -1,33 +1,30 @@
-import { db } from '@/db.ts';
-import { uuid62 } from '@/deps.ts';
-import { type MediaData } from '@/schemas/nostr.ts';
+import { Kysely } from 'kysely';
+
+import { DittoDB } from '@/db/DittoDB.ts';
+import { DittoTables } from '@/db/DittoTables.ts';
interface UnattachedMedia {
id: string;
pubkey: string;
url: string;
- data: MediaData;
+ /** NIP-94 tags. */
+ data: string[][];
uploaded_at: number;
}
/** Add unattached media into the database. */
-async function insertUnattachedMedia(media: Omit) {
- const result = {
- id: uuid62.v4(),
- uploaded_at: Date.now(),
- ...media,
- };
-
- await db.insertInto('unattached_media')
- .values({ ...result, data: JSON.stringify(media.data) })
+async function insertUnattachedMedia(media: UnattachedMedia) {
+ const kysely = await DittoDB.getInstance();
+ await kysely.insertInto('unattached_media')
+ .values({ ...media, data: JSON.stringify(media.data) })
.execute();
- return result;
+ return media;
}
/** Select query for unattached media. */
-function selectUnattachedMediaQuery() {
- return db.selectFrom('unattached_media')
+function selectUnattachedMediaQuery(kysely: Kysely) {
+ return kysely.selectFrom('unattached_media')
.select([
'unattached_media.id',
'unattached_media.pubkey',
@@ -38,30 +35,40 @@ function selectUnattachedMediaQuery() {
}
/** Find attachments that exist but aren't attached to any events. */
-function getUnattachedMedia(until: Date) {
- return selectUnattachedMediaQuery()
- .leftJoin('tags', 'unattached_media.url', 'tags.value')
+function getUnattachedMedia(kysely: Kysely, until: Date) {
+ return selectUnattachedMediaQuery(kysely)
+ .leftJoin('nostr_tags', 'unattached_media.url', 'nostr_tags.value')
.where('uploaded_at', '<', until.getTime())
.execute();
}
/** Delete unattached media by URL. */
-function deleteUnattachedMediaByUrl(url: string) {
- return db.deleteFrom('unattached_media')
+async function deleteUnattachedMediaByUrl(url: string) {
+ const kysely = await DittoDB.getInstance();
+ return kysely.deleteFrom('unattached_media')
.where('url', '=', url)
.execute();
}
/** Get unattached media by IDs. */
-function getUnattachedMediaByIds(ids: string[]) {
- return selectUnattachedMediaQuery()
+async function getUnattachedMediaByIds(kysely: Kysely, ids: string[]): Promise {
+ if (!ids.length) return [];
+
+ const results = await selectUnattachedMediaQuery(kysely)
.where('id', 'in', ids)
.execute();
+
+ return results.map((row) => ({
+ ...row,
+ data: JSON.parse(row.data),
+ }));
}
/** Delete rows as an event with media is being created. */
-function deleteAttachedMedia(pubkey: string, urls: string[]) {
- return db.deleteFrom('unattached_media')
+async function deleteAttachedMedia(pubkey: string, urls: string[]): Promise {
+ if (!urls.length) return;
+ const kysely = await DittoDB.getInstance();
+ await kysely.deleteFrom('unattached_media')
.where('pubkey', '=', pubkey)
.where('url', 'in', urls)
.execute();
diff --git a/src/db/users.ts b/src/db/users.ts
index 61c7341..bf0cab7 100644
--- a/src/db/users.ts
+++ b/src/db/users.ts
@@ -1,9 +1,10 @@
import { NostrFilter } from '@nostrify/nostrify';
+import Debug from '@soapbox/stickynotes/debug';
+
import { Conf } from '@/config.ts';
-import { Debug } from '@/deps.ts';
import * as pipeline from '@/pipeline.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
const debug = Debug('ditto:users');
@@ -59,7 +60,8 @@ async function findUser(user: Partial, signal?: AbortSignal): Promise isReplaceableKind(kind))
- ? filter.authors.length * filter.kinds.length
- : Infinity,
- );
-}
-
/** Returns true if the filter could potentially return any stored events at all. */
function canFilter(filter: NostrFilter): boolean {
return getFilterLimit(filter) > 0;
diff --git a/src/firehose.ts b/src/firehose.ts
index 98cb4db..2c776fe 100644
--- a/src/firehose.ts
+++ b/src/firehose.ts
@@ -1,28 +1,28 @@
-import { NostrEvent } from '@nostrify/nostrify';
-import { Debug } from '@/deps.ts';
-import { activeRelays, pool } from '@/pool.ts';
+import { Stickynotes } from '@soapbox/stickynotes';
+
+import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';
import * as pipeline from './pipeline.ts';
-const debug = Debug('ditto:firehose');
+const console = new Stickynotes('ditto:firehose');
-// This file watches events on all known relays and performs
-// side-effects based on them, such as trending hashtag tracking
-// and storing events for notifications and the home feed.
-pool.subscribe(
- [{ kinds: [0, 1, 3, 5, 6, 7, 9735, 10002], limit: 0, since: nostrNow() }],
- activeRelays,
- handleEvent,
- undefined,
- undefined,
-);
+/**
+ * This function watches events on all known relays and performs
+ * side-effects based on them, such as trending hashtag tracking
+ * and storing events for notifications and the home feed.
+ */
+export async function startFirehose() {
+ const store = await Storages.client();
-/** Handle events through the firehose pipeline. */
-function handleEvent(event: NostrEvent): Promise {
- debug(`NostrEvent<${event.kind}> ${event.id}`);
+ for await (const msg of store.req([{ kinds: [0, 1, 3, 5, 6, 7, 9735, 10002], limit: 0, since: nostrNow() }])) {
+ if (msg[0] === 'EVENT') {
+ const event = msg[2];
+ console.debug(`NostrEvent<${event.kind}> ${event.id}`);
- return pipeline
- .handleEvent(event, AbortSignal.timeout(5000))
- .catch(() => {});
+ pipeline
+ .handleEvent(event, AbortSignal.timeout(5000))
+ .catch(() => {});
+ }
+ }
}
diff --git a/src/interfaces/DittoEvent.ts b/src/interfaces/DittoEvent.ts
index 08879f8..41847fb 100644
--- a/src/interfaces/DittoEvent.ts
+++ b/src/interfaces/DittoEvent.ts
@@ -23,5 +23,15 @@ export interface DittoEvent extends NostrEvent {
d_author?: DittoEvent;
user?: DittoEvent;
repost?: DittoEvent;
- quote_repost?: DittoEvent;
+ quote?: DittoEvent;
+ reacted?: DittoEvent;
+ /** The profile being reported.
   * Must be a hydrated kind 0 event.
+ * https://github.com/nostr-protocol/nips/blob/master/56.md
+ */
+ reported_profile?: DittoEvent;
+ /** The notes being reported.
+ * https://github.com/nostr-protocol/nips/blob/master/56.md
+ */
+ reported_notes?: DittoEvent[];
}
diff --git a/src/kinds.ts b/src/kinds.ts
deleted file mode 100644
index 7953837..0000000
--- a/src/kinds.ts
+++ /dev/null
@@ -1,46 +0,0 @@
-/** Events are **regular**, which means they're all expected to be stored by relays. */
-function isRegularKind(kind: number) {
- return (1000 <= kind && kind < 10000) || [1, 2, 4, 5, 6, 7, 8, 16, 40, 41, 42, 43, 44].includes(kind);
-}
-
-/** Events are **replaceable**, which means that, for each combination of `pubkey` and `kind`, only the latest event is expected to (SHOULD) be stored by relays, older versions are expected to be discarded. */
-function isReplaceableKind(kind: number) {
- return (10000 <= kind && kind < 20000) || [0, 3].includes(kind);
-}
-
-/** Events are **ephemeral**, which means they are not expected to be stored by relays. */
-function isEphemeralKind(kind: number) {
- return 20000 <= kind && kind < 30000;
-}
-
-/** Events are **parameterized replaceable**, which means that, for each combination of `pubkey`, `kind` and the `d` tag, only the latest event is expected to be stored by relays, older versions are expected to be discarded. */
-function isParameterizedReplaceableKind(kind: number) {
- return 30000 <= kind && kind < 40000;
-}
-
-/** These events are only valid if published by the server keypair. */
-function isDittoInternalKind(kind: number) {
- return kind === 30361;
-}
-
-/** Classification of the event kind. */
-type KindClassification = 'regular' | 'replaceable' | 'ephemeral' | 'parameterized' | 'unknown';
-
-/** Determine the classification of this kind of event if known, or `unknown`. */
-function classifyKind(kind: number): KindClassification {
- if (isRegularKind(kind)) return 'regular';
- if (isReplaceableKind(kind)) return 'replaceable';
- if (isEphemeralKind(kind)) return 'ephemeral';
- if (isParameterizedReplaceableKind(kind)) return 'parameterized';
- return 'unknown';
-}
-
-export {
- classifyKind,
- isDittoInternalKind,
- isEphemeralKind,
- isParameterizedReplaceableKind,
- isRegularKind,
- isReplaceableKind,
- type KindClassification,
-};
diff --git a/src/middleware/auth19.ts b/src/middleware/auth19.ts
deleted file mode 100644
index d81c257..0000000
--- a/src/middleware/auth19.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import { HTTPException } from 'hono';
-import { type AppMiddleware } from '@/app.ts';
-import { getPublicKey, nip19 } from '@/deps.ts';
-
-/** We only accept "Bearer" type. */
-const BEARER_REGEX = new RegExp(`^Bearer (${nip19.BECH32_REGEX.source})$`);
-
-/** NIP-19 auth middleware. */
-const auth19: AppMiddleware = async (c, next) => {
- const authHeader = c.req.header('authorization');
- const match = authHeader?.match(BEARER_REGEX);
-
- if (match) {
- const [_, bech32] = match;
-
- try {
- const decoded = nip19.decode(bech32!);
-
- switch (decoded.type) {
- case 'npub':
- c.set('pubkey', decoded.data);
- break;
- case 'nprofile':
- c.set('pubkey', decoded.data.pubkey);
- break;
- case 'nsec':
- c.set('pubkey', getPublicKey(decoded.data));
- c.set('seckey', decoded.data);
- break;
- }
- } catch (_e) {
- //
- }
- }
-
- await next();
-};
-
-/** Throw a 401 if the pubkey isn't set. */
-const requirePubkey: AppMiddleware = async (c, next) => {
- if (!c.get('pubkey')) {
- throw new HTTPException(401, { message: 'No pubkey provided' });
- }
-
- await next();
-};
-
-export { auth19, requirePubkey };
diff --git a/src/middleware/auth98.ts b/src/middleware/auth98Middleware.ts
similarity index 80%
rename from src/middleware/auth98.ts
rename to src/middleware/auth98Middleware.ts
index db025ae..abecea7 100644
--- a/src/middleware/auth98.ts
+++ b/src/middleware/auth98Middleware.ts
@@ -1,27 +1,28 @@
import { NostrEvent } from '@nostrify/nostrify';
import { HTTPException } from 'hono';
+
import { type AppContext, type AppMiddleware } from '@/app.ts';
+import { findUser, User } from '@/db/users.ts';
+import { ConnectSigner } from '@/signers/ConnectSigner.ts';
+import { localRequest } from '@/utils/api.ts';
import {
buildAuthEventTemplate,
parseAuthRequest,
type ParseAuthRequestOpts,
validateAuthEvent,
} from '@/utils/nip98.ts';
-import { localRequest } from '@/utils/api.ts';
-import { APISigner } from '@/signers/APISigner.ts';
-import { findUser, User } from '@/db/users.ts';
/**
* NIP-98 auth.
* https://github.com/nostr-protocol/nips/blob/master/98.md
*/
-function auth98(opts: ParseAuthRequestOpts = {}): AppMiddleware {
+function auth98Middleware(opts: ParseAuthRequestOpts = {}): AppMiddleware {
return async (c, next) => {
const req = localRequest(c);
const result = await parseAuthRequest(req, opts);
if (result.success) {
- c.set('pubkey', result.data.pubkey);
+ c.set('signer', new ConnectSigner(result.data.pubkey));
c.set('proof', result.data);
}
@@ -33,9 +34,8 @@ type UserRole = 'user' | 'admin';
/** Require the user to prove their role before invoking the controller. */
function requireRole(role: UserRole, opts?: ParseAuthRequestOpts): AppMiddleware {
- return withProof(async (c, proof, next) => {
+ return withProof(async (_c, proof, next) => {
const user = await findUser({ pubkey: proof.pubkey });
- c.set('user', user);
if (user && matchesRole(user, role)) {
await next();
@@ -70,7 +70,7 @@ function withProof(
opts?: ParseAuthRequestOpts,
): AppMiddleware {
return async (c, next) => {
- const pubkey = c.get('pubkey');
+ const pubkey = await c.get('signer')?.getPublicKey();
const proof = c.get('proof') || await obtainProof(c, opts);
// Prevent people from accidentally using the wrong account. This has no other security implications.
@@ -79,7 +79,7 @@ function withProof(
}
if (proof) {
- c.set('pubkey', proof.pubkey);
+ c.set('signer', new ConnectSigner(proof.pubkey));
c.set('proof', proof);
await handler(c, proof, next);
} else {
@@ -90,9 +90,16 @@ function withProof(
/** Get the proof over Nostr Connect. */
async function obtainProof(c: AppContext, opts?: ParseAuthRequestOpts) {
+ const signer = c.get('signer');
+ if (!signer) {
+ throw new HTTPException(401, {
+ res: c.json({ error: 'No way to sign Nostr event' }, 401),
+ });
+ }
+
const req = localRequest(c);
const reqEvent = await buildAuthEventTemplate(req, opts);
- const resEvent = await new APISigner(c).signEvent(reqEvent);
+ const resEvent = await signer.signEvent(reqEvent);
const result = await validateAuthEvent(req, resEvent, opts);
if (result.success) {
@@ -100,4 +107,4 @@ async function obtainProof(c: AppContext, opts?: ParseAuthRequestOpts) {
}
}
-export { auth98, requireProof, requireRole };
+export { auth98Middleware, requireProof, requireRole };
diff --git a/src/middleware/cache.ts b/src/middleware/cacheMiddleware.ts
similarity index 89%
rename from src/middleware/cache.ts
rename to src/middleware/cacheMiddleware.ts
index fe28c5f..baa4976 100644
--- a/src/middleware/cache.ts
+++ b/src/middleware/cacheMiddleware.ts
@@ -1,10 +1,11 @@
+import Debug from '@soapbox/stickynotes/debug';
import { type MiddlewareHandler } from 'hono';
-import { Debug } from '@/deps.ts';
+
import ExpiringCache from '@/utils/expiring-cache.ts';
const debug = Debug('ditto:middleware:cache');
-export const cache = (options: {
+export const cacheMiddleware = (options: {
cacheName: string;
expires?: number;
}): MiddlewareHandler => {
diff --git a/src/middleware/csp.ts b/src/middleware/cspMiddleware.ts
similarity index 93%
rename from src/middleware/csp.ts
rename to src/middleware/cspMiddleware.ts
index fdce5c7..00c4ecc 100644
--- a/src/middleware/csp.ts
+++ b/src/middleware/cspMiddleware.ts
@@ -1,7 +1,7 @@
import { AppMiddleware } from '@/app.ts';
import { Conf } from '@/config.ts';
-const csp = (): AppMiddleware => {
+export const cspMiddleware = (): AppMiddleware => {
return async (c, next) => {
const { host, protocol, origin } = Conf.url;
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
@@ -26,5 +26,3 @@ const csp = (): AppMiddleware => {
await next();
};
};
-
-export { csp };
diff --git a/src/middleware/requireSigner.ts b/src/middleware/requireSigner.ts
new file mode 100644
index 0000000..6e337c2
--- /dev/null
+++ b/src/middleware/requireSigner.ts
@@ -0,0 +1,12 @@
+import { HTTPException } from 'hono';
+
+import { AppMiddleware } from '@/app.ts';
+
+/** Throw a 401 if a signer isn't set. */
+export const requireSigner: AppMiddleware = async (c, next) => {
+ if (!c.get('signer')) {
+ throw new HTTPException(401, { message: 'No pubkey provided' });
+ }
+
+ await next();
+};
diff --git a/src/middleware/signerMiddleware.ts b/src/middleware/signerMiddleware.ts
new file mode 100644
index 0000000..1d35708
--- /dev/null
+++ b/src/middleware/signerMiddleware.ts
@@ -0,0 +1,41 @@
+import { NSecSigner } from '@nostrify/nostrify';
+import { Stickynotes } from '@soapbox/stickynotes';
+import { nip19 } from 'nostr-tools';
+
+import { AppMiddleware } from '@/app.ts';
+import { ConnectSigner } from '@/signers/ConnectSigner.ts';
+
+const console = new Stickynotes('ditto:signerMiddleware');
+
+/** We only accept "Bearer" type. */
+const BEARER_REGEX = new RegExp(`^Bearer (${nip19.BECH32_REGEX.source})$`);
+
+/** Make a `signer` object available to all controllers, or unset if the user isn't logged in. */
+export const signerMiddleware: AppMiddleware = async (c, next) => {
+ const header = c.req.header('authorization');
+ const match = header?.match(BEARER_REGEX);
+
+ if (match) {
+ const [_, bech32] = match;
+
+ try {
+ const decoded = nip19.decode(bech32!);
+
+ switch (decoded.type) {
+ case 'npub':
+ c.set('signer', new ConnectSigner(decoded.data));
+ break;
+ case 'nprofile':
+ c.set('signer', new ConnectSigner(decoded.data.pubkey, decoded.data.relays));
+ break;
+ case 'nsec':
+ c.set('signer', new NSecSigner(decoded.data));
+ break;
+ }
+ } catch {
+ console.debug('The user is not logged in');
+ }
+ }
+
+ await next();
+};
diff --git a/src/middleware/store.ts b/src/middleware/store.ts
deleted file mode 100644
index 8bb595c..0000000
--- a/src/middleware/store.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-import { AppMiddleware } from '@/app.ts';
-import { UserStore } from '@/storages/UserStore.ts';
-import { eventsDB } from '@/storages.ts';
-
-/** Store middleware. */
-const storeMiddleware: AppMiddleware = async (c, next) => {
- const pubkey = c.get('pubkey') as string;
-
- if (pubkey) {
- const store = new UserStore(pubkey, eventsDB);
- c.set('store', store);
- } else {
- c.set('store', eventsDB);
- }
- await next();
-};
-
-export { storeMiddleware };
diff --git a/src/middleware/storeMiddleware.ts b/src/middleware/storeMiddleware.ts
new file mode 100644
index 0000000..4e24ab0
--- /dev/null
+++ b/src/middleware/storeMiddleware.ts
@@ -0,0 +1,16 @@
+import { AppMiddleware } from '@/app.ts';
+import { UserStore } from '@/storages/UserStore.ts';
+import { Storages } from '@/storages.ts';
+
+/** Store middleware. */
+export const storeMiddleware: AppMiddleware = async (c, next) => {
+ const pubkey = await c.get('signer')?.getPublicKey();
+
+ if (pubkey) {
+ const store = new UserStore(pubkey, await Storages.admin());
+ c.set('store', store);
+ } else {
+ c.set('store', await Storages.admin());
+ }
+ await next();
+};
diff --git a/src/middleware/uploaderMiddleware.ts b/src/middleware/uploaderMiddleware.ts
new file mode 100644
index 0000000..38e8ace
--- /dev/null
+++ b/src/middleware/uploaderMiddleware.ts
@@ -0,0 +1,35 @@
+import { BlossomUploader, NostrBuildUploader } from '@nostrify/nostrify/uploaders';
+
+import { AppMiddleware } from '@/app.ts';
+import { Conf } from '@/config.ts';
+import { DenoUploader } from '@/uploaders/DenoUploader.ts';
+import { IPFSUploader } from '@/uploaders/IPFSUploader.ts';
+import { S3Uploader } from '@/uploaders/S3Uploader.ts';
+import { fetchWorker } from '@/workers/fetch.ts';
+
+/** Set an uploader for the user. */
+export const uploaderMiddleware: AppMiddleware = async (c, next) => {
+ const signer = c.get('signer');
+
+ switch (Conf.uploader) {
+ case 's3':
+ c.set('uploader', new S3Uploader(Conf.s3));
+ break;
+ case 'ipfs':
+ c.set('uploader', new IPFSUploader({ baseUrl: Conf.mediaDomain, apiUrl: Conf.ipfs.apiUrl, fetch: fetchWorker }));
+ break;
+ case 'local':
+ c.set('uploader', new DenoUploader({ baseUrl: Conf.mediaDomain, dir: Conf.uploadsDir }));
+ break;
+ case 'nostrbuild':
+ c.set('uploader', new NostrBuildUploader({ endpoint: Conf.nostrbuildEndpoint, signer, fetch: fetchWorker }));
+ break;
+ case 'blossom':
+ if (signer) {
+ c.set('uploader', new BlossomUploader({ servers: Conf.blossomServers, signer, fetch: fetchWorker }));
+ }
+ break;
+ }
+
+ await next();
+};
diff --git a/src/nostr-wasm.ts b/src/nostr-wasm.ts
new file mode 100644
index 0000000..4413590
--- /dev/null
+++ b/src/nostr-wasm.ts
@@ -0,0 +1,4 @@
+import { setNostrWasm } from 'nostr-tools/wasm';
+import { initNostrWasm } from 'nostr-wasm';
+
+await initNostrWasm().then(setNostrWasm);
diff --git a/src/pipeline.ts b/src/pipeline.ts
index 1b19a78..15d495e 100644
--- a/src/pipeline.ts
+++ b/src/pipeline.ts
@@ -1,37 +1,32 @@
-import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
+import { NKinds, NostrEvent, NPolicy, NSchema as n } from '@nostrify/nostrify';
import { LNURL } from '@nostrify/nostrify/ln';
+import { PipePolicy } from '@nostrify/nostrify/policies';
+import Debug from '@soapbox/stickynotes/debug';
+import { sql } from 'kysely';
+
import { Conf } from '@/config.ts';
-import { db } from '@/db.ts';
-import { addRelays } from '@/db/relays.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
import { deleteAttachedMedia } from '@/db/unattached-media.ts';
-import { Debug, sql } from '@/deps.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
-import { isEphemeralKind } from '@/kinds.ts';
import { DVM } from '@/pipeline/DVM.ts';
+import { RelayError } from '@/RelayError.ts';
import { updateStats } from '@/stats.ts';
import { hydrateEvents, purifyEvent } from '@/storages/hydrate.ts';
-import { cache, eventsDB, reqmeister, Storages } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { getTagSet } from '@/tags.ts';
-import { eventAge, isRelay, nostrDate, nostrNow, parseNip05, Time } from '@/utils.ts';
+import { eventAge, nostrDate, nostrNow, parseNip05, Time } from '@/utils.ts';
import { fetchWorker } from '@/workers/fetch.ts';
+import { policyWorker } from '@/workers/policy.ts';
import { TrendsWorker } from '@/workers/trends.ts';
import { verifyEventWorker } from '@/workers/verify.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { lnurlCache } from '@/utils/lnurl.ts';
import { nip05Cache } from '@/utils/nip05.ts';
+import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';
+
const debug = Debug('ditto:pipeline');
-let UserPolicy: any;
-
-try {
- UserPolicy = (await import('../data/policy.ts')).default;
- debug('policy loaded from data/policy.ts');
-} catch (_e) {
- // do nothing
- debug('policy not found');
-}
-
/**
* Common pipeline function to process (and maybe store) events.
* It is idempotent, so it can be called multiple times for the same event.
@@ -40,45 +35,80 @@ async function handleEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
  debug(`NostrEvent<${event.kind}> ${event.id}`);
- await hydrateEvent(event, signal);
- if (UserPolicy) {
- const result = await new UserPolicy().call(event, signal);
- debug(JSON.stringify(result));
- const [_, _eventId, ok, reason] = result;
- if (!ok) {
- const [prefix, ...rest] = reason.split(': ');
- throw new RelayError(prefix, rest.join(': '));
- }
+ if (event.kind !== 24133) {
+ await policyFilter(event);
}
+ await hydrateEvent(event, signal);
+
await Promise.all([
storeEvent(event, signal),
parseMetadata(event, signal),
- processDeletions(event, signal),
DVM.event(event),
- trackRelays(event),
trackHashtags(event),
- fetchRelatedEvents(event, signal),
+ fetchRelatedEvents(event),
processMedia(event),
payZap(event, signal),
streamOut(event),
]);
}
+async function policyFilter(event: NostrEvent): Promise<void> {
+ const debug = Debug('ditto:policy');
+
+ const policies: NPolicy[] = [
+ new MuteListPolicy(Conf.pubkey, await Storages.admin()),
+ ];
+
+ try {
+ await policyWorker.import(Conf.policy);
+ policies.push(policyWorker);
+ debug(`Using custom policy: ${Conf.policy}`);
+ } catch (e) {
+ if (e.message.includes('Module not found')) {
+ debug('Custom policy not found ');
+ } else {
+ console.error(`DITTO_POLICY (error importing policy): ${Conf.policy}`, e);
+ throw new RelayError('blocked', 'policy could not be loaded');
+ }
+ }
+
+ const policy = new PipePolicy(policies.reverse());
+
+ try {
+ const result = await policy.call(event);
+ debug(JSON.stringify(result));
+ RelayError.assert(result);
+ } catch (e) {
+ if (e instanceof RelayError) {
+ throw e;
+ } else {
+ console.error('POLICY ERROR:', e);
+ throw new RelayError('blocked', 'policy error');
+ }
+ }
+}
+
/** Encounter the event, and return whether it has already been encountered. */
async function encounterEvent(event: NostrEvent, signal: AbortSignal): Promise<boolean> {
+ const cache = await Storages.cache();
+ const reqmeister = await Storages.reqmeister();
+
const [existing] = await cache.query([{ ids: [event.id], limit: 1 }]);
+
cache.event(event);
reqmeister.event(event, { signal });
+
return !!existing;
}
/** Hydrate the event with the user, if applicable. */
async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
- await hydrateEvents({ events: [event], storage: eventsDB, signal });
+ await hydrateEvents({ events: [event], store: await Storages.db(), signal });
- const domain = await db
+ const kysely = await DittoDB.getInstance();
+ const domain = await kysely
.selectFrom('pubkey_domains')
.select('domain')
.where('pubkey', '=', event.pubkey)
@@ -89,21 +119,11 @@ async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
- if (isEphemeralKind(event.kind)) return;
+ if (NKinds.ephemeral(event.kind)) return;
+ const store = await Storages.db();
- const [deletion] = await eventsDB.query(
- [{ kinds: [5], authors: [Conf.pubkey, event.pubkey], '#e': [event.id], limit: 1 }],
- { signal },
- );
-
- if (deletion) {
- return Promise.reject(new RelayError('blocked', 'event was deleted'));
- } else {
- await Promise.all([
- eventsDB.event(event, { signal }).catch(debug),
- updateStats(event).catch(debug),
- ]);
- }
+ await updateStats(event).catch(debug);
+ await store.event(event, { signal });
}
/** Parse kind 0 metadata and track indexes in the database. */
@@ -111,7 +131,7 @@ async function parseMetadata(event: NostrEvent, signal: AbortSignal): Promise<void> {
      WHERE excluded.last_updated_at > pubkey_domains.last_updated_at
- `.execute(db);
+ `.execute(kysely);
} catch (_e) {
// do nothing
}
}
-/** Query to-be-deleted events, ensure their pubkey matches, then delete them from the database. */
-async function processDeletions(event: NostrEvent, signal: AbortSignal): Promise<void> {
- if (event.kind === 5) {
- const ids = getTagSet(event.tags, 'e');
-
- if (event.pubkey === Conf.pubkey) {
- await eventsDB.remove([{ ids: [...ids] }], { signal });
- } else {
- const events = await eventsDB.query(
- [{ ids: [...ids], authors: [event.pubkey] }],
- { signal },
- );
-
- const deleteIds = events.map(({ id }) => id);
- await eventsDB.remove([{ ids: deleteIds }], { signal });
- }
- }
-}
-
/** Track whenever a hashtag is used, for processing trending tags. */
async function trackHashtags(event: NostrEvent): Promise<void> {
const date = nostrDate(event.created_at);
@@ -181,33 +183,26 @@ async function trackHashtags(event: NostrEvent): Promise<void> {
}
}
-/** Tracks known relays in the database. */
-function trackRelays(event: NostrEvent) {
- const relays = new Set<`wss://${string}`>();
-
- event.tags.forEach((tag) => {
- if (['p', 'e', 'a'].includes(tag[0]) && isRelay(tag[2])) {
- relays.add(tag[2]);
- }
- if (event.kind === 10002 && tag[0] === 'r' && isRelay(tag[1])) {
- relays.add(tag[1]);
- }
- });
-
- return addRelays([...relays]);
-}
-
/** Queue related events to fetch. */
-async function fetchRelatedEvents(event: DittoEvent, signal: AbortSignal) {
- if (!event.user) {
- reqmeister.req({ kinds: [0], authors: [event.pubkey] }, { signal }).catch(() => {});
+async function fetchRelatedEvents(event: DittoEvent) {
+ const cache = await Storages.cache();
+ const reqmeister = await Storages.reqmeister();
+
+ if (!event.author) {
+ const signal = AbortSignal.timeout(3000);
+ reqmeister.query([{ kinds: [0], authors: [event.pubkey] }], { signal })
+ .then((events) => Promise.allSettled(events.map((event) => handleEvent(event, signal))))
+ .catch(() => {});
}
- for (const [name, id, relay] of event.tags) {
+ for (const [name, id] of event.tags) {
if (name === 'e') {
const { count } = await cache.count([{ ids: [id] }]);
if (!count) {
- reqmeister.req({ ids: [id] }, { relays: [relay] }).catch(() => {});
+ const signal = AbortSignal.timeout(3000);
+ reqmeister.query([{ ids: [id] }], { signal })
+ .then((events) => Promise.allSettled(events.map((event) => handleEvent(event, signal))))
+ .catch(() => {});
}
}
}
@@ -276,15 +271,9 @@ function isFresh(event: NostrEvent): boolean {
/** Distribute the event through active subscriptions. */
async function streamOut(event: NostrEvent): Promise<void> {
if (isFresh(event)) {
- await Storages.pubsub.event(event);
+ const pubsub = await Storages.pubsub();
+ await pubsub.event(event);
}
}
-/** NIP-20 command line result. */
-class RelayError extends Error {
- constructor(prefix: 'duplicate' | 'pow' | 'blocked' | 'rate-limited' | 'invalid' | 'error', message: string) {
- super(`${prefix}: ${message}`);
- }
-}
-
-export { handleEvent, RelayError };
+export { handleEvent };
diff --git a/src/pipeline/DVM.ts b/src/pipeline/DVM.ts
index 96e3c40..a811067 100644
--- a/src/pipeline/DVM.ts
+++ b/src/pipeline/DVM.ts
@@ -3,7 +3,7 @@ import { NIP05, NostrEvent } from '@nostrify/nostrify';
import { Conf } from '@/config.ts';
import * as pipeline from '@/pipeline.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
export class DVM {
static async event(event: NostrEvent): Promise<void> {
@@ -34,7 +34,9 @@ export class DVM {
return DVM.feedback(event, 'error', `Forbidden user: ${user}`);
}
- const [label] = await eventsDB.query([{
+ const store = await Storages.db();
+
+ const [label] = await store.query([{
kinds: [1985],
authors: [admin],
'#L': ['nip05'],
diff --git a/src/policies/MuteListPolicy.test.ts b/src/policies/MuteListPolicy.test.ts
new file mode 100644
index 0000000..89d7d99
--- /dev/null
+++ b/src/policies/MuteListPolicy.test.ts
@@ -0,0 +1,72 @@
+import { MockRelay } from '@nostrify/nostrify/test';
+
+import { assertEquals } from '@std/assert';
+import { UserStore } from '@/storages/UserStore.ts';
+import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';
+
+import userBlack from '~/fixtures/events/kind-0-black.json' with { type: 'json' };
+import userMe from '~/fixtures/events/event-0-makes-repost-with-quote-repost.json' with { type: 'json' };
+import blockEvent from '~/fixtures/events/kind-10000-black-blocks-user-me.json' with { type: 'json' };
+import event1authorUserMe from '~/fixtures/events/event-1-quote-repost-will-be-reposted.json' with { type: 'json' };
+import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
+
+Deno.test('block event: muted user cannot post', async () => {
+ const userBlackCopy = structuredClone(userBlack);
+ const userMeCopy = structuredClone(userMe);
+ const blockEventCopy = structuredClone(blockEvent);
+ const event1authorUserMeCopy = structuredClone(event1authorUserMe);
+
+ const db = new MockRelay();
+
+ const store = new UserStore(userBlackCopy.pubkey, db);
+ const policy = new MuteListPolicy(userBlack.pubkey, db);
+
+ await store.event(blockEventCopy);
+ await store.event(userBlackCopy);
+ await store.event(userMeCopy);
+
+ const ok = await policy.call(event1authorUserMeCopy);
+
+ assertEquals(ok, ['OK', event1authorUserMeCopy.id, false, 'blocked: Your account has been deactivated.']);
+});
+
+Deno.test('allow event: user is NOT muted because there is no muted event', async () => {
+ const userBlackCopy = structuredClone(userBlack);
+ const userMeCopy = structuredClone(userMe);
+ const event1authorUserMeCopy = structuredClone(event1authorUserMe);
+
+ const db = new MockRelay();
+
+ const store = new UserStore(userBlackCopy.pubkey, db);
+ const policy = new MuteListPolicy(userBlack.pubkey, db);
+
+ await store.event(userBlackCopy);
+ await store.event(userMeCopy);
+
+ const ok = await policy.call(event1authorUserMeCopy);
+
+ assertEquals(ok, ['OK', event1authorUserMeCopy.id, true, '']);
+});
+
+Deno.test('allow event: user is NOT muted because he is not in mute event', async () => {
+ const userBlackCopy = structuredClone(userBlack);
+ const userMeCopy = structuredClone(userMe);
+ const event1authorUserMeCopy = structuredClone(event1authorUserMe);
+ const blockEventCopy = structuredClone(blockEvent);
+ const event1copy = structuredClone(event1);
+
+ const db = new MockRelay();
+
+ const store = new UserStore(userBlackCopy.pubkey, db);
+ const policy = new MuteListPolicy(userBlack.pubkey, db);
+
+ await store.event(userBlackCopy);
+ await store.event(blockEventCopy);
+ await store.event(userMeCopy);
+ await store.event(event1copy);
+ await store.event(event1authorUserMeCopy);
+
+ const ok = await policy.call(event1copy);
+
+ assertEquals(ok, ['OK', event1.id, true, '']);
+});
diff --git a/src/policies/MuteListPolicy.ts b/src/policies/MuteListPolicy.ts
new file mode 100644
index 0000000..cae08eb
--- /dev/null
+++ b/src/policies/MuteListPolicy.ts
@@ -0,0 +1,18 @@
+import { NostrEvent, NostrRelayOK, NPolicy, NStore } from '@nostrify/nostrify';
+
+import { getTagSet } from '@/tags.ts';
+
+export class MuteListPolicy implements NPolicy {
+ constructor(private pubkey: string, private store: NStore) {}
+
+ async call(event: NostrEvent): Promise<NostrRelayOK> {
+ const [muteList] = await this.store.query([{ authors: [this.pubkey], kinds: [10000], limit: 1 }]);
+ const pubkeys = getTagSet(muteList?.tags ?? [], 'p');
+
+ if (pubkeys.has(event.pubkey)) {
+ return ['OK', event.id, false, 'blocked: Your account has been deactivated.'];
+ }
+
+ return ['OK', event.id, true, ''];
+ }
+}
diff --git a/src/pool.ts b/src/pool.ts
deleted file mode 100644
index 06c251e..0000000
--- a/src/pool.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-import { getActiveRelays } from '@/db/relays.ts';
-import { RelayPoolWorker } from '@/deps.ts';
-
-const activeRelays = await getActiveRelays();
-
-console.log(`pool: connecting to ${activeRelays.length} relays.`);
-
-const worker = new Worker('https://unpkg.com/nostr-relaypool2@0.6.34/lib/nostr-relaypool.worker.js', {
- type: 'module',
-});
-
-// @ts-ignore Wrong types.
-const pool = new RelayPoolWorker(worker, activeRelays, {
- autoReconnect: true,
- // The pipeline verifies events.
- skipVerification: true,
- // The logging feature overwhelms the CPU and creates too many logs.
- logErrorsAndNotices: false,
-});
-
-export { activeRelays, pool };
diff --git a/src/queries.ts b/src/queries.ts
index cf61b84..76fabfd 100644
--- a/src/queries.ts
+++ b/src/queries.ts
@@ -1,7 +1,8 @@
import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
+import Debug from '@soapbox/stickynotes/debug';
+
import { Conf } from '@/config.ts';
-import { eventsDB, optimizer } from '@/storages.ts';
-import { Debug } from '@/deps.ts';
+import { Storages } from '@/storages.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { type DittoRelation } from '@/interfaces/DittoFilter.ts';
import { findReplyTag, getTagSet } from '@/tags.ts';
@@ -24,6 +25,7 @@ const getEvent = async (
opts: GetEventOpts = {},
): Promise<DittoEvent | undefined> => {
debug(`getEvent: ${id}`);
+ const store = await Storages.optimizer();
const { kind, signal = AbortSignal.timeout(1000) } = opts;
const filter: NostrFilter = { ids: [id], limit: 1 };
@@ -31,23 +33,25 @@ const getEvent = async (
filter.kinds = [kind];
}
- return await optimizer.query([filter], { limit: 1, signal })
- .then((events) => hydrateEvents({ events, storage: optimizer, signal }))
+ return await store.query([filter], { limit: 1, signal })
+ .then((events) => hydrateEvents({ events, store, signal }))
.then(([event]) => event);
};
/** Get a Nostr `set_medatadata` event for a user's pubkey. */
const getAuthor = async (pubkey: string, opts: GetEventOpts = {}): Promise<NostrEvent | undefined> => {
+ const store = await Storages.optimizer();
const { signal = AbortSignal.timeout(1000) } = opts;
- return await optimizer.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal })
- .then((events) => hydrateEvents({ events, storage: optimizer, signal }))
+ return await store.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal })
+ .then((events) => hydrateEvents({ events, store, signal }))
.then(([event]) => event);
};
/** Get users the given pubkey follows. */
const getFollows = async (pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
- const [event] = await eventsDB.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal });
+ const store = await Storages.db();
+ const [event] = await store.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal });
return event;
};
@@ -82,16 +86,19 @@ async function getAncestors(event: NostrEvent, result: NostrEvent[] = []): Promise<NostrEvent[]> {
return result.reverse();
}
-function getDescendants(eventId: string, signal = AbortSignal.timeout(2000)): Promise<NostrEvent[]> {
- return eventsDB.query([{ kinds: [1], '#e': [eventId] }], { limit: 200, signal })
- .then((events) => hydrateEvents({ events, storage: eventsDB, signal }));
+async function getDescendants(eventId: string, signal = AbortSignal.timeout(2000)): Promise<NostrEvent[]> {
+ const store = await Storages.db();
+ const events = await store.query([{ kinds: [1], '#e': [eventId] }], { limit: 200, signal });
+ return hydrateEvents({ events, store, signal });
}
/** Returns whether the pubkey is followed by a local user. */
async function isLocallyFollowed(pubkey: string): Promise<boolean> {
const { host } = Conf.url;
- const [event] = await eventsDB.query(
+ const store = await Storages.db();
+
+ const [event] = await store.query(
[{ kinds: [3], '#p': [pubkey], search: `domain:${host}`, limit: 1 }],
{ limit: 1 },
);
diff --git a/src/schema.ts b/src/schema.ts
index 74dc7af..d152a0d 100644
--- a/src/schema.ts
+++ b/src/schema.ts
@@ -11,16 +6,6 @@ function filteredArray<T extends z.ZodTypeAny>(schema: T) {
));
}
-/** Parses a JSON string into its native type. */
-const jsonSchema = z.string().transform((value, ctx) => {
- try {
- return JSON.parse(value) as unknown;
- } catch (_e) {
- ctx.addIssue({ code: z.ZodIssueCode.custom, message: 'Invalid JSON' });
- return z.NEVER;
- }
-});
-
/** https://developer.mozilla.org/en-US/docs/Glossary/Base64#the_unicode_problem */
const decode64Schema = z.string().transform((value, ctx) => {
try {
@@ -48,4 +38,4 @@ const booleanParamSchema = z.enum(['true', 'false']).transform((value) => value
/** Schema for `File` objects. */
const fileSchema = z.custom<File>((value) => value instanceof File);
-export { booleanParamSchema, decode64Schema, fileSchema, filteredArray, hashtagSchema, jsonSchema, safeUrlSchema };
+export { booleanParamSchema, decode64Schema, fileSchema, filteredArray, hashtagSchema, safeUrlSchema };
diff --git a/src/schemas/nostr.ts b/src/schemas/nostr.ts
index 0497093..d8aa29a 100644
--- a/src/schemas/nostr.ts
+++ b/src/schemas/nostr.ts
@@ -1,151 +1,35 @@
+import { NSchema as n } from '@nostrify/nostrify';
+import { getEventHash, verifyEvent } from 'nostr-tools';
import { z } from 'zod';
-import { getEventHash, verifyEvent } from '@/deps.ts';
-import { jsonSchema, safeUrlSchema } from '@/schema.ts';
-
-/** Schema to validate Nostr hex IDs such as event IDs and pubkeys. */
-const nostrIdSchema = z.string().regex(/^[0-9a-f]{64}$/);
-/** Nostr kinds are positive integers. */
-const kindSchema = z.number().int().nonnegative();
-
-/** Nostr event schema. */
-const eventSchema = z.object({
- id: nostrIdSchema,
- kind: kindSchema,
- tags: z.array(z.array(z.string())),
- content: z.string(),
- created_at: z.number(),
- pubkey: nostrIdSchema,
- sig: z.string(),
-});
+import { safeUrlSchema } from '@/schema.ts';
/** Nostr event schema that also verifies the event's signature. */
-const signedEventSchema = eventSchema
+const signedEventSchema = n.event()
.refine((event) => event.id === getEventHash(event), 'Event ID does not match hash')
.refine(verifyEvent, 'Event signature is invalid');
-/** Nostr relay filter schema. */
-const filterSchema = z.object({
- kinds: kindSchema.array().optional(),
- ids: nostrIdSchema.array().optional(),
- authors: nostrIdSchema.array().optional(),
- since: z.number().int().nonnegative().optional(),
- until: z.number().int().nonnegative().optional(),
- limit: z.number().int().nonnegative().optional(),
- search: z.string().optional(),
-}).passthrough().and(
- z.record(
- z.custom<`#${string}`>((val) => typeof val === 'string' && val.startsWith('#')),
- z.string().array(),
- ).catch({}),
-);
-
-const clientReqSchema = z.tuple([z.literal('REQ'), z.string().min(1)]).rest(filterSchema);
-const clientEventSchema = z.tuple([z.literal('EVENT'), signedEventSchema]);
-const clientCloseSchema = z.tuple([z.literal('CLOSE'), z.string().min(1)]);
-const clientCountSchema = z.tuple([z.literal('COUNT'), z.string().min(1)]).rest(filterSchema);
-
-/** Client message to a Nostr relay. */
-const clientMsgSchema = z.union([
- clientReqSchema,
- clientEventSchema,
- clientCloseSchema,
- clientCountSchema,
-]);
-
-/** REQ message from client to relay. */
-type ClientREQ = z.infer<typeof clientReqSchema>;
-/** EVENT message from client to relay. */
-type ClientEVENT = z.infer<typeof clientEventSchema>;
-/** CLOSE message from client to relay. */
-type ClientCLOSE = z.infer<typeof clientCloseSchema>;
-/** COUNT message from client to relay. */
-type ClientCOUNT = z.infer<typeof clientCountSchema>;
-/** Client message to a Nostr relay. */
-type ClientMsg = z.infer<typeof clientMsgSchema>;
-
-/** Kind 0 content schema. */
-const metaContentSchema = z.object({
- name: z.string().optional().catch(undefined),
- about: z.string().optional().catch(undefined),
- picture: z.string().optional().catch(undefined),
- banner: z.string().optional().catch(undefined),
- nip05: z.string().optional().catch(undefined),
- lud06: z.string().optional().catch(undefined),
- lud16: z.string().optional().catch(undefined),
-}).partial().passthrough();
-
-/** Media data schema from `"media"` tags. */
-const mediaDataSchema = z.object({
- blurhash: z.string().optional().catch(undefined),
- cid: z.string().optional().catch(undefined),
- description: z.string().max(200).optional().catch(undefined),
- height: z.number().int().positive().optional().catch(undefined),
- mime: z.string().optional().catch(undefined),
- name: z.string().optional().catch(undefined),
- size: z.number().int().positive().optional().catch(undefined),
- width: z.number().int().positive().optional().catch(undefined),
-});
-
/** Kind 0 content schema for the Ditto server admin user. */
-const serverMetaSchema = metaContentSchema.extend({
+const serverMetaSchema = n.metadata().and(z.object({
tagline: z.string().optional().catch(undefined),
email: z.string().optional().catch(undefined),
-});
-
-/** Media data from `"media"` tags. */
-type MediaData = z.infer<typeof mediaDataSchema>;
-
-/** Parses kind 0 content from a JSON string. */
-const jsonMetaContentSchema = jsonSchema.pipe(metaContentSchema).catch({});
-
-/** Parses media data from a JSON string. */
-const jsonMediaDataSchema = jsonSchema.pipe(mediaDataSchema).catch({});
-
-/** Parses server admin meta from a JSON string. */
-const jsonServerMetaSchema = jsonSchema.pipe(serverMetaSchema).catch({});
+}));
/** NIP-11 Relay Information Document. */
const relayInfoDocSchema = z.object({
name: z.string().transform((val) => val.slice(0, 30)).optional().catch(undefined),
description: z.string().transform((val) => val.slice(0, 3000)).optional().catch(undefined),
- pubkey: nostrIdSchema.optional().catch(undefined),
+ pubkey: n.id().optional().catch(undefined),
contact: safeUrlSchema.optional().catch(undefined),
supported_nips: z.number().int().nonnegative().array().optional().catch(undefined),
software: safeUrlSchema.optional().catch(undefined),
icon: safeUrlSchema.optional().catch(undefined),
});
-/** NIP-46 signer response. */
-const connectResponseSchema = z.object({
- id: z.string(),
- result: signedEventSchema,
-});
-
/** Parses a Nostr emoji tag. */
const emojiTagSchema = z.tuple([z.literal('emoji'), z.string(), z.string().url()]);
/** NIP-30 custom emoji tag. */
type EmojiTag = z.infer<typeof emojiTagSchema>;
-export {
- type ClientCLOSE,
- type ClientCOUNT,
- type ClientEVENT,
- type ClientMsg,
- clientMsgSchema,
- type ClientREQ,
- connectResponseSchema,
- type EmojiTag,
- emojiTagSchema,
- filterSchema,
- jsonMediaDataSchema,
- jsonMetaContentSchema,
- jsonServerMetaSchema,
- type MediaData,
- mediaDataSchema,
- metaContentSchema,
- nostrIdSchema,
- relayInfoDocSchema,
- signedEventSchema,
-};
+export { type EmojiTag, emojiTagSchema, relayInfoDocSchema, serverMetaSchema, signedEventSchema };
diff --git a/src/sentry.ts b/src/sentry.ts
index eefe9c5..84b662e 100644
--- a/src/sentry.ts
+++ b/src/sentry.ts
@@ -1,5 +1,6 @@
-import { Conf } from './config.ts';
-import { Sentry } from './deps.ts';
+import * as Sentry from '@sentry/deno';
+
+import { Conf } from '@/config.ts';
// Sentry
if (Conf.sentryDsn) {
diff --git a/src/server.ts b/src/server.ts
index 68af681..4825e99 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,5 +1,6 @@
import '@/precheck.ts';
import '@/sentry.ts';
+import '@/nostr-wasm.ts';
import app from '@/app.ts';
import { Conf } from '@/config.ts';
diff --git a/src/signers/APISigner.ts b/src/signers/APISigner.ts
deleted file mode 100644
index e9914b1..0000000
--- a/src/signers/APISigner.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-// deno-lint-ignore-file require-await
-
-import { NConnectSigner, NostrEvent, NostrSigner, NSecSigner } from '@nostrify/nostrify';
-import { HTTPException } from 'hono';
-import { type AppContext } from '@/app.ts';
-import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { Storages } from '@/storages.ts';
-
-/**
- * Sign Nostr event using the app context.
- *
- * - If a secret key is provided, it will be used to sign the event.
- * - Otherwise, it will use NIP-46 to sign the event.
- */
-export class APISigner implements NostrSigner {
- private signer: NostrSigner;
-
- constructor(c: AppContext) {
- const seckey = c.get('seckey');
- const pubkey = c.get('pubkey');
-
- if (!pubkey) {
- throw new HTTPException(401, { message: 'Missing pubkey' });
- }
-
- if (seckey) {
- this.signer = new NSecSigner(seckey);
- } else {
- this.signer = new NConnectSigner({
- pubkey,
- relay: Storages.pubsub,
- signer: new AdminSigner(),
- timeout: 60000,
- });
- }
- }
-
- async getPublicKey(): Promise<string> {
- return this.signer.getPublicKey();
- }
-
- async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
- return this.signer.signEvent(event);
- }
-
- readonly nip04 = {
- encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
- return this.signer.nip04!.encrypt(pubkey, plaintext);
- },
-
- decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
- return this.signer.nip04!.decrypt(pubkey, ciphertext);
- },
- };
-
- readonly nip44 = {
- encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
- return this.signer.nip44!.encrypt(pubkey, plaintext);
- },
-
- decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
- return this.signer.nip44!.decrypt(pubkey, ciphertext);
- },
- };
-}
diff --git a/src/signers/ConnectSigner.ts b/src/signers/ConnectSigner.ts
new file mode 100644
index 0000000..f482413
--- /dev/null
+++ b/src/signers/ConnectSigner.ts
@@ -0,0 +1,70 @@
+// deno-lint-ignore-file require-await
+import { NConnectSigner, NostrEvent, NostrSigner } from '@nostrify/nostrify';
+
+import { AdminSigner } from '@/signers/AdminSigner.ts';
+import { Storages } from '@/storages.ts';
+
+/**
+ * NIP-46 signer.
+ *
+ * Simple extension of nostrify's `NConnectSigner`, with our options to keep it DRY.
+ */
+export class ConnectSigner implements NostrSigner {
+ private signer: Promise<NConnectSigner>;
+
+ constructor(private pubkey: string, private relays?: string[]) {
+ this.signer = this.init();
+ }
+
+ async init(): Promise<NConnectSigner> {
+ return new NConnectSigner({
+ pubkey: this.pubkey,
+ // TODO: use a remote relay for `nprofile` signing (if present and `Conf.relay` isn't already in the list)
+ relay: await Storages.pubsub(),
+ signer: new AdminSigner(),
+ timeout: 60000,
+ });
+ }
+
+ async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
+ const signer = await this.signer;
+ return signer.signEvent(event);
+ }
+
+ readonly nip04 = {
+ encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
+ const signer = await this.signer;
+ return signer.nip04.encrypt(pubkey, plaintext);
+ },
+
+ decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
+ const signer = await this.signer;
+ return signer.nip04.decrypt(pubkey, ciphertext);
+ },
+ };
+
+ readonly nip44 = {
+ encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
+ const signer = await this.signer;
+ return signer.nip44.encrypt(pubkey, plaintext);
+ },
+
+ decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
+ const signer = await this.signer;
+ return signer.nip44.decrypt(pubkey, ciphertext);
+ },
+ };
+
+ // Prevent unnecessary NIP-46 round-trips.
+ async getPublicKey(): Promise<string> {
+ return this.pubkey;
+ }
+
+ /** Get the user's relays if they passed in an `nprofile` auth token. */
+ async getRelays(): Promise<Record<string, { read: boolean; write: boolean }>> {
+ return this.relays?.reduce<Record<string, { read: boolean; write: boolean }>>((acc, relay) => {
+ acc[relay] = { read: true, write: true };
+ return acc;
+ }, {}) ?? {};
+ }
+}
diff --git a/src/stats.ts b/src/stats.ts
index 48cc41b..256c570 100644
--- a/src/stats.ts
+++ b/src/stats.ts
@@ -1,9 +1,14 @@
-import { NostrEvent } from '@nostrify/nostrify';
-import { db } from '@/db.ts';
+import { Semaphore } from '@lambdalisue/async';
+import { NKinds, NostrEvent, NStore } from '@nostrify/nostrify';
+import Debug from '@soapbox/stickynotes/debug';
+import { InsertQueryBuilder, Kysely } from 'kysely';
+import { LRUCache } from 'lru-cache';
+import { SetRequired } from 'type-fest';
+
+import { DittoDB } from '@/db/DittoDB.ts';
import { DittoTables } from '@/db/DittoTables.ts';
-import { Debug, type InsertQueryBuilder } from '@/deps.ts';
-import { eventsDB } from '@/storages.ts';
-import { findReplyTag } from '@/tags.ts';
+import { Storages } from '@/storages.ts';
+import { findReplyTag, getTagSet } from '@/tags.ts';
type AuthorStat = keyof Omit<DittoTables['author_stats'], 'pubkey'>;
type EventStat = keyof Omit<DittoTables['event_stats'], 'event_id'>;
@@ -14,16 +19,16 @@ type StatDiff = AuthorStatDiff | EventStatDiff;
const debug = Debug('ditto:stats');
-/** Store stats for the event in LMDB. */
+/** Store stats for the event. */
async function updateStats(event: NostrEvent) {
let prev: NostrEvent | undefined;
const queries: InsertQueryBuilder[] = [];
// Kind 3 is a special case - replace the count with the new list.
if (event.kind === 3) {
- prev = await maybeGetPrev(event);
+ prev = await getPrevEvent(event);
if (!prev || event.created_at >= prev.created_at) {
- queries.push(updateFollowingCountQuery(event));
+ queries.push(await updateFollowingCountQuery(event));
}
}
@@ -35,8 +40,12 @@ async function updateStats(event: NostrEvent) {
debug(JSON.stringify({ id: event.id, pubkey: event.pubkey, kind: event.kind, tags: event.tags, statDiffs }));
}
- if (pubkeyDiffs.length) queries.push(authorStatsQuery(pubkeyDiffs));
- if (eventDiffs.length) queries.push(eventStatsQuery(eventDiffs));
+ pubkeyDiffs.forEach(([_, pubkey]) => refreshAuthorStatsDebounced(pubkey));
+
+ const kysely = await DittoDB.getInstance();
+
+ if (pubkeyDiffs.length) queries.push(authorStatsQuery(kysely, pubkeyDiffs));
+ if (eventDiffs.length) queries.push(eventStatsQuery(kysely, eventDiffs));
if (queries.length) {
await Promise.all(queries.map((query) => query.execute()));
@@ -45,6 +54,7 @@ async function updateStats(event: NostrEvent) {
/** Calculate stats changes ahead of time so we can build an efficient query. */
async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Promise<StatDiff[]> {
+ const store = await Storages.db();
const statDiffs: StatDiff[] = [];
const firstTaggedId = event.tags.find(([name]) => name === 'e')?.[1];
@@ -63,7 +73,7 @@ async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Pr
case 5: {
if (!firstTaggedId) break;
- const [repostedEvent] = await eventsDB.query(
+ const [repostedEvent] = await store.query(
[{ kinds: [6], ids: [firstTaggedId], authors: [event.pubkey] }],
{ limit: 1 },
);
@@ -75,7 +85,7 @@ async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Pr
const eventBeingRepostedPubkey = repostedEvent.tags.find(([name]) => name === 'p')?.[1];
if (!eventBeingRepostedId || !eventBeingRepostedPubkey) break;
- const [eventBeingReposted] = await eventsDB.query(
+ const [eventBeingReposted] = await store.query(
[{ kinds: [1], ids: [eventBeingRepostedId], authors: [eventBeingRepostedPubkey] }],
{ limit: 1 },
);
@@ -99,7 +109,7 @@ async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Pr
}
/** Create an author stats query from the list of diffs. */
-function authorStatsQuery(diffs: AuthorStatDiff[]) {
+function authorStatsQuery(kysely: Kysely<DittoTables>, diffs: AuthorStatDiff[]) {
const values: DittoTables['author_stats'][] = diffs.map(([_, pubkey, stat, diff]) => {
const row: DittoTables['author_stats'] = {
pubkey,
@@ -111,21 +121,21 @@ function authorStatsQuery(diffs: AuthorStatDiff[]) {
return row;
});
- return db.insertInto('author_stats')
+ return kysely.insertInto('author_stats')
.values(values)
.onConflict((oc) =>
oc
.column('pubkey')
.doUpdateSet((eb) => ({
- followers_count: eb('followers_count', '+', eb.ref('excluded.followers_count')),
- following_count: eb('following_count', '+', eb.ref('excluded.following_count')),
- notes_count: eb('notes_count', '+', eb.ref('excluded.notes_count')),
+ followers_count: eb('author_stats.followers_count', '+', eb.ref('excluded.followers_count')),
+ following_count: eb('author_stats.following_count', '+', eb.ref('excluded.following_count')),
+ notes_count: eb('author_stats.notes_count', '+', eb.ref('excluded.notes_count')),
}))
);
}
/** Create an event stats query from the list of diffs. */
-function eventStatsQuery(diffs: EventStatDiff[]) {
+function eventStatsQuery(kysely: Kysely<DittoTables>, diffs: EventStatDiff[]) {
const values: DittoTables['event_stats'][] = diffs.map(([_, event_id, stat, diff]) => {
const row: DittoTables['event_stats'] = {
event_id,
@@ -137,37 +147,42 @@ function eventStatsQuery(diffs: EventStatDiff[]) {
return row;
});
- return db.insertInto('event_stats')
+ return kysely.insertInto('event_stats')
.values(values)
.onConflict((oc) =>
oc
.column('event_id')
.doUpdateSet((eb) => ({
- replies_count: eb('replies_count', '+', eb.ref('excluded.replies_count')),
- reposts_count: eb('reposts_count', '+', eb.ref('excluded.reposts_count')),
- reactions_count: eb('reactions_count', '+', eb.ref('excluded.reactions_count')),
+ replies_count: eb('event_stats.replies_count', '+', eb.ref('excluded.replies_count')),
+ reposts_count: eb('event_stats.reposts_count', '+', eb.ref('excluded.reposts_count')),
+ reactions_count: eb('event_stats.reactions_count', '+', eb.ref('excluded.reactions_count')),
}))
);
}
/** Get the last version of the event, if any. */
-async function maybeGetPrev(event: NostrEvent): Promise<NostrEvent | undefined> {
- const [prev] = await eventsDB.query([
- { kinds: [event.kind], authors: [event.pubkey], limit: 1 },
- ]);
+async function getPrevEvent(event: NostrEvent): Promise<NostrEvent | undefined> {
+ if (NKinds.replaceable(event.kind) || NKinds.parameterizedReplaceable(event.kind)) {
+ const store = await Storages.db();
- return prev;
+ const [prev] = await store.query([
+ { kinds: [event.kind], authors: [event.pubkey], limit: 1 },
+ ]);
+
+ return prev;
+ }
}
/** Set the following count to the total number of unique "p" tags in the follow list. */
-function updateFollowingCountQuery({ pubkey, tags }: NostrEvent) {
+async function updateFollowingCountQuery({ pubkey, tags }: NostrEvent) {
const following_count = new Set(
tags
.filter(([name]) => name === 'p')
.map(([_, value]) => value),
).size;
- return db.insertInto('author_stats')
+ const kysely = await DittoDB.getInstance();
+ return kysely.insertInto('author_stats')
.values({
pubkey,
following_count,
@@ -206,4 +221,53 @@ function getFollowDiff(event: NostrEvent, prev?: NostrEvent): AuthorStatDiff[] {
];
}
-export { updateStats };
+/** Refresh the author's stats in the database. */
+async function refreshAuthorStats(pubkey: string): Promise<DittoTables['author_stats']> {
+ const store = await Storages.db();
+ const stats = await countAuthorStats(store, pubkey);
+
+ const kysely = await DittoDB.getInstance();
+ await kysely.insertInto('author_stats')
+ .values(stats)
+ .onConflict((oc) => oc.column('pubkey').doUpdateSet(stats))
+ .execute();
+
+ return stats;
+}
+
+/** Calculate author stats from the database. */
+async function countAuthorStats(
+ store: SetRequired<NStore, 'count'>,
+ pubkey: string,
+): Promise<DittoTables['author_stats']> {
+ const [{ count: followers_count }, { count: notes_count }, [followList]] = await Promise.all([
+ store.count([{ kinds: [3], '#p': [pubkey] }]),
+ store.count([{ kinds: [1], authors: [pubkey] }]),
+ store.query([{ kinds: [3], authors: [pubkey], limit: 1 }]),
+ ]);
+
+ return {
+ pubkey,
+ followers_count,
+ following_count: getTagSet(followList?.tags ?? [], 'p').size,
+ notes_count,
+ };
+}
+
+const authorStatsSemaphore = new Semaphore(10);
+const refreshedAuthors = new LRUCache<string, true>({ max: 1000 });
+
+/** Calls `refreshAuthorStats` only once per author. */
+function refreshAuthorStatsDebounced(pubkey: string): void {
+ if (refreshedAuthors.get(pubkey)) {
+ return;
+ }
+
+ refreshedAuthors.set(pubkey, true);
+ debug('refreshing author stats:', pubkey);
+
+ authorStatsSemaphore
+ .lock(() => refreshAuthorStats(pubkey).catch(() => {}));
+}
+
+export { refreshAuthorStats, refreshAuthorStatsDebounced, updateStats };
diff --git a/src/storages.ts b/src/storages.ts
index 6c6a4a5..10d5b05 100644
--- a/src/storages.ts
+++ b/src/storages.ts
@@ -1,59 +1,144 @@
+// deno-lint-ignore-file require-await
import { NCache } from '@nostrify/nostrify';
+import { RelayPoolWorker } from 'nostr-relaypool';
+
import { Conf } from '@/config.ts';
-import { db } from '@/db.ts';
-import * as pipeline from '@/pipeline.ts';
-import { activeRelays, pool } from '@/pool.ts';
-import { EventsDB } from '@/storages/events-db.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
+import { EventsDB } from '@/storages/EventsDB.ts';
import { Optimizer } from '@/storages/optimizer.ts';
import { PoolStore } from '@/storages/pool-store.ts';
import { Reqmeister } from '@/storages/reqmeister.ts';
import { SearchStore } from '@/storages/search-store.ts';
import { InternalRelay } from '@/storages/InternalRelay.ts';
+import { UserStore } from '@/storages/UserStore.ts';
import { Time } from '@/utils/time.ts';
-/** Relay pool storage. */
-const client = new PoolStore({
- pool,
- relays: activeRelays,
- publisher: pipeline,
-});
-
-/** SQLite database to store events this Ditto server cares about. */
-const eventsDB = new EventsDB(db);
-
-/** In-memory data store for cached events. */
-const cache = new NCache({ max: 3000 });
-
-/** Batches requests for single events. */
-const reqmeister = new Reqmeister({
- client,
- delay: Time.seconds(1),
- timeout: Time.seconds(1),
-});
-
-/** Main Ditto storage adapter */
-const optimizer = new Optimizer({
- db: eventsDB,
- cache,
- client: reqmeister,
-});
-
-/** Storage to use for remote search. */
-const searchStore = new SearchStore({
- relay: Conf.searchRelay,
- fallback: optimizer,
-});
-
export class Storages {
- private static _pubsub: InternalRelay | undefined;
+ private static _db: Promise<EventsDB> | undefined;
+ private static _admin: Promise<UserStore> | undefined;
+ private static _cache: Promise<NCache> | undefined;
+ private static _client: Promise<PoolStore> | undefined;
+ private static _optimizer: Promise<Optimizer> | undefined;
+ private static _reqmeister: Promise<Reqmeister> | undefined;
+ private static _pubsub: Promise<InternalRelay> | undefined;
+ private static _search: Promise<SearchStore> | undefined;
- static get pubsub(): InternalRelay {
- if (!this._pubsub) {
- this._pubsub = new InternalRelay();
+ /** SQLite database to store events this Ditto server cares about. */
+ public static async db(): Promise<EventsDB> {
+ if (!this._db) {
+ this._db = (async () => {
+ const kysely = await DittoDB.getInstance();
+ return new EventsDB(kysely);
+ })();
}
+ return this._db;
+ }
+ /** Admin user storage. */
+ public static async admin(): Promise<UserStore> {
+ if (!this._admin) {
+ this._admin = Promise.resolve(new UserStore(Conf.pubkey, await this.db()));
+ }
+ return this._admin;
+ }
+
+ /** Internal pubsub relay between controllers and the pipeline. */
+ public static async pubsub(): Promise<InternalRelay> {
+ if (!this._pubsub) {
+ this._pubsub = Promise.resolve(new InternalRelay());
+ }
return this._pubsub;
}
-}
-export { cache, client, eventsDB, optimizer, reqmeister, searchStore };
+ /** Relay pool storage. */
+ public static async client(): Promise<PoolStore> {
+ if (!this._client) {
+ this._client = (async () => {
+ const db = await this.db();
+
+ const [relayList] = await db.query([
+ { kinds: [10002], authors: [Conf.pubkey], limit: 1 },
+ ]);
+
+ const tags = relayList?.tags ?? [];
+
+ const activeRelays = tags.reduce<string[]>((acc, [name, url, marker]) => {
+ if (name === 'r' && !marker) {
+ acc.push(url);
+ }
+ return acc;
+ }, []);
+
+ console.log(`pool: connecting to ${activeRelays.length} relays.`);
+
+ const worker = new Worker('https://unpkg.com/nostr-relaypool2@0.6.34/lib/nostr-relaypool.worker.js', {
+ type: 'module',
+ });
+
+ // @ts-ignore Wrong types.
+ const pool = new RelayPoolWorker(worker, activeRelays, {
+ autoReconnect: true,
+ // The pipeline verifies events.
+ skipVerification: true,
+ // The logging feature overwhelms the CPU and creates too many logs.
+ logErrorsAndNotices: false,
+ });
+
+ return new PoolStore({
+ pool,
+ relays: activeRelays,
+ });
+ })();
+ }
+ return this._client;
+ }
+
+ /** In-memory data store for cached events. */
+ public static async cache(): Promise<NCache> {
+ if (!this._cache) {
+ this._cache = Promise.resolve(new NCache({ max: 3000 }));
+ }
+ return this._cache;
+ }
+
+ /** Batches requests for single events. */
+ public static async reqmeister(): Promise<Reqmeister> {
+ if (!this._reqmeister) {
+ this._reqmeister = Promise.resolve(
+ new Reqmeister({
+ client: await this.client(),
+ delay: Time.seconds(1),
+ timeout: Time.seconds(1),
+ }),
+ );
+ }
+ return this._reqmeister;
+ }
+
+ /** Main Ditto storage adapter */
+ public static async optimizer(): Promise<Optimizer> {
+ if (!this._optimizer) {
+ this._optimizer = Promise.resolve(
+ new Optimizer({
+ db: await this.db(),
+ cache: await this.cache(),
+ client: await this.reqmeister(),
+ }),
+ );
+ }
+ return this._optimizer;
+ }
+
+ /** Storage to use for remote search. */
+ public static async search(): Promise<SearchStore> {
+ if (!this._search) {
+ this._search = Promise.resolve(
+ new SearchStore({
+ relay: Conf.searchRelay,
+ fallback: await this.optimizer(),
+ }),
+ );
+ }
+ return this._search;
+ }
+}
diff --git a/src/storages/EventsDB.test.ts b/src/storages/EventsDB.test.ts
new file mode 100644
index 0000000..16b429d
--- /dev/null
+++ b/src/storages/EventsDB.test.ts
@@ -0,0 +1,193 @@
+import { Database as Sqlite } from '@db/sqlite';
+import { DenoSqlite3Dialect } from '@soapbox/kysely-deno-sqlite';
+import { assertEquals, assertRejects } from '@std/assert';
+import { Kysely } from 'kysely';
+import { generateSecretKey } from 'nostr-tools';
+
+import { Conf } from '@/config.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
+import { DittoTables } from '@/db/DittoTables.ts';
+import { RelayError } from '@/RelayError.ts';
+import { EventsDB } from '@/storages/EventsDB.ts';
+import { eventFixture, genEvent } from '@/test.ts';
+
+/** Create in-memory database for testing. */
+const createDB = async () => {
+ const kysely = new Kysely<DittoTables>({
+ dialect: new DenoSqlite3Dialect({
+ database: new Sqlite(':memory:'),
+ }),
+ });
+ const eventsDB = new EventsDB(kysely);
+ await DittoDB.migrate(kysely);
+ return { eventsDB, kysely };
+};
+
+Deno.test('count filters', async () => {
+ const { eventsDB } = await createDB();
+ const event1 = await eventFixture('event-1');
+
+ assertEquals((await eventsDB.count([{ kinds: [1] }])).count, 0);
+ await eventsDB.event(event1);
+ assertEquals((await eventsDB.count([{ kinds: [1] }])).count, 1);
+});
+
+Deno.test('insert and filter events', async () => {
+ const { eventsDB } = await createDB();
+
+ const event1 = await eventFixture('event-1');
+ await eventsDB.event(event1);
+
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]);
+ assertEquals(await eventsDB.query([{ kinds: [3] }]), []);
+ assertEquals(await eventsDB.query([{ since: 1691091000 }]), [event1]);
+ assertEquals(await eventsDB.query([{ until: 1691091000 }]), []);
+ assertEquals(
+ await eventsDB.query([{ '#proxy': ['https://gleasonator.com/objects/8f6fac53-4f66-4c6e-ac7d-92e5e78c3e79'] }]),
+ [event1],
+ );
+});
+
+Deno.test('query events with domain search filter', async () => {
+ const { eventsDB, kysely } = await createDB();
+
+ const event1 = await eventFixture('event-1');
+ await eventsDB.event(event1);
+
+ assertEquals(await eventsDB.query([{}]), [event1]);
+ assertEquals(await eventsDB.query([{ search: 'domain:localhost:4036' }]), []);
+ assertEquals(await eventsDB.query([{ search: '' }]), [event1]);
+
+ await kysely
+ .insertInto('pubkey_domains')
+ .values({ pubkey: event1.pubkey, domain: 'localhost:4036', last_updated_at: event1.created_at })
+ .execute();
+
+ assertEquals(await eventsDB.query([{ kinds: [1], search: 'domain:localhost:4036' }]), [event1]);
+ assertEquals(await eventsDB.query([{ kinds: [1], search: 'domain:example.com' }]), []);
+});
+
+Deno.test('delete events', async () => {
+ const { eventsDB } = await createDB();
+
+ const [one, two] = [
+ { id: '1', kind: 1, pubkey: 'abc', content: 'hello world', created_at: 1, sig: '', tags: [] },
+ { id: '2', kind: 1, pubkey: 'abc', content: 'yolo fam', created_at: 2, sig: '', tags: [] },
+ ];
+
+ await eventsDB.event(one);
+ await eventsDB.event(two);
+
+ // Sanity check
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [two, one]);
+
+ await eventsDB.event({
+ kind: 5,
+ pubkey: one.pubkey,
+ tags: [['e', one.id]],
+ created_at: 0,
+ content: '',
+ id: '',
+ sig: '',
+ });
+
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [two]);
+});
+
+Deno.test("user cannot delete another user's event", async () => {
+ const { eventsDB } = await createDB();
+
+ const event = { id: '1', kind: 1, pubkey: 'abc', content: 'hello world', created_at: 1, sig: '', tags: [] };
+ await eventsDB.event(event);
+
+ // Sanity check
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [event]);
+
+ await eventsDB.event({
+ kind: 5,
+ pubkey: 'def', // different pubkey
+ tags: [['e', event.id]],
+ created_at: 0,
+ content: '',
+ id: '',
+ sig: '',
+ });
+
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [event]);
+});
+
+Deno.test('admin can delete any event', async () => {
+ const { eventsDB } = await createDB();
+
+ const [one, two] = [
+ { id: '1', kind: 1, pubkey: 'abc', content: 'hello world', created_at: 1, sig: '', tags: [] },
+ { id: '2', kind: 1, pubkey: 'abc', content: 'yolo fam', created_at: 2, sig: '', tags: [] },
+ ];
+
+ await eventsDB.event(one);
+ await eventsDB.event(two);
+
+ // Sanity check
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [two, one]);
+
+ await eventsDB.event({
+ kind: 5,
+ pubkey: Conf.pubkey, // Admin pubkey
+ tags: [['e', one.id]],
+ created_at: 0,
+ content: '',
+ id: '',
+ sig: '',
+ });
+
+ assertEquals(await eventsDB.query([{ kinds: [1] }]), [two]);
+});
+
+Deno.test('throws a RelayError when inserting an event deleted by the admin', async () => {
+ const { eventsDB } = await createDB();
+
+ const event = genEvent();
+ await eventsDB.event(event);
+
+ const deletion = genEvent({ kind: 5, tags: [['e', event.id]] }, Conf.seckey);
+ await eventsDB.event(deletion);
+
+ await assertRejects(
+ () => eventsDB.event(event),
+ RelayError,
+ 'event deleted by admin',
+ );
+});
+
+Deno.test('throws a RelayError when inserting an event deleted by a user', async () => {
+ const { eventsDB } = await createDB();
+
+ const sk = generateSecretKey();
+
+ const event = genEvent({}, sk);
+ await eventsDB.event(event);
+
+ const deletion = genEvent({ kind: 5, tags: [['e', event.id]] }, sk);
+ await eventsDB.event(deletion);
+
+ await assertRejects(
+ () => eventsDB.event(event),
+ RelayError,
+ 'event deleted by user',
+ );
+});
+
+Deno.test('inserting replaceable events', async () => {
+ const { eventsDB } = await createDB();
+
+ const event = await eventFixture('event-0');
+ await eventsDB.event(event);
+
+ const olderEvent = { ...event, id: '123', created_at: event.created_at - 1 };
+ await eventsDB.event(olderEvent);
+ assertEquals(await eventsDB.query([{ kinds: [0], authors: [event.pubkey] }]), [event]);
+
+ const newerEvent = { ...event, id: '123', created_at: event.created_at + 1 };
+ await eventsDB.event(newerEvent);
+ assertEquals(await eventsDB.query([{ kinds: [0] }]), [newerEvent]);
+});
diff --git a/src/storages/EventsDB.ts b/src/storages/EventsDB.ts
new file mode 100644
index 0000000..5a3839a
--- /dev/null
+++ b/src/storages/EventsDB.ts
@@ -0,0 +1,229 @@
+// deno-lint-ignore-file require-await
+
+import { NDatabase, NIP50, NKinds, NostrEvent, NostrFilter, NSchema as n, NStore } from '@nostrify/nostrify';
+import { Stickynotes } from '@soapbox/stickynotes';
+import { Kysely } from 'kysely';
+
+import { Conf } from '@/config.ts';
+import { DittoTables } from '@/db/DittoTables.ts';
+import { normalizeFilters } from '@/filter.ts';
+import { purifyEvent } from '@/storages/hydrate.ts';
+import { getTagSet } from '@/tags.ts';
+import { isNostrId, isURL } from '@/utils.ts';
+import { abortError } from '@/utils/abort.ts';
+import { RelayError } from '@/RelayError.ts';
+
+/** Function to decide whether or not to index a tag. */
+type TagCondition = ({ event, count, value }: {
+ event: NostrEvent;
+ count: number;
+ value: string;
+}) => boolean;
+
+/** SQLite database storage adapter for Nostr events. */
+class EventsDB implements NStore {
+ private store: NDatabase;
+ private console = new Stickynotes('ditto:db:events');
+
+ /** Conditions for when to index certain tags. */
+ static tagConditions: Record<string, TagCondition> = {
+ 'd': ({ event, count }) => count === 0 && NKinds.parameterizedReplaceable(event.kind),
+ 'e': ({ event, count, value }) => ((event.kind === 10003) || count < 15) && isNostrId(value),
+ 'L': ({ event, count }) => event.kind === 1985 || count === 0,
+ 'l': ({ event, count }) => event.kind === 1985 || count === 0,
+ 'media': ({ count, value }) => (count < 4) && isURL(value),
+ 'P': ({ count, value }) => count === 0 && isNostrId(value),
+ 'p': ({ event, count, value }) => (count < 15 || event.kind === 3) && isNostrId(value),
+ 'proxy': ({ count, value }) => count === 0 && isURL(value),
+ 'q': ({ event, count, value }) => count === 0 && event.kind === 1 && isNostrId(value),
+ 't': ({ count, value }) => count < 5 && value.length < 50,
+ 'name': ({ event, count }) => event.kind === 30361 && count === 0,
+ 'role': ({ event, count }) => event.kind === 30361 && count === 0,
+ };
+
+ constructor(private kysely: Kysely<DittoTables>) {
+ let fts: 'sqlite' | 'postgres' | undefined;
+
+ if (Conf.databaseUrl.protocol === 'sqlite:') {
+ fts = 'sqlite';
+ }
+ if (['postgres:', 'postgresql:'].includes(Conf.databaseUrl.protocol!)) {
+ fts = 'postgres';
+ }
+
+ this.store = new NDatabase(kysely, {
+ fts,
+ indexTags: EventsDB.indexTags,
+ searchText: EventsDB.searchText,
+ });
+ }
+
+ /** Insert an event (and its tags) into the database. */
+ async event(event: NostrEvent, _opts?: { signal?: AbortSignal }): Promise<void> {
+ event = purifyEvent(event);
+ this.console.debug('EVENT', JSON.stringify(event));
+
+ if (await this.isDeletedAdmin(event)) {
+ throw new RelayError('blocked', 'event deleted by admin');
+ }
+
+ await this.deleteEventsAdmin(event);
+
+ try {
+ await this.store.event(event);
+ } catch (e) {
+ if (e.message === 'Cannot add a deleted event') {
+ throw new RelayError('blocked', 'event deleted by user');
+ } else if (e.message === 'Cannot replace an event with an older event') {
+ return;
+ } else {
+ this.console.debug('ERROR', e.message);
+ }
+ }
+ }
+
+ /** Check if an event has been deleted by the admin. */
+ private async isDeletedAdmin(event: NostrEvent): Promise<boolean> {
+ const [deletion] = await this.query([
+ { kinds: [5], authors: [Conf.pubkey], '#e': [event.id], limit: 1 },
+ ]);
+ return !!deletion;
+ }
+
+ /** The DITTO_NSEC can delete any event from the database. NDatabase already handles user deletions. */
+ private async deleteEventsAdmin(event: NostrEvent): Promise<void> {
+ if (event.kind === 5 && event.pubkey === Conf.pubkey) {
+ const ids = getTagSet(event.tags, 'e');
+ await this.remove([{ ids: [...ids] }]);
+ }
+ }
+
+ /** Get events for filters from the database. */
+ async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise<NostrEvent[]> {
+ filters = await this.expandFilters(filters);
+
+ if (opts.signal?.aborted) return Promise.resolve([]);
+ if (!filters.length) return Promise.resolve([]);
+
+ this.console.debug('REQ', JSON.stringify(filters));
+
+ return this.store.query(filters, opts);
+ }
+
+ /** Delete events based on filters from the database. */
+ async remove(filters: NostrFilter[], _opts?: { signal?: AbortSignal }): Promise<void> {
+ if (!filters.length) return Promise.resolve();
+ this.console.debug('DELETE', JSON.stringify(filters));
+
+ return this.store.remove(filters);
+ }
+
+ /** Get number of events that would be returned by filters. */
+ async count(
+ filters: NostrFilter[],
+ opts: { signal?: AbortSignal } = {},
+ ): Promise<{ count: number; approximate: boolean }> {
+ if (opts.signal?.aborted) return Promise.reject(abortError());
+ if (!filters.length) return Promise.resolve({ count: 0, approximate: false });
+
+ this.console.debug('COUNT', JSON.stringify(filters));
+
+ return this.store.count(filters);
+ }
+
+ /** Return only the tags that should be indexed. */
+ static indexTags(event: NostrEvent): string[][] {
+ const tagCounts: Record<string, number> = {};
+
+ function getCount(name: string) {
+ return tagCounts[name] || 0;
+ }
+
+ function incrementCount(name: string) {
+ tagCounts[name] = getCount(name) + 1;
+ }
+
+ function checkCondition(name: string, value: string, condition: TagCondition) {
+ return condition({
+ event,
+ count: getCount(name),
+ value,
+ });
+ }
+
+ return event.tags.reduce<string[][]>((results, tag) => {
+ const [name, value] = tag;
+ const condition = EventsDB.tagConditions[name] as TagCondition | undefined;
+
+ if (value && condition && value.length < 200 && checkCondition(name, value, condition)) {
+ results.push(tag);
+ }
+
+ incrementCount(name);
+ return results;
+ }, []);
+ }
+
+ /** Build a search index from the event. */
+ static searchText(event: NostrEvent): string {
+ switch (event.kind) {
+ case 0:
+ return EventsDB.buildUserSearchContent(event);
+ case 1:
+ return event.content;
+ case 30009:
+ return EventsDB.buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt'));
+ default:
+ return '';
+ }
+ }
+
+ /** Build search content for a user. */
+ static buildUserSearchContent(event: NostrEvent): string {
+ const { name, nip05 } = n.json().pipe(n.metadata()).catch({}).parse(event.content);
+ return [name, nip05].filter(Boolean).join('\n');
+ }
+
+ /** Build search content from tag values. */
+ static buildTagsSearchContent(tags: string[][]): string {
+ return tags.map(([_tag, value]) => value).join('\n');
+ }
+
+ /** Converts filters to more performant, simpler filters that are better for SQLite. */
+ async expandFilters(filters: NostrFilter[]): Promise<NostrFilter[]> {
+ for (const filter of filters) {
+ if (filter.search) {
+ const tokens = NIP50.parseInput(filter.search);
+
+ const domain = (tokens.find((t) =>
+ typeof t === 'object' && t.key === 'domain'
+ ) as { key: 'domain'; value: string } | undefined)?.value;
+
+ if (domain) {
+ const query = this.kysely
+ .selectFrom('pubkey_domains')
+ .select('pubkey')
+ .where('domain', '=', domain);
+
+ if (filter.authors) {
+ query.where('pubkey', 'in', filter.authors);
+ }
+
+ const pubkeys = await query
+ .execute()
+ .then((rows) =>
+ rows.map((row) => row.pubkey)
+ );
+
+ filter.authors = pubkeys;
+ }
+
+ filter.search = tokens.filter((t) => typeof t === 'string').join(' ');
+ }
+ }
+
+ return normalizeFilters(filters); // Improves performance of `{ kinds: [0], authors: ['...'] }` queries.
+ }
+}
+
+export { EventsDB };
diff --git a/src/storages/InternalRelay.ts b/src/storages/InternalRelay.ts
index d42f94f..233a095 100644
--- a/src/storages/InternalRelay.ts
+++ b/src/storages/InternalRelay.ts
@@ -9,8 +9,8 @@ import {
NRelay,
} from '@nostrify/nostrify';
import { Machina } from '@nostrify/nostrify/utils';
+import { matchFilter } from 'nostr-tools';
-import { matchFilter } from '@/deps.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { purifyEvent } from '@/storages/hydrate.ts';
diff --git a/src/storages/UserStore.test.ts b/src/storages/UserStore.test.ts
index 11f96cb..d04ece0 100644
--- a/src/storages/UserStore.test.ts
+++ b/src/storages/UserStore.test.ts
@@ -1,6 +1,6 @@
import { MockRelay } from '@nostrify/nostrify/test';
-import { assertEquals } from '@/deps-test.ts';
+import { assertEquals } from '@std/assert';
import { UserStore } from '@/storages/UserStore.ts';
import userBlack from '~/fixtures/events/kind-0-black.json' with { type: 'json' };
@@ -8,7 +8,7 @@ import userMe from '~/fixtures/events/event-0-makes-repost-with-quote-repost.jso
import blockEvent from '~/fixtures/events/kind-10000-black-blocks-user-me.json' with { type: 'json' };
import event1authorUserMe from '~/fixtures/events/event-1-quote-repost-will-be-reposted.json' with { type: 'json' };
-Deno.test('query events of users that are not blocked', async () => {
+Deno.test('query events of users that are not muted', async () => {
const userBlackCopy = structuredClone(userBlack);
const userMeCopy = structuredClone(userMe);
const blockEventCopy = structuredClone(blockEvent);
@@ -25,3 +25,17 @@ Deno.test('query events of users that are not blocked', async () => {
assertEquals(await store.query([{ kinds: [1] }], { limit: 1 }), []);
});
+
+Deno.test('user never muted anyone', async () => {
+ const userBlackCopy = structuredClone(userBlack);
+ const userMeCopy = structuredClone(userMe);
+
+ const db = new MockRelay();
+
+ const store = new UserStore(userBlackCopy.pubkey, db);
+
+ await store.event(userBlackCopy);
+ await store.event(userMeCopy);
+
+ assertEquals(await store.query([{ kinds: [0], authors: [userMeCopy.pubkey] }], { limit: 1 }), [userMeCopy]);
+});
diff --git a/src/storages/UserStore.ts b/src/storages/UserStore.ts
index 78c3d33..c5657b6 100644
--- a/src/storages/UserStore.ts
+++ b/src/storages/UserStore.ts
@@ -1,35 +1,25 @@
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
+
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getTagSet } from '@/tags.ts';
export class UserStore implements NStore {
- private store: NStore;
- private pubkey: string;
-
- constructor(pubkey: string, store: NStore) {
- this.pubkey = pubkey;
- this.store = store;
- }
+ constructor(private pubkey: string, private store: NStore) {}
async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise {
return await this.store.event(event, opts);
}
/**
- * Query events that `pubkey` did not block
+ * Query events that `pubkey` did not mute
* https://github.com/nostr-protocol/nips/blob/master/51.md#standard-lists
*/
async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise {
- const allEvents = await this.store.query(filters, opts);
+ const events = await this.store.query(filters, opts);
+ const pubkeys = await this.getMutedPubkeys();
- const mutedPubkeysEvent = await this.getMuteList();
- if (!mutedPubkeysEvent) {
- return allEvents;
- }
- const mutedPubkeys = getTagSet(mutedPubkeysEvent.tags, 'p');
-
- return allEvents.filter((event) => {
- return mutedPubkeys.has(event.pubkey) === false;
+ return events.filter((event) => {
+ return event.kind === 0 || !pubkeys.has(event.pubkey);
});
}
@@ -37,4 +27,12 @@ export class UserStore implements NStore {
const [muteList] = await this.store.query([{ authors: [this.pubkey], kinds: [10000], limit: 1 }]);
return muteList;
}
+
+ private async getMutedPubkeys(): Promise> {
+ const mutedPubkeysEvent = await this.getMuteList();
+ if (!mutedPubkeysEvent) {
+ return new Set();
+ }
+ return getTagSet(mutedPubkeysEvent.tags, 'p');
+ }
}
diff --git a/src/storages/events-db.test.ts b/src/storages/events-db.test.ts
deleted file mode 100644
index dd92c1b..0000000
--- a/src/storages/events-db.test.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-import { db } from '@/db.ts';
-import { assertEquals, assertRejects } from '@/deps-test.ts';
-
-import event0 from '~/fixtures/events/event-0.json' with { type: 'json' };
-import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
-
-import { EventsDB } from '@/storages/events-db.ts';
-
-const eventsDB = new EventsDB(db);
-
-Deno.test('count filters', async () => {
- assertEquals((await eventsDB.count([{ kinds: [1] }])).count, 0);
- await eventsDB.event(event1);
- assertEquals((await eventsDB.count([{ kinds: [1] }])).count, 1);
-});
-
-Deno.test('insert and filter events', async () => {
- await eventsDB.event(event1);
-
- assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]);
- assertEquals(await eventsDB.query([{ kinds: [3] }]), []);
- assertEquals(await eventsDB.query([{ since: 1691091000 }]), [event1]);
- assertEquals(await eventsDB.query([{ until: 1691091000 }]), []);
- assertEquals(
- await eventsDB.query([{ '#proxy': ['https://gleasonator.com/objects/8f6fac53-4f66-4c6e-ac7d-92e5e78c3e79'] }]),
- [event1],
- );
-});
-
-Deno.test('query events with domain search filter', async () => {
- await eventsDB.event(event1);
-
- assertEquals(await eventsDB.query([{}]), [event1]);
- assertEquals(await eventsDB.query([{ search: 'domain:localhost:8000' }]), []);
- assertEquals(await eventsDB.query([{ search: '' }]), [event1]);
-
- await db
- .insertInto('pubkey_domains')
- .values({ pubkey: event1.pubkey, domain: 'localhost:8000', last_updated_at: event1.created_at })
- .execute();
-
- assertEquals(await eventsDB.query([{ kinds: [1], search: 'domain:localhost:8000' }]), [event1]);
- assertEquals(await eventsDB.query([{ kinds: [1], search: 'domain:example.com' }]), []);
-});
-
-Deno.test('delete events', async () => {
- await eventsDB.event(event1);
- assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]);
- await eventsDB.remove([{ kinds: [1] }]);
- assertEquals(await eventsDB.query([{ kinds: [1] }]), []);
-});
-
-Deno.test('inserting replaceable events', async () => {
- assertEquals((await eventsDB.count([{ kinds: [0], authors: [event0.pubkey] }])).count, 0);
-
- await eventsDB.event(event0);
- await assertRejects(() => eventsDB.event(event0));
- assertEquals((await eventsDB.count([{ kinds: [0], authors: [event0.pubkey] }])).count, 1);
-
- const changeEvent = { ...event0, id: '123', created_at: event0.created_at + 1 };
- await eventsDB.event(changeEvent);
- assertEquals(await eventsDB.query([{ kinds: [0] }]), [changeEvent]);
-});
diff --git a/src/storages/events-db.ts b/src/storages/events-db.ts
deleted file mode 100644
index 6d80f70..0000000
--- a/src/storages/events-db.ts
+++ /dev/null
@@ -1,422 +0,0 @@
-import { NIP50, NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
-import { Conf } from '@/config.ts';
-import { DittoTables } from '@/db/DittoTables.ts';
-import { Debug, Kysely, type SelectQueryBuilder } from '@/deps.ts';
-import { normalizeFilters } from '@/filter.ts';
-import { DittoEvent } from '@/interfaces/DittoEvent.ts';
-import { isDittoInternalKind, isParameterizedReplaceableKind, isReplaceableKind } from '@/kinds.ts';
-import { jsonMetaContentSchema } from '@/schemas/nostr.ts';
-import { purifyEvent } from '@/storages/hydrate.ts';
-import { isNostrId, isURL } from '@/utils.ts';
-import { abortError } from '@/utils/abort.ts';
-
-/** Function to decide whether or not to index a tag. */
-type TagCondition = ({ event, count, value }: {
- event: DittoEvent;
- count: number;
- value: string;
-}) => boolean;
-
-/** Conditions for when to index certain tags. */
-const tagConditions: Record = {
- 'd': ({ event, count }) => count === 0 && isParameterizedReplaceableKind(event.kind),
- 'e': ({ event, count, value }) => ((event.user && event.kind === 10003) || count < 15) && isNostrId(value),
- 'L': ({ event, count }) => event.kind === 1985 || count === 0,
- 'l': ({ event, count }) => event.kind === 1985 || count === 0,
- 'media': ({ event, count, value }) => (event.user || count < 4) && isURL(value),
- 'P': ({ event, count, value }) => event.kind === 9735 && count === 0 && isNostrId(value),
- 'p': ({ event, count, value }) => (count < 15 || event.kind === 3) && isNostrId(value),
- 'proxy': ({ count, value }) => count === 0 && isURL(value),
- 'q': ({ event, count, value }) => count === 0 && event.kind === 1 && isNostrId(value),
- 't': ({ count, value }) => count < 5 && value.length < 50,
- 'name': ({ event, count }) => event.kind === 30361 && count === 0,
- 'role': ({ event, count }) => event.kind === 30361 && count === 0,
-};
-
-type EventQuery = SelectQueryBuilder;
-
-/** SQLite database storage adapter for Nostr events. */
-class EventsDB implements NStore {
- #db: Kysely;
- #debug = Debug('ditto:db:events');
- private protocol = Conf.databaseUrl.protocol;
-
- constructor(db: Kysely) {
- this.#db = db;
- }
-
- /** Insert an event (and its tags) into the database. */
- async event(event: NostrEvent, _opts?: { signal?: AbortSignal }): Promise {
- event = purifyEvent(event);
- this.#debug('EVENT', JSON.stringify(event));
-
- if (isDittoInternalKind(event.kind) && event.pubkey !== Conf.pubkey) {
- throw new Error('Internal events can only be stored by the server keypair');
- }
-
- return await this.#db.transaction().execute(async (trx) => {
- /** Insert the event into the database. */
- async function addEvent() {
- await trx.insertInto('events')
- .values({ ...event, tags: JSON.stringify(event.tags) })
- .execute();
- }
-
- const protocol = this.protocol;
- /** Add search data to the FTS table. */
- async function indexSearch() {
- if (protocol !== 'sqlite:') return;
- const searchContent = buildSearchContent(event);
- if (!searchContent) return;
- await trx.insertInto('events_fts')
- .values({ id: event.id, content: searchContent.substring(0, 1000) })
- .execute();
- }
-
- /** Index event tags depending on the conditions defined above. */
- async function indexTags() {
- const tags = filterIndexableTags(event);
- const rows = tags.map(([tag, value]) => ({ event_id: event.id, tag, value }));
-
- if (!tags.length) return;
- await trx.insertInto('tags')
- .values(rows)
- .execute();
- }
-
- if (isReplaceableKind(event.kind)) {
- const prevEvents = await this.getFilterQuery(trx, { kinds: [event.kind], authors: [event.pubkey] }).execute();
- for (const prevEvent of prevEvents) {
- if (prevEvent.created_at >= event.created_at) {
- throw new Error('Cannot replace an event with an older event');
- }
- }
- await this.deleteEventsTrx(trx, [{ kinds: [event.kind], authors: [event.pubkey] }]);
- }
-
- if (isParameterizedReplaceableKind(event.kind)) {
- const d = event.tags.find(([tag]) => tag === 'd')?.[1];
- if (d) {
- const prevEvents = await this.getFilterQuery(trx, { kinds: [event.kind], authors: [event.pubkey], '#d': [d] })
- .execute();
- for (const prevEvent of prevEvents) {
- if (prevEvent.created_at >= event.created_at) {
- throw new Error('Cannot replace an event with an older event');
- }
- }
- await this.deleteEventsTrx(trx, [{ kinds: [event.kind], authors: [event.pubkey], '#d': [d] }]);
- }
- }
-
- // Run the queries.
- await Promise.all([
- addEvent(),
- indexTags(),
- indexSearch(),
- ]);
- }).catch((error) => {
- // Don't throw for duplicate events.
- if (error.message.includes('UNIQUE constraint failed')) {
- return;
- } else {
- throw error;
- }
- });
- }
-
- /** Build the query for a filter. */
- getFilterQuery(db: Kysely, filter: NostrFilter): EventQuery {
- let query = db
- .selectFrom('events')
- .select([
- 'events.id',
- 'events.kind',
- 'events.pubkey',
- 'events.content',
- 'events.tags',
- 'events.created_at',
- 'events.sig',
- ])
- .where('events.deleted_at', 'is', null)
- .orderBy('events.created_at', 'desc');
-
- for (const [key, value] of Object.entries(filter)) {
- if (value === undefined) continue;
-
- switch (key as keyof NostrFilter) {
- case 'ids':
- query = query.where('events.id', 'in', filter.ids!);
- break;
- case 'kinds':
- query = query.where('events.kind', 'in', filter.kinds!);
- break;
- case 'authors':
- query = query.where('events.pubkey', 'in', filter.authors!);
- break;
- case 'since':
- query = query.where('events.created_at', '>=', filter.since!);
- break;
- case 'until':
- query = query.where('events.created_at', '<=', filter.until!);
- break;
- case 'limit':
- query = query.limit(filter.limit!);
- break;
- }
- }
-
- const joinedQuery = query.leftJoin('tags', 'tags.event_id', 'events.id');
-
- for (const [key, value] of Object.entries(filter)) {
- if (key.startsWith('#') && Array.isArray(value)) {
- const name = key.replace(/^#/, '');
- query = joinedQuery
- .where('tags.tag', '=', name)
- .where('tags.value', 'in', value);
- }
- }
-
- if (filter.search && this.protocol === 'sqlite:') {
- query = query
- .innerJoin('events_fts', 'events_fts.id', 'events.id')
- .where('events_fts.content', 'match', JSON.stringify(filter.search));
- }
-
- return query;
- }
-
- /** Combine filter queries into a single union query. */
- getEventsQuery(filters: NostrFilter[]) {
- return filters
- .map((filter) => this.#db.selectFrom(() => this.getFilterQuery(this.#db, filter).as('events')).selectAll())
- .reduce((result, query) => result.unionAll(query));
- }
-
- /** Query to get user events, joined by tags. */
- usersQuery() {
- return this.getFilterQuery(this.#db, { kinds: [30361], authors: [Conf.pubkey] })
- .leftJoin('tags', 'tags.event_id', 'events.id')
- .where('tags.tag', '=', 'd')
- .select('tags.value as d_tag')
- .as('users');
- }
-
- /** Converts filters to more performant, simpler filters that are better for SQLite. */
- async expandFilters(filters: NostrFilter[]): Promise {
- filters = normalizeFilters(filters); // Improves performance of `{ kinds: [0], authors: ['...'] }` queries.
-
- for (const filter of filters) {
- if (filter.search) {
- const tokens = NIP50.parseInput(filter.search);
-
- const domain = (tokens.find((t) =>
- typeof t === 'object' && t.key === 'domain'
- ) as { key: 'domain'; value: string } | undefined)?.value;
-
- if (domain) {
- const query = this.#db
- .selectFrom('pubkey_domains')
- .select('pubkey')
- .where('domain', '=', domain);
-
- if (filter.authors) {
- query.where('pubkey', 'in', filter.authors);
- }
-
- const pubkeys = await query
- .execute()
- .then((rows) =>
- rows.map((row) => row.pubkey)
- );
-
- filter.authors = pubkeys;
- }
-
- filter.search = tokens.filter((t) => typeof t === 'string').join(' ');
- }
- }
-
- return filters;
- }
-
- /** Get events for filters from the database. */
- async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise {
- filters = await this.expandFilters(filters);
-
- if (opts.signal?.aborted) return Promise.resolve([]);
- if (!filters.length) return Promise.resolve([]);
-
- this.#debug('REQ', JSON.stringify(filters));
- let query = this.getEventsQuery(filters);
-
- if (typeof opts.limit === 'number') {
- query = query.limit(opts.limit);
- }
-
- return (await query.execute()).map((row) => {
- const event: DittoEvent = {
- id: row.id,
- kind: row.kind,
- pubkey: row.pubkey,
- content: row.content,
- created_at: row.created_at,
- tags: JSON.parse(row.tags),
- sig: row.sig,
- };
-
- if (row.author_id) {
- event.author = {
- id: row.author_id,
- kind: row.author_kind! as 0,
- pubkey: row.author_pubkey!,
- content: row.author_content!,
- created_at: row.author_created_at!,
- tags: JSON.parse(row.author_tags!),
- sig: row.author_sig!,
- };
- }
-
- if (typeof row.author_stats_followers_count === 'number') {
- event.author_stats = {
- followers_count: row.author_stats_followers_count,
- following_count: row.author_stats_following_count!,
- notes_count: row.author_stats_notes_count!,
- };
- }
-
- if (typeof row.stats_replies_count === 'number') {
- event.event_stats = {
- replies_count: row.stats_replies_count,
- reposts_count: row.stats_reposts_count!,
- reactions_count: row.stats_reactions_count!,
- };
- }
-
- return event;
- });
- }
-
- /** Delete events from each table. Should be run in a transaction! */
- async deleteEventsTrx(db: Kysely, filters: NostrFilter[]) {
- if (!filters.length) return Promise.resolve();
- this.#debug('DELETE', JSON.stringify(filters));
-
- const query = this.getEventsQuery(filters).clearSelect().select('id');
-
- return await db.updateTable('events')
- .where('id', 'in', () => query)
- .set({ deleted_at: Math.floor(Date.now() / 1000) })
- .execute();
- }
-
- /** Delete events based on filters from the database. */
- async remove(filters: NostrFilter[], _opts?: { signal?: AbortSignal }): Promise {
- if (!filters.length) return Promise.resolve();
- this.#debug('DELETE', JSON.stringify(filters));
-
- await this.#db.transaction().execute((trx) => this.deleteEventsTrx(trx, filters));
- }
-
- /** Get number of events that would be returned by filters. */
- async count(
- filters: NostrFilter[],
- opts: { signal?: AbortSignal } = {},
- ): Promise<{ count: number; approximate: boolean }> {
- if (opts.signal?.aborted) return Promise.reject(abortError());
- if (!filters.length) return Promise.resolve({ count: 0, approximate: false });
-
- this.#debug('COUNT', JSON.stringify(filters));
- const query = this.getEventsQuery(filters);
-
- const [{ count }] = await query
- .clearSelect()
- .select((eb) => eb.fn.count('id').as('count'))
- .execute();
-
- return {
- count: Number(count),
- approximate: false,
- };
- }
-}
-
-/** Return only the tags that should be indexed. */
-function filterIndexableTags(event: DittoEvent): string[][] {
- const tagCounts: Record = {};
-
- function getCount(name: string) {
- return tagCounts[name] || 0;
- }
-
- function incrementCount(name: string) {
- tagCounts[name] = getCount(name) + 1;
- }
-
- function checkCondition(name: string, value: string, condition: TagCondition) {
- return condition({
- event,
- count: getCount(name),
- value,
- });
- }
-
- return event.tags.reduce((results, tag) => {
- const [name, value] = tag;
- const condition = tagConditions[name] as TagCondition | undefined;
-
- if (value && condition && value.length < 200 && checkCondition(name, value, condition)) {
- results.push(tag);
- }
-
- incrementCount(name);
- return results;
- }, []);
-}
-
-/** Build a search index from the event. */
-function buildSearchContent(event: NostrEvent): string {
- switch (event.kind) {
- case 0:
- return buildUserSearchContent(event);
- case 1:
- return event.content;
- case 30009:
- return buildTagsSearchContent(event.tags.filter(([t]) => t !== 'alt'));
- default:
- return '';
- }
-}
-
-/** Build search content for a user. */
-function buildUserSearchContent(event: NostrEvent): string {
- const { name, nip05, about } = jsonMetaContentSchema.parse(event.content);
- return [name, nip05, about].filter(Boolean).join('\n');
-}
-
-/** Build search content from tag values. */
-function buildTagsSearchContent(tags: string[][]): string {
- return tags.map(([_tag, value]) => value).join('\n');
-}
-
-export { EventsDB };
diff --git a/src/storages/hydrate.test.ts b/src/storages/hydrate.test.ts
index c0c0a42..1edafd7 100644
--- a/src/storages/hydrate.test.ts
+++ b/src/storages/hydrate.test.ts
@@ -1,133 +1,135 @@
-import { assertEquals } from '@/deps-test.ts';
+import { assertEquals } from '@std/assert';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { MockRelay } from '@nostrify/nostrify/test';
-import event0 from '~/fixtures/events/event-0.json' with { type: 'json' };
-import event0madePost from '~/fixtures/events/event-0-the-one-who-post-and-users-repost.json' with { type: 'json' };
-import event0madeRepost from '~/fixtures/events/event-0-the-one-who-repost.json' with { type: 'json' };
-import event0madeQuoteRepost from '~/fixtures/events/event-0-the-one-who-quote-repost.json' with { type: 'json' };
-import event0madeRepostWithQuoteRepost from '~/fixtures/events/event-0-makes-repost-with-quote-repost.json' with {
- type: 'json',
-};
-import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
-import event1quoteRepost from '~/fixtures/events/event-1-quote-repost.json' with { type: 'json' };
-import event1futureIsMine from '~/fixtures/events/event-1-will-be-reposted-with-quote-repost.json' with {
- type: 'json',
-};
-import event1quoteRepostLatin from '~/fixtures/events/event-1-quote-repost-will-be-reposted.json' with { type: 'json' };
-import event1willBeQuoteReposted from '~/fixtures/events/event-1-that-will-be-quote-reposted.json' with {
- type: 'json',
-};
-import event1reposted from '~/fixtures/events/event-1-reposted.json' with { type: 'json' };
-import event6 from '~/fixtures/events/event-6.json' with { type: 'json' };
-import event6ofQuoteRepost from '~/fixtures/events/event-6-of-quote-repost.json' with { type: 'json' };
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
+import { eventFixture } from '@/test.ts';
Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => {
const db = new MockRelay();
- const event0copy = structuredClone(event0);
- const event1copy = structuredClone(event1);
+ const event0 = await eventFixture('event-0');
+ const event1 = await eventFixture('event-1');
// Save events to database
- await db.event(event0copy);
- await db.event(event1copy);
-
- assertEquals((event1copy as DittoEvent).author, undefined, "Event hasn't been hydrated yet");
+ await db.event(event0);
+ await db.event(event1);
await hydrateEvents({
- events: [event1copy],
- storage: db,
+ events: [event1],
+ store: db,
});
- const expectedEvent = { ...event1copy, author: event0copy };
- assertEquals(event1copy, expectedEvent);
+ const expectedEvent = { ...event1, author: event0 };
+ assertEquals(event1, expectedEvent);
});
Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => {
const db = new MockRelay();
- const event0madePostCopy = structuredClone(event0madePost);
- const event0madeRepostCopy = structuredClone(event0madeRepost);
- const event1repostedCopy = structuredClone(event1reposted);
- const event6copy = structuredClone(event6);
+ const event0madePost = await eventFixture('event-0-the-one-who-post-and-users-repost');
+ const event0madeRepost = await eventFixture('event-0-the-one-who-repost');
+ const event1reposted = await eventFixture('event-1-reposted');
+ const event6 = await eventFixture('event-6');
// Save events to database
- await db.event(event0madePostCopy);
- await db.event(event0madeRepostCopy);
- await db.event(event1repostedCopy);
- await db.event(event6copy);
-
- assertEquals((event6copy as DittoEvent).author, undefined, "Event hasn't hydrated author yet");
- assertEquals((event6copy as DittoEvent).repost, undefined, "Event hasn't hydrated repost yet");
+ await db.event(event0madePost);
+ await db.event(event0madeRepost);
+ await db.event(event1reposted);
+ await db.event(event6);
await hydrateEvents({
- events: [event6copy],
- storage: db,
+ events: [event6],
+ store: db,
});
const expectedEvent6 = {
- ...event6copy,
- author: event0madeRepostCopy,
- repost: { ...event1repostedCopy, author: event0madePostCopy },
+ ...event6,
+ author: event0madeRepost,
+ repost: { ...event1reposted, author: event0madePost },
};
- assertEquals(event6copy, expectedEvent6);
+ assertEquals(event6, expectedEvent6);
});
Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
const db = new MockRelay();
- const event0madeQuoteRepostCopy = structuredClone(event0madeQuoteRepost);
- const event0copy = structuredClone(event0);
- const event1quoteRepostCopy = structuredClone(event1quoteRepost);
- const event1willBeQuoteRepostedCopy = structuredClone(event1willBeQuoteReposted);
+ const event0madeQuoteRepost = await eventFixture('event-0-the-one-who-quote-repost');
+ const event0 = await eventFixture('event-0');
+ const event1quoteRepost = await eventFixture('event-1-quote-repost');
+ const event1willBeQuoteReposted = await eventFixture('event-1-that-will-be-quote-reposted');
// Save events to database
- await db.event(event0madeQuoteRepostCopy);
- await db.event(event0copy);
- await db.event(event1quoteRepostCopy);
- await db.event(event1willBeQuoteRepostedCopy);
+ await db.event(event0madeQuoteRepost);
+ await db.event(event0);
+ await db.event(event1quoteRepost);
+ await db.event(event1willBeQuoteReposted);
await hydrateEvents({
- events: [event1quoteRepostCopy],
- storage: db,
+ events: [event1quoteRepost],
+ store: db,
});
const expectedEvent1quoteRepost = {
- ...event1quoteRepostCopy,
- author: event0madeQuoteRepostCopy,
- quote_repost: { ...event1willBeQuoteRepostedCopy, author: event0copy },
+ ...event1quoteRepost,
+ author: event0madeQuoteRepost,
+ quote: { ...event1willBeQuoteReposted, author: event0 },
};
- assertEquals(event1quoteRepostCopy, expectedEvent1quoteRepost);
+ assertEquals(event1quoteRepost, expectedEvent1quoteRepost);
});
Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async () => {
const db = new MockRelay();
- const event0copy = structuredClone(event0madeRepostWithQuoteRepost);
- const event1copy = structuredClone(event1futureIsMine);
- const event1quoteCopy = structuredClone(event1quoteRepostLatin);
- const event6copy = structuredClone(event6ofQuoteRepost);
+ const author = await eventFixture('event-0-makes-repost-with-quote-repost');
+ const event1 = await eventFixture('event-1-will-be-reposted-with-quote-repost');
+ const event6 = await eventFixture('event-6-of-quote-repost');
+ const event1quote = await eventFixture('event-1-quote-repost-will-be-reposted');
// Save events to database
- await db.event(event0copy);
- await db.event(event1copy);
- await db.event(event1quoteCopy);
- await db.event(event6copy);
-
- assertEquals((event6copy as DittoEvent).author, undefined, "Event hasn't hydrated author yet");
- assertEquals((event6copy as DittoEvent).repost, undefined, "Event hasn't hydrated repost yet");
+ await db.event(author);
+ await db.event(event1);
+ await db.event(event1quote);
+ await db.event(event6);
await hydrateEvents({
- events: [event6copy],
- storage: db,
+ events: [event6],
+ store: db,
});
const expectedEvent6 = {
- ...event6copy,
- author: event0copy,
- repost: { ...event1quoteCopy, author: event0copy, quote_repost: { author: event0copy, ...event1copy } },
+ ...event6,
+ author,
+ repost: { ...event1quote, author, quote: { author, ...event1 } },
};
- assertEquals(event6copy, expectedEvent6);
+ assertEquals(event6, expectedEvent6);
+});
+
+Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stats', async () => {
+ const db = new MockRelay();
+
+ const authorDictator = await eventFixture('kind-0-dictator');
+ const authorVictim = await eventFixture('kind-0-george-orwell');
+ const reportEvent = await eventFixture('kind-1984-dictator-reports-george-orwell');
+ const event1 = await eventFixture('kind-1-author-george-orwell');
+
+ // Save events to database
+ await db.event(authorDictator);
+ await db.event(authorVictim);
+ await db.event(reportEvent);
+ await db.event(event1);
+
+ await hydrateEvents({
+ events: [reportEvent],
+ store: db,
+ });
+
+ const expectedEvent: DittoEvent = {
+ ...reportEvent,
+ author: authorDictator,
+ reported_notes: [event1],
+ reported_profile: authorVictim,
+ };
+ assertEquals(reportEvent, expectedEvent);
});
diff --git a/src/storages/hydrate.ts b/src/storages/hydrate.ts
index 619b798..e5c488e 100644
--- a/src/storages/hydrate.ts
+++ b/src/storages/hydrate.ts
@@ -1,20 +1,21 @@
import { NostrEvent, NStore } from '@nostrify/nostrify';
+import { matchFilter } from 'nostr-tools';
-import { db } from '@/db.ts';
-import { matchFilter } from '@/deps.ts';
+import { DittoDB } from '@/db/DittoDB.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { DittoTables } from '@/db/DittoTables.ts';
import { Conf } from '@/config.ts';
+import { refreshAuthorStatsDebounced } from '@/stats.ts';
interface HydrateOpts {
events: DittoEvent[];
- storage: NStore;
+ store: NStore;
signal?: AbortSignal;
}
/** Hydrate events using the provided storage. */
async function hydrateEvents(opts: HydrateOpts): Promise {
- const { events, storage, signal } = opts;
+ const { events, store, signal } = opts;
if (!events.length) {
return events;
@@ -22,19 +23,31 @@ async function hydrateEvents(opts: HydrateOpts): Promise {
const cache = [...events];
- for (const event of await gatherReposts({ events: cache, storage, signal })) {
+ for (const event of await gatherReposts({ events: cache, store, signal })) {
cache.push(event);
}
- for (const event of await gatherQuotes({ events: cache, storage, signal })) {
+ for (const event of await gatherReacted({ events: cache, store, signal })) {
cache.push(event);
}
- for (const event of await gatherAuthors({ events: cache, storage, signal })) {
+ for (const event of await gatherQuotes({ events: cache, store, signal })) {
cache.push(event);
}
- for (const event of await gatherUsers({ events: cache, storage, signal })) {
+ for (const event of await gatherAuthors({ events: cache, store, signal })) {
+ cache.push(event);
+ }
+
+ for (const event of await gatherUsers({ events: cache, store, signal })) {
+ cache.push(event);
+ }
+
+ for (const event of await gatherReportedProfiles({ events: cache, store, signal })) {
+ cache.push(event);
+ }
+
+ for (const event of await gatherReportedNotes({ events: cache, store, signal })) {
cache.push(event);
}
@@ -43,6 +56,8 @@ async function hydrateEvents(opts: HydrateOpts): Promise {
events: await gatherEventStats(cache),
};
+ refreshMissingAuthorStats(events, stats.authors);
+
// Dedupe events.
const results = [...new Map(cache.map((event) => [event.id, event])).values()];
@@ -65,6 +80,13 @@ function assembleEvents(
event.author = b.find((e) => matchFilter({ kinds: [0], authors: [event.pubkey] }, e));
event.user = b.find((e) => matchFilter({ kinds: [30361], authors: [admin], '#d': [event.pubkey] }, e));
+ if (event.kind === 1) {
+ const id = event.tags.find(([name]) => name === 'q')?.[1];
+ if (id) {
+ event.quote = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e));
+ }
+ }
+
if (event.kind === 6) {
const id = event.tags.find(([name]) => name === 'e')?.[1];
if (id) {
@@ -72,10 +94,27 @@ function assembleEvents(
}
}
- if (event.kind === 1) {
- const id = event.tags.find(([name]) => name === 'q')?.[1];
+ if (event.kind === 7) {
+ const id = event.tags.find(([name]) => name === 'e')?.[1];
if (id) {
- event.quote_repost = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e));
+ event.reacted = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e));
+ }
+ }
+
+ if (event.kind === 1984) {
+ const targetAccountId = event.tags.find(([name]) => name === 'p')?.[1];
+ if (targetAccountId) {
+ event.reported_profile = b.find((e) => matchFilter({ kinds: [0], authors: [targetAccountId] }, e));
+ }
+ const reportedEvents: DittoEvent[] = [];
+
+ const status_ids = event.tags.filter(([name]) => name === 'e').map((tag) => tag[1]);
+ if (status_ids.length > 0) {
+ for (const id of status_ids) {
+ const reportedEvent = b.find((e) => matchFilter({ kinds: [1], ids: [id] }, e));
+ if (reportedEvent) reportedEvents.push(reportedEvent);
+ }
+ event.reported_notes = reportedEvents;
}
}
@@ -87,7 +126,7 @@ function assembleEvents(
}
/** Collect reposts from the events. */
-function gatherReposts({ events, storage, signal }: HydrateOpts): Promise {
+function gatherReposts({ events, store, signal }: HydrateOpts): Promise {
const ids = new Set();
for (const event of events) {
@@ -99,14 +138,33 @@ function gatherReposts({ events, storage, signal }: HydrateOpts): Promise {
+ const ids = new Set();
+
+ for (const event of events) {
+ if (event.kind === 7) {
+ const id = event.tags.find(([name]) => name === 'e')?.[1];
+ if (id) {
+ ids.add(id);
+ }
+ }
+ }
+
+ return store.query(
[{ ids: [...ids], limit: ids.size }],
{ signal },
);
}
/** Collect quotes from the events. */
-function gatherQuotes({ events, storage, signal }: HydrateOpts): Promise {
+function gatherQuotes({ events, store, signal }: HydrateOpts): Promise {
const ids = new Set();
for (const event of events) {
@@ -118,34 +176,73 @@ function gatherQuotes({ events, storage, signal }: HydrateOpts): Promise {
+function gatherAuthors({ events, store, signal }: HydrateOpts): Promise {
const pubkeys = new Set(events.map((event) => event.pubkey));
- return storage.query(
+ return store.query(
[{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}
/** Collect users from the events. */
-function gatherUsers({ events, storage, signal }: HydrateOpts): Promise {
+function gatherUsers({ events, store, signal }: HydrateOpts): Promise {
const pubkeys = new Set(events.map((event) => event.pubkey));
- return storage.query(
+ return store.query(
[{ kinds: [30361], authors: [Conf.pubkey], '#d': [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}
+/** Collect reported notes from the events. */
+function gatherReportedNotes({ events, store, signal }: HydrateOpts): Promise {
+ const ids = new Set();
+ for (const event of events) {
+ if (event.kind === 1984) {
+ const status_ids = event.tags.filter(([name]) => name === 'e').map((tag) => tag[1]);
+ if (status_ids.length > 0) {
+ for (const id of status_ids) {
+ ids.add(id);
+ }
+ }
+ }
+ }
+
+ return store.query(
+ [{ kinds: [1], ids: [...ids], limit: ids.size }],
+ { signal },
+ );
+}
+
+/** Collect reported profiles from the events. */
+function gatherReportedProfiles({ events, store, signal }: HydrateOpts): Promise {
+ const pubkeys = new Set();
+
+ for (const event of events) {
+ if (event.kind === 1984) {
+ const pubkey = event.tags.find(([name]) => name === 'p')?.[1];
+ if (pubkey) {
+ pubkeys.add(pubkey);
+ }
+ }
+ }
+
+ return store.query(
+ [{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }],
+ { signal },
+ );
+}
+
/** Collect author stats from the events. */
-function gatherAuthorStats(events: DittoEvent[]): Promise {
+async function gatherAuthorStats(events: DittoEvent[]): Promise {
const pubkeys = new Set(
events
.filter((event) => event.kind === 0)
@@ -156,15 +253,40 @@ function gatherAuthorStats(events: DittoEvent[]): Promise ({
+ pubkey: row.pubkey,
+ followers_count: Math.max(0, row.followers_count),
+ following_count: Math.max(0, row.following_count),
+ notes_count: Math.max(0, row.notes_count),
+ }));
+}
+
+function refreshMissingAuthorStats(events: NostrEvent[], stats: DittoTables['author_stats'][]) {
+ const pubkeys = new Set(
+ events
+ .filter((event) => event.kind === 0)
+ .map((event) => event.pubkey),
+ );
+
+ const missing = pubkeys.difference(
+ new Set(stats.map((stat) => stat.pubkey)),
+ );
+
+ for (const pubkey of missing) {
+ refreshAuthorStatsDebounced(pubkey);
+ }
}
/** Collect event stats from the events. */
-function gatherEventStats(events: DittoEvent[]): Promise {
+async function gatherEventStats(events: DittoEvent[]): Promise {
const ids = new Set(
events
.filter((event) => event.kind === 1)
@@ -175,11 +297,20 @@ function gatherEventStats(events: DittoEvent[]): Promise ({
+ event_id: row.event_id,
+ reposts_count: Math.max(0, row.reposts_count),
+ reactions_count: Math.max(0, row.reactions_count),
+ replies_count: Math.max(0, row.replies_count),
+ }));
}
/** Return a normalized event without any non-standard keys. */
diff --git a/src/storages/optimizer.ts b/src/storages/optimizer.ts
index 518fc15..7b4153e 100644
--- a/src/storages/optimizer.ts
+++ b/src/storages/optimizer.ts
@@ -1,5 +1,6 @@
import { NostrFilter, NSet, NStore } from '@nostrify/nostrify';
-import { Debug } from '@/deps.ts';
+import Debug from '@soapbox/stickynotes/debug';
+
import { normalizeFilters } from '@/filter.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { abortError } from '@/utils/abort.ts';
diff --git a/src/storages/pool-store.ts b/src/storages/pool-store.ts
index 6620ec9..5456509 100644
--- a/src/storages/pool-store.ts
+++ b/src/storages/pool-store.ts
@@ -1,94 +1,102 @@
-import { NostrEvent, NostrFilter, NSet, NStore } from '@nostrify/nostrify';
-import { Debug, matchFilters, type RelayPoolWorker } from '@/deps.ts';
-import { normalizeFilters } from '@/filter.ts';
+import {
+ NostrEvent,
+ NostrFilter,
+ NostrRelayCLOSED,
+ NostrRelayEOSE,
+ NostrRelayEVENT,
+ NRelay,
+ NSet,
+} from '@nostrify/nostrify';
+import { Machina } from '@nostrify/nostrify/utils';
+import Debug from '@soapbox/stickynotes/debug';
+import { RelayPoolWorker } from 'nostr-relaypool';
+import { getFilterLimit, matchFilters } from 'nostr-tools';
+
+import { Conf } from '@/config.ts';
+import { Storages } from '@/storages.ts';
import { purifyEvent } from '@/storages/hydrate.ts';
import { abortError } from '@/utils/abort.ts';
import { getRelays } from '@/utils/outbox.ts';
-import { Conf } from '@/config.ts';
interface PoolStoreOpts {
pool: InstanceType;
relays: WebSocket['url'][];
- publisher: {
- handleEvent(event: NostrEvent, signal: AbortSignal): Promise;
- };
}
-class PoolStore implements NStore {
- #debug = Debug('ditto:client');
- #pool: InstanceType;
- #relays: WebSocket['url'][];
- #publisher: {
- handleEvent(event: NostrEvent, signal: AbortSignal): Promise;
- };
+class PoolStore implements NRelay {
+ private debug = Debug('ditto:client');
+ private pool: InstanceType;
+ private relays: WebSocket['url'][];
constructor(opts: PoolStoreOpts) {
- this.#pool = opts.pool;
- this.#relays = opts.relays;
- this.#publisher = opts.publisher;
+ this.pool = opts.pool;
+ this.relays = opts.relays;
}
async event(event: NostrEvent, opts: { signal?: AbortSignal } = {}): Promise {
if (opts.signal?.aborted) return Promise.reject(abortError());
- const relaySet = await getRelays(event.pubkey);
+ const relaySet = await getRelays(await Storages.db(), event.pubkey);
relaySet.delete(Conf.relay);
const relays = [...relaySet].slice(0, 4);
event = purifyEvent(event);
- this.#debug('EVENT', event, relays);
+ this.debug('EVENT', event, relays);
- this.#pool.publish(event, relays);
+ this.pool.publish(event, relays);
return Promise.resolve();
}
- query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise {
- if (opts.signal?.aborted) return Promise.reject(abortError());
+ async *req(
+ filters: NostrFilter[],
+ opts: { signal?: AbortSignal; limit?: number } = {},
+ ): AsyncIterable {
+ this.debug('REQ', JSON.stringify(filters));
- filters = normalizeFilters(filters);
- this.#debug('REQ', JSON.stringify(filters));
- if (!filters.length) return Promise.resolve([]);
+ const uuid = crypto.randomUUID();
+ const machina = new Machina(opts.signal);
- return new Promise((resolve, reject) => {
- const results = new NSet();
+ const unsub = this.pool.subscribe(
+ filters,
+ this.relays,
+ (event: NostrEvent | null) => {
+ if (event && matchFilters(filters, event)) {
+ machina.push(['EVENT', uuid, purifyEvent(event)]);
+ }
+ },
+ undefined,
+ () => {
+ machina.push(['EOSE', uuid]);
+ },
+ );
- const unsub = this.#pool.subscribe(
- filters,
- this.#relays,
- (event: NostrEvent | null) => {
- if (event && matchFilters(filters, event)) {
- this.#publisher.handleEvent(event, AbortSignal.timeout(1000)).catch(() => {});
- results.add({
- id: event.id,
- kind: event.kind,
- pubkey: event.pubkey,
- content: event.content,
- tags: event.tags,
- created_at: event.created_at,
- sig: event.sig,
- });
- }
- if (typeof opts.limit === 'number' && results.size >= opts.limit) {
- unsub();
- resolve([...results]);
- }
- },
- undefined,
- () => {
- unsub();
- resolve([...results]);
- },
- );
+ try {
+ for await (const msg of machina) {
+ yield msg;
+ }
+ } finally {
+ unsub();
+ }
+ }
- const onAbort = () => {
- unsub();
- reject(abortError());
- opts.signal?.removeEventListener('abort', onAbort);
- };
+ async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise {
+ const events = new NSet();
- opts.signal?.addEventListener('abort', onAbort);
- });
+ const limit = filters.reduce((result, filter) => result + getFilterLimit(filter), 0);
+ if (limit === 0) return [];
+
+ for await (const msg of this.req(filters, opts)) {
+ if (msg[0] === 'EOSE') break;
+ if (msg[0] === 'EVENT') events.add(msg[2]);
+ if (msg[0] === 'CLOSED') throw new Error('Subscription closed');
+
+ if (events.size >= limit) {
+ break;
+ }
+ }
+
+ return [...events];
}
}
diff --git a/src/storages/reqmeister.ts b/src/storages/reqmeister.ts
index 6be5a56..e3833d3 100644
--- a/src/storages/reqmeister.ts
+++ b/src/storages/reqmeister.ts
@@ -1,5 +1,7 @@
import { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';
-import { Debug, EventEmitter } from '@/deps.ts';
+import Debug from '@soapbox/stickynotes/debug';
+import { EventEmitter } from 'tseep';
+
import { eventToMicroFilter, getFilterId, isMicrofilter, type MicroFilter } from '@/filter.ts';
import { Time } from '@/utils/time.ts';
import { abortError } from '@/utils/abort.ts';
@@ -80,7 +82,7 @@ class Reqmeister extends EventEmitter<{ [filterId: string]: (event: NostrEvent)
this.#perform();
}
- req(filter: MicroFilter, opts: ReqmeisterReqOpts = {}): Promise {
+ private fetch(filter: MicroFilter, opts: ReqmeisterReqOpts = {}): Promise {
const {
relays = [],
signal = AbortSignal.timeout(this.#opts.timeout ?? 1000),
@@ -118,12 +120,7 @@ class Reqmeister extends EventEmitter<{ [filterId: string]: (event: NostrEvent)
return Promise.resolve();
}
- isWanted(event: NostrEvent): boolean {
- const filterId = getFilterId(eventToMicroFilter(event));
- return this.#queue.some(([id]) => id === filterId);
- }
-
- query(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise {
+ async query(filters: NostrFilter[], opts?: { signal?: AbortSignal }): Promise {
if (opts?.signal?.aborted) return Promise.reject(abortError());
this.#debug('REQ', JSON.stringify(filters));
@@ -131,12 +128,16 @@ class Reqmeister extends EventEmitter<{ [filterId: string]: (event: NostrEvent)
const promises = filters.reduce[]>((result, filter) => {
if (isMicrofilter(filter)) {
- result.push(this.req(filter, opts));
+ result.push(this.fetch(filter, opts));
}
return result;
}, []);
- return Promise.all(promises);
+ const results = await Promise.allSettled(promises);
+
+ return results
+ .filter((result): result is PromiseFulfilledResult => result.status === 'fulfilled')
+ .map((result) => result.value);
}
}
diff --git a/src/storages/search-store.ts b/src/storages/search-store.ts
index 6150896..4951c72 100644
--- a/src/storages/search-store.ts
+++ b/src/storages/search-store.ts
@@ -1,5 +1,6 @@
import { NostrEvent, NostrFilter, NRelay1, NStore } from '@nostrify/nostrify';
-import { Debug } from '@/deps.ts';
+import Debug from '@soapbox/stickynotes/debug';
+
import { normalizeFilters } from '@/filter.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
@@ -47,7 +48,7 @@ class SearchStore implements NStore {
return hydrateEvents({
events,
- storage: this.#hydrator,
+ store: this.#hydrator,
signal: opts?.signal,
});
} else {
diff --git a/src/tags.test.ts b/src/tags.test.ts
index c4d3214..e49d31a 100644
--- a/src/tags.test.ts
+++ b/src/tags.test.ts
@@ -1,4 +1,4 @@
-import { assertEquals } from '@/deps-test.ts';
+import { assertEquals } from '@std/assert';
import { addTag, deleteTag, getTagSet } from './tags.ts';
diff --git a/src/test.ts b/src/test.ts
new file mode 100644
index 0000000..ea9c8fa
--- /dev/null
+++ b/src/test.ts
@@ -0,0 +1,23 @@
+import { NostrEvent } from '@nostrify/nostrify';
+import { finalizeEvent, generateSecretKey } from 'nostr-tools';
+
+import { purifyEvent } from '@/storages/hydrate.ts';
+
+/** Import an event fixture by name in tests. */
+export async function eventFixture(name: string): Promise {
+ const result = await import(`~/fixtures/events/${name}.json`, { with: { type: 'json' } });
+ return structuredClone(result.default);
+}
+
+/** Generate an event for use in tests. */
+export function genEvent(t: Partial = {}, sk: Uint8Array = generateSecretKey()): NostrEvent {
+ const event = finalizeEvent({
+ kind: 255,
+ created_at: 0,
+ content: '',
+ tags: [],
+ ...t,
+ }, sk);
+
+ return purifyEvent(event);
+}
diff --git a/src/upload.ts b/src/upload.ts
deleted file mode 100644
index 5c16501..0000000
--- a/src/upload.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { Conf } from '@/config.ts';
-import { insertUnattachedMedia } from '@/db/unattached-media.ts';
-import { configUploader as uploader } from '@/uploaders/config.ts';
-
-interface FileMeta {
- pubkey: string;
- description?: string;
-}
-
-/** Upload a file, track it in the database, and return the resulting media object. */
-async function uploadFile(file: File, meta: FileMeta, signal?: AbortSignal) {
- const { name, type, size } = file;
- const { pubkey, description } = meta;
-
- if (file.size > Conf.maxUploadSize) {
- throw new Error('File size is too large.');
- }
-
- const { cid } = await uploader.upload(file, signal);
- const url = new URL(`/ipfs/${cid}`, Conf.mediaDomain).toString();
-
- return insertUnattachedMedia({
- pubkey,
- url,
- data: {
- name,
- size,
- description,
- mime: type,
- },
- });
-}
-
-export { uploadFile };
diff --git a/src/uploaders/DenoUploader.ts b/src/uploaders/DenoUploader.ts
new file mode 100644
index 0000000..fd30d8c
--- /dev/null
+++ b/src/uploaders/DenoUploader.ts
@@ -0,0 +1,46 @@
+import { join } from 'node:path';
+
+import { NUploader } from '@nostrify/nostrify';
+import { crypto } from '@std/crypto';
+import { encodeHex } from '@std/encoding/hex';
+import { extensionsByType } from '@std/media-types';
+
+export interface DenoUploaderOpts {
+ baseUrl: string;
+ dir: string;
+}
+
+/** Local Deno filesystem uploader. */
+export class DenoUploader implements NUploader {
+ baseUrl: string;
+ dir: string;
+
+ constructor(opts: DenoUploaderOpts) {
+ this.baseUrl = opts.baseUrl;
+ this.dir = opts.dir;
+ }
+
+ async upload(file: File): Promise<[['url', string], ...string[][]]> {
+ const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
+ const ext = extensionsByType(file.type)?.[0] ?? 'bin';
+ const filename = `${sha256}.${ext}`;
+
+ await Deno.mkdir(this.dir, { recursive: true });
+ await Deno.writeFile(join(this.dir, filename), file.stream());
+
+ const url = new URL(this.baseUrl);
+ const path = url.pathname === '/' ? filename : join(url.pathname, filename);
+
+ return [
+ ['url', new URL(path, url).toString()],
+ ['m', file.type],
+ ['x', sha256],
+ ['size', file.size.toString()],
+ ];
+ }
+
+ async delete(filename: string) {
+ const path = join(this.dir, filename);
+ await Deno.remove(path);
+ }
+}
diff --git a/src/uploaders/IPFSUploader.ts b/src/uploaders/IPFSUploader.ts
new file mode 100644
index 0000000..7bf5165
--- /dev/null
+++ b/src/uploaders/IPFSUploader.ts
@@ -0,0 +1,69 @@
+import { NUploader } from '@nostrify/nostrify';
+import { z } from 'zod';
+
+export interface IPFSUploaderOpts {
+ baseUrl: string;
+ apiUrl?: string;
+ fetch?: typeof fetch;
+}
+
+/**
+ * IPFS uploader. It expects an IPFS node up and running.
+ * It will try to connect to `http://localhost:5001` by default,
+ * and upload the file using the REST API.
+ */
+export class IPFSUploader implements NUploader {
+ private baseUrl: string;
+ private apiUrl: string;
+ private fetch: typeof fetch;
+
+ constructor(opts: IPFSUploaderOpts) {
+ this.baseUrl = opts.baseUrl;
+ this.apiUrl = opts.apiUrl ?? 'http://localhost:5001';
+ this.fetch = opts.fetch ?? globalThis.fetch;
+ }
+
+ async upload(file: File, opts?: { signal?: AbortSignal }): Promise<[['url', string], ...string[][]]> {
+ const url = new URL('/api/v0/add', this.apiUrl);
+
+ const formData = new FormData();
+ formData.append('file', file);
+
+ const response = await this.fetch(url, {
+ method: 'POST',
+ body: formData,
+ signal: opts?.signal,
+ });
+
+ const { Hash: cid } = IPFSUploader.schema().parse(await response.json());
+
+ return [
+ ['url', new URL(`/ipfs/${cid}`, this.baseUrl).toString()],
+ ['m', file.type],
+ ['cid', cid],
+ ['size', file.size.toString()],
+ ];
+ }
+
+ async delete(cid: string, opts?: { signal?: AbortSignal }): Promise {
+ const url = new URL('/api/v0/pin/rm', this.apiUrl);
+
+ const query = new URLSearchParams();
+ query.set('arg', cid);
+ url.search = query.toString();
+
+ await this.fetch(url, {
+ method: 'POST',
+ signal: opts?.signal,
+ });
+ }
+
+ /** Response schema for POST `/api/v0/add`. */
+ private static schema() {
+ return z.object({
+ Name: z.string(),
+ Hash: z.string(),
+ Size: z.string(),
+ });
+ }
+}
diff --git a/src/uploaders/S3Uploader.ts b/src/uploaders/S3Uploader.ts
new file mode 100644
index 0000000..b74796a
--- /dev/null
+++ b/src/uploaders/S3Uploader.ts
@@ -0,0 +1,59 @@
+import { join } from 'node:path';
+
+import { S3Client } from '@bradenmacdonald/s3-lite-client';
+import { NUploader } from '@nostrify/nostrify';
+import { crypto } from '@std/crypto';
+import { encodeHex } from '@std/encoding/hex';
+import { extensionsByType } from '@std/media-types';
+
+import { Conf } from '@/config.ts';
+
+export interface S3UploaderOpts {
+ endPoint: string;
+ region: string;
+ accessKey?: string;
+ secretKey?: string;
+ bucket?: string;
+ pathStyle?: boolean;
+ port?: number;
+ sessionToken?: string;
+ useSSL?: boolean;
+}
+
+/** S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more. */
+export class S3Uploader implements NUploader {
+ private client: S3Client;
+
+ constructor(opts: S3UploaderOpts) {
+ this.client = new S3Client(opts);
+ }
+
+ async upload(file: File): Promise<[['url', string], ...string[][]]> {
+ const sha256 = encodeHex(await crypto.subtle.digest('SHA-256', file.stream()));
+ const ext = extensionsByType(file.type)?.[0] ?? 'bin';
+ const filename = `${sha256}.${ext}`;
+
+ await this.client.putObject(filename, file.stream(), {
+ metadata: {
+ 'Content-Type': file.type,
+ 'x-amz-acl': 'public-read',
+ },
+ });
+
+ const { pathStyle, bucket } = Conf.s3;
+
+ const path = (pathStyle && bucket) ? join(bucket, filename) : filename;
+ const url = new URL(path, Conf.mediaDomain).toString();
+
+ return [
+ ['url', url],
+ ['m', file.type],
+ ['x', sha256],
+ ['size', file.size.toString()],
+ ];
+ }
+
+ async delete(objectName: string) {
+ await this.client.deleteObject(objectName);
+ }
+}
diff --git a/src/uploaders/config.ts b/src/uploaders/config.ts
deleted file mode 100644
index 2ee2f9a..0000000
--- a/src/uploaders/config.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import { Conf } from '@/config.ts';
-
-import { ipfsUploader } from './ipfs.ts';
-import { s3Uploader } from './s3.ts';
-
-import type { Uploader } from './types.ts';
-
-/** Meta-uploader determined from configuration. */
-const configUploader: Uploader = {
- upload(file, signal) {
- return uploader().upload(file, signal);
- },
- delete(cid, signal) {
- return uploader().delete(cid, signal);
- },
-};
-
-/** Get the uploader module based on configuration. */
-function uploader() {
- switch (Conf.uploader) {
- case 's3':
- return s3Uploader;
- case 'ipfs':
- return ipfsUploader;
- default:
- return ipfsUploader;
- }
-}
-
-export { configUploader };
diff --git a/src/uploaders/ipfs.ts b/src/uploaders/ipfs.ts
deleted file mode 100644
index 5d82e2d..0000000
--- a/src/uploaders/ipfs.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-import { z } from 'zod';
-
-import { Conf } from '@/config.ts';
-import { fetchWorker } from '@/workers/fetch.ts';
-
-import type { Uploader } from './types.ts';
-
-/** Response schema for POST `/api/v0/add`. */
-const ipfsAddResponseSchema = z.object({
- Name: z.string(),
- Hash: z.string(),
- Size: z.string(),
-});
-
-/**
- * IPFS uploader. It expects an IPFS node up and running.
- * It will try to connect to `http://localhost:5001` by default,
- * and upload the file using the REST API.
- */
-const ipfsUploader: Uploader = {
- async upload(file, signal) {
- const url = new URL('/api/v0/add', Conf.ipfs.apiUrl);
-
- const formData = new FormData();
- formData.append('file', file);
-
- const response = await fetchWorker(url, {
- method: 'POST',
- body: formData,
- signal,
- });
-
- const { Hash } = ipfsAddResponseSchema.parse(await response.json());
-
- return {
- cid: Hash,
- };
- },
- async delete(cid, signal) {
- const url = new URL('/api/v0/pin/rm', Conf.ipfs.apiUrl);
-
- const query = new URLSearchParams();
- query.set('arg', cid);
-
- url.search = query.toString();
-
- await fetchWorker(url, {
- method: 'POST',
- signal,
- });
- },
-};
-
-export { ipfsUploader };
diff --git a/src/uploaders/s3.ts b/src/uploaders/s3.ts
deleted file mode 100644
index 2e02cc3..0000000
--- a/src/uploaders/s3.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-import { Conf } from '@/config.ts';
-import { IpfsHash, S3Client } from '@/deps.ts';
-
-import type { Uploader } from './types.ts';
-
-/**
- * S3-compatible uploader for AWS, Wasabi, DigitalOcean Spaces, and more.
- * Files are named by their IPFS CID and exposed at `/ipfs/`, letting it
- * take advantage of IPFS features while not really using IPFS.
- */
-const s3Uploader: Uploader = {
- async upload(file, _signal) {
- const cid = await IpfsHash.of(file.stream()) as string;
-
- // FIXME: Can't cancel S3 requests: https://github.com/bradenmacdonald/deno-s3-lite-client/issues/24
- await client().putObject(`ipfs/${cid}`, file.stream(), {
- metadata: {
- 'Content-Type': file.type,
- 'x-amz-acl': 'public-read',
- },
- });
-
- return {
- cid,
- };
- },
- async delete(cid, _signal) {
- // FIXME: Can't cancel S3 requests: https://github.com/bradenmacdonald/deno-s3-lite-client/issues/24
- await client().deleteObject(`ipfs/${cid}`);
- },
-};
-
-/** Build S3 client from config. */
-function client() {
- return new S3Client({ ...Conf.s3 });
-}
-
-export { s3Uploader };
diff --git a/src/uploaders/types.ts b/src/uploaders/types.ts
deleted file mode 100644
index 8f11545..0000000
--- a/src/uploaders/types.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-/** Modular uploader interface, to support uploading to different backends. */
-interface Uploader {
- /** Upload the file to the backend. */
- upload(file: File, signal?: AbortSignal): Promise;
- /** Delete the file from the backend. */
- delete(cid: string, signal?: AbortSignal): Promise;
-}
-
-/** Return value from the uploader after uploading a file. */
-interface UploadResult {
- /** IPFS CID for the file. */
- cid: string;
-}
-
-export type { Uploader };
diff --git a/src/utils.ts b/src/utils.ts
index 747ea43..e9213ed 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -1,17 +1,13 @@
-import { NostrEvent } from '@nostrify/nostrify';
+import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
+import { nip19 } from 'nostr-tools';
import { z } from 'zod';
-import { type EventTemplate, getEventHash, nip19 } from '@/deps.ts';
-import { nostrIdSchema } from '@/schemas/nostr.ts';
-
/** Get the current time in Nostr format. */
const nostrNow = (): number => Math.floor(Date.now() / 1000);
+
/** Convenience function to convert Nostr dates into native Date objects. */
const nostrDate = (seconds: number): Date => new Date(seconds * 1000);
-/** Pass to sort() to sort events by date. */
-const eventDateComparator = (a: NostrEvent, b: NostrEvent): number => b.created_at - a.created_at;
-
/** Get pubkey from bech32 string, if applicable. */
function bech32ToPubkey(bech32: string): string | undefined {
try {
@@ -82,74 +78,32 @@ async function sha256(message: string): Promise {
return hashHex;
}
-/** Schema to parse a relay URL. */
-const relaySchema = z.string().max(255).startsWith('wss://').url();
-
-/** Check whether the value is a valid relay URL. */
-const isRelay = (relay: string): relay is `wss://${string}` => relaySchema.safeParse(relay).success;
-
/** Deduplicate events by ID; for duplicate IDs the last occurrence wins. */
function dedupeEvents(events: NostrEvent[]): NostrEvent[] {
  const byId = new Map<string, NostrEvent>();

  for (const event of events) {
    byId.set(event.id, event);
  }

  return [...byId.values()];
}
-/** Return a copy of the event with the given tags removed. */
-function stripTags(event: E, tags: string[] = []): E {
- if (!tags.length) return event;
- return {
- ...event,
- tags: event.tags.filter(([name]) => !tags.includes(name)),
- };
-}
-
-/** Ensure the template and event match on their shared keys. */
-function eventMatchesTemplate(event: NostrEvent, template: EventTemplate): boolean {
- const whitelist = ['nonce'];
-
- event = stripTags(event, whitelist);
- template = stripTags(template, whitelist);
-
- if (template.created_at > event.created_at) {
- return false;
- }
-
- return getEventHash(event) === getEventHash({
- pubkey: event.pubkey,
- ...template,
- created_at: event.created_at,
- });
-}
-
/** Test whether the value is a Nostr ID. */
function isNostrId(value: unknown): boolean {
- return nostrIdSchema.safeParse(value).success;
+ return n.id().safeParse(value).success;
}
/** Test whether the value is a URL. */
function isURL(value: unknown): boolean {
- try {
- new URL(value as string);
- return true;
- } catch (_) {
- return false;
- }
+ return z.string().url().safeParse(value).success;
}
export {
bech32ToPubkey,
dedupeEvents,
eventAge,
- eventDateComparator,
- eventMatchesTemplate,
findTag,
isNostrId,
- isRelay,
isURL,
type Nip05,
nostrDate,
nostrNow,
parseNip05,
- relaySchema,
sha256,
};
diff --git a/src/utils/SimpleLRU.ts b/src/utils/SimpleLRU.ts
index 26f51fc..f1bf651 100644
--- a/src/utils/SimpleLRU.ts
+++ b/src/utils/SimpleLRU.ts
@@ -1,6 +1,6 @@
// deno-lint-ignore-file ban-types
-import { LRUCache } from '@/deps.ts';
+import { LRUCache } from 'lru-cache';
type FetchFn = (key: K, opts: O) => Promise;
diff --git a/src/utils/api.ts b/src/utils/api.ts
index cd4e6e2..dceede7 100644
--- a/src/utils/api.ts
+++ b/src/utils/api.ts
@@ -1,14 +1,17 @@
import { NostrEvent, NostrFilter } from '@nostrify/nostrify';
+import Debug from '@soapbox/stickynotes/debug';
import { type Context, HTTPException } from 'hono';
+import { parseFormData } from 'formdata-helper';
+import { EventTemplate } from 'nostr-tools';
+import * as TypeFest from 'type-fest';
import { z } from 'zod';
import { type AppContext } from '@/app.ts';
import { Conf } from '@/config.ts';
-import { Debug, EventTemplate, parseFormData, type TypeFest } from '@/deps.ts';
import * as pipeline from '@/pipeline.ts';
+import { RelayError } from '@/RelayError.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
-import { APISigner } from '@/signers/APISigner.ts';
-import { client, eventsDB } from '@/storages.ts';
+import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';
const debug = Debug('ditto:api');
@@ -18,7 +21,13 @@ type EventStub = TypeFest.SetOptional {
- const signer = new APISigner(c);
+ const signer = c.get('signer');
+
+ if (!signer) {
+ throw new HTTPException(401, {
+ res: c.json({ error: 'No way to sign Nostr event' }, 401),
+ });
+ }
const event = await signer.signEvent({
content: '',
@@ -33,7 +42,7 @@ async function createEvent(t: EventStub, c: AppContext): Promise {
/** Filter for fetching an existing event to update. */
interface UpdateEventFilter extends NostrFilter {
kinds: [number];
- limit?: 1;
+ limit: 1;
}
/** Fetch existing event, update it, then publish the new event. */
@@ -42,7 +51,8 @@ async function updateEvent(
fn: (prev: NostrEvent | undefined) => E,
c: AppContext,
): Promise {
- const [prev] = await eventsDB.query([filter], { limit: 1, signal: c.req.raw.signal });
+ const store = await Storages.db();
+ const [prev] = await store.query([filter], { signal: c.req.raw.signal });
return createEvent(fn(prev), c);
}
@@ -73,16 +83,39 @@ async function createAdminEvent(t: EventStub, c: AppContext): Promise string[][],
+ c: AppContext,
+): Promise {
+ return updateAdminEvent(filter, (prev) => ({
+ kind: filter.kinds[0],
+ content: prev?.content ?? '',
+ tags: fn(prev?.tags ?? []),
+ }), c);
+}
+
+/** Fetch existing event, update it, then publish the new admin event. */
+async function updateAdminEvent(
+ filter: UpdateEventFilter,
+ fn: (prev: NostrEvent | undefined) => E,
+ c: AppContext,
+): Promise {
+ const store = await Storages.db();
+ const [prev] = await store.query([filter], { limit: 1, signal: c.req.raw.signal });
+ return createAdminEvent(fn(prev), c);
+}
+
/** Push the event through the pipeline, rethrowing any RelayError. */
async function publishEvent(event: NostrEvent, c: AppContext): Promise