Merge branch 'main' into fix-blocked-users-in-streaming

(((updating local branch)))

commit 9e34838de8

@@ -1,3 +1,4 @@
.env
*.cpuprofile
*.swp
deno-test.xml
@@ -20,10 +20,11 @@
    "@db/sqlite": "jsr:@db/sqlite@^0.11.1",
    "@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
    "@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
    "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.17.1",
    "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.19.0",
    "@sentry/deno": "https://deno.land/x/sentry@7.112.2/index.mjs",
    "@soapbox/kysely-deno-sqlite": "jsr:@soapbox/kysely-deno-sqlite@^2.1.0",
    "@soapbox/stickynotes": "jsr:@soapbox/stickynotes@^0.4.0",
    "@std/assert": "jsr:@std/assert@^0.225.1",
    "@std/cli": "jsr:@std/cli@^0.223.0",
    "@std/crypto": "jsr:@std/crypto@^0.224.0",
    "@std/dotenv": "jsr:@std/dotenv@^0.224.0",
@@ -9,9 +9,7 @@ The Ditto server publishes kind `30361` events to represent users. These events
User events have the following tags:

- `d` - pubkey of the user.
- `name` - NIP-05 username granted to the user, without the domain.
- `role` - one of `admin` or `user`.
- `origin` - the origin of the user's NIP-05, at the time the event was published.

Example:

@@ -25,7 +23,6 @@ Example:
  "tags": [
    ["d", "79c2cae114ea28a981e7559b4fe7854a473521a8d22a66bbab9fa248eb820ff6"],
    ["role", "user"],
    ["origin", "https://ditto.ngrok.app"],
    ["alt", "User's account was updated by the admins of ditto.ngrok.app"]
  ],
  "sig": "fc12db77b1c8f8aa86c73b617f0cd4af1e6ba244239eaf3164a292de6d39363f32d6b817ffff796ace7a103d75e1d8e6a0fb7f618819b32d81a953b4a75d7507"

@@ -40,4 +37,4 @@ The sections below describe the `content` field. Some are encrypted and some are

### `pub.ditto.pleroma.config`

NIP-04 encrypted JSON array of Pleroma ConfigDB objects. Pleroma admin API endpoints set this config, and Ditto reads from it.
NIP-04 encrypted JSON array of Pleroma ConfigDB objects. Pleroma admin API endpoints set this config, and Ditto reads from it.
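For reference, a decrypted `pub.ditto.pleroma.config` payload is a JSON array of objects with `group`, `key`, and `value` fields; that is the shape `getConfigs` filters on later in this diff (`group === ':pleroma' && key === ':frontend_configurations'`). A hedged sketch of such a payload, with illustrative values rather than data from a real instance:

```ts
// Illustrative decrypted payload; the real array is NIP-04 encrypted inside a kind 30078 event.
const configs = [
  {
    group: ':pleroma',
    key: ':frontend_configurations',
    value: { soapbox_fe: { logo: 'https://ditto.example/logo.svg' } }, // example value, not real config
  },
];

const frontendConfig = configs.find(({ group, key }) => group === ':pleroma' && key === ':frontend_configurations');
console.log(frontendConfig?.value);
```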
@@ -0,0 +1,9 @@
{
  "id": "2238893aee54bbe9188498a5aa124d62870d5757894bf52cdb362d1a0874ed18",
  "pubkey": "c9f5508526e213c3bc5468161f1b738a86063a2ece540730f9412e7becd5f0b2",
  "created_at": 1715517440,
  "kind": 0,
  "tags": [],
  "content": "{\"name\":\"dictator\",\"about\":\"\",\"nip05\":\"\"}",
  "sig": "a630ba158833eea10289fe077087ccad22c71ddfbe67321e8f993517ed7f1578a45aff11bc2bec484c"
}
@@ -0,0 +1,9 @@
{
  "id": "da4e1e727c6456cee2b0341a1d7a2356e4263523374a2570a7dd318ab5d73f93",
  "pubkey": "e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700",
  "created_at": 1715517565,
  "kind": 0,
  "tags": [],
  "content": "{\"name\":\"george orwell\",\"about\":\"\",\"nip05\":\"\"}",
  "sig": "cd375e2065cf452d3bfefa9951b04ab63018ab7c253803256cca1d89d03b38e454c71ed36fdd3c28a8ff2723cc19b21371ce0f9bbd39a92b1d1aa946137237bd"
}
@@ -0,0 +1,9 @@
{
  "id": "44f19148f5af60b0f43ed8c737fbda31b165e05bb55562003c45d9a9f02e8228",
  "pubkey": "e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700",
  "created_at": 1715636249,
  "kind": 1,
  "tags": [],
  "content": "I like free speech",
  "sig": "6b50db9c1c02bd8b0e64512e71d53a0058569f44e8dcff65ad17fce544d6ae79f8f79fa0f9a615446fa8cbc2375709bf835751843b0cd10e62ae5d505fe106d4"
}
@@ -0,0 +1,24 @@
{
  "id": "129b2749330a7f1189d3e74c6764a955851f1e4017a818dfd51ab8e24192b0f3",
  "pubkey": "c9f5508526e213c3bc5468161f1b738a86063a2ece540730f9412e7becd5f0b2",
  "created_at": 1715636348,
  "kind": 1984,
  "tags": [
    [
      "p",
      "e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700",
      "other"
    ],
    [
      "P",
      "e724b1c1b90eab9cc0f5976b380b80dda050de1820dc143e62d9e4f27a9a0b2c"
    ],
    [
      "e",
      "44f19148f5af60b0f43ed8c737fbda31b165e05bb55562003c45d9a9f02e8228",
      "other"
    ]
  ],
  "content": "freedom of speech not freedom of reach",
  "sig": "cd05a14749cdf0c7664d056e2c02518740000387732218dacd0c71de5b96c0c3c99a0b927b0cd0778f25a211525fa03b4ed4f4f537bb1221c73467780d4ee1bc"
}
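The four JSON files above are new test fixtures: two kind 0 profiles, a kind 1 note, and a kind 1984 report whose `p` tag points at the reported author, `e` at the reported note, and `P` at the instance admin (matching the `'#P': [Conf.pubkey]` query further down in this diff). A sketch of how such a fixture might be loaded in a Deno test; the fixture path and test wiring are assumptions, not shown in the diff:

```ts
import { assertEquals } from '@std/assert';
import type { NostrEvent } from '@nostrify/nostrify';

Deno.test('report fixture targets the expected author and note', async () => {
  // Hypothetical fixture path; the diff does not show where these files live.
  const json = await Deno.readTextFile(new URL('./fixtures/report-event.json', import.meta.url));
  const event: NostrEvent = JSON.parse(json);

  assertEquals(event.kind, 1984);
  // The 'p' tag names the reported author, the 'e' tag the reported note.
  assertEquals(event.tags.find(([name]) => name === 'p')?.[1], 'e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700');
  assertEquals(event.tags.find(([name]) => name === 'e')?.[1], '44f19148f5af60b0f43ed8c737fbda31b165e05bb55562003c45d9a9f02e8228');
});
```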
@@ -1,7 +1,7 @@
import { JsonParseStream } from '@std/json/json-parse-stream';
import { TextLineStream } from '@std/streams/text-line-stream';

import { db } from '@/db.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { EventsDB } from '@/storages/events-db.ts';
import { type EventStub } from '@/utils/api.ts';

@@ -9,7 +9,8 @@ import { nostrNow } from '@/utils.ts';

const signer = new AdminSigner();

const eventsDB = new EventsDB(db);
const kysely = await DittoDB.getInstance();
const eventsDB = new EventsDB(kysely);

const readable = Deno.stdin.readable
  .pipeThrough(new TextDecoderStream())
@@ -1,12 +1,13 @@
import { NSchema } from '@nostrify/nostrify';

import { db } from '@/db.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { Conf } from '@/config.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { EventsDB } from '@/storages/events-db.ts';
import { nostrNow } from '@/utils.ts';

const eventsDB = new EventsDB(db);
const kysely = await DittoDB.getInstance();
const eventsDB = new EventsDB(kysely);

const [pubkey, role] = Deno.args;
@@ -0,0 +1,24 @@
import { NostrRelayOK } from '@nostrify/nostrify';

export type RelayErrorPrefix = 'duplicate' | 'pow' | 'blocked' | 'rate-limited' | 'invalid' | 'error';

/** NIP-01 command line result. */
export class RelayError extends Error {
  constructor(prefix: RelayErrorPrefix, message: string) {
    super(`${prefix}: ${message}`);
  }

  /** Construct a RelayError from the reason message. */
  static fromReason(reason: string): RelayError {
    const [prefix, ...rest] = reason.split(': ');
    return new RelayError(prefix as RelayErrorPrefix, rest.join(': '));
  }

  /** Throw a new RelayError if the OK message is false. */
  static assert(msg: NostrRelayOK): void {
    const [_, _eventId, ok, reason] = msg;
    if (!ok) {
      throw RelayError.fromReason(reason);
    }
  }
}
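A quick illustration of how the new `RelayError` helpers fit together (a sketch, not code from the diff): `assert` inspects a NIP-01 OK message and, when the relay rejected the event, `fromReason` turns the machine-readable reason back into a typed error.

```ts
import { NostrRelayOK } from '@nostrify/nostrify';
import { RelayError } from '@/RelayError.ts';

// An OK message as a relay (or the local pipeline) would return it for a rejected event.
const msg: NostrRelayOK = [
  'OK',
  '44f19148f5af60b0f43ed8c737fbda31b165e05bb55562003c45d9a9f02e8228',
  false,
  'blocked: account is deactivated',
];

try {
  RelayError.assert(msg); // throws, because the third element is false
} catch (e) {
  if (e instanceof RelayError) {
    console.error(e.message); // "blocked: account is deactivated"
  }
}
```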
src/app.ts
@@ -1,10 +1,10 @@
import { NostrEvent, NStore } from '@nostrify/nostrify';
import { NostrEvent, NostrSigner, NStore } from '@nostrify/nostrify';
import Debug from '@soapbox/stickynotes/debug';
import { type Context, Env as HonoEnv, type Handler, Hono, Input as HonoInput, type MiddlewareHandler } from 'hono';
import { cors, logger, serveStatic } from 'hono/middleware';

import { type User } from '@/db/users.ts';
import '@/firehose.ts';
import { Conf } from '@/config.ts';
import { startFirehose } from '@/firehose.ts';
import { Time } from '@/utils.ts';

import { actorController } from '@/controllers/activitypub/actor.ts';
@@ -29,6 +29,7 @@ import { adminAccountAction, adminAccountsController } from '@/controllers/api/a
import { appCredentialsController, createAppController } from '@/controllers/api/apps.ts';
import { blocksController } from '@/controllers/api/blocks.ts';
import { bookmarksController } from '@/controllers/api/bookmarks.ts';
import { adminRelaysController, adminSetRelaysController } from '@/controllers/api/ditto.ts';
import { emptyArrayController, emptyObjectController, notImplementedController } from '@/controllers/api/fallback.ts';
import { instanceController } from '@/controllers/api/instance.ts';
import { markersController, updateMarkersController } from '@/controllers/api/markers.ts';
@@ -80,25 +81,21 @@ import { hostMetaController } from '@/controllers/well-known/host-meta.ts';
import { nodeInfoController, nodeInfoSchemaController } from '@/controllers/well-known/nodeinfo.ts';
import { nostrController } from '@/controllers/well-known/nostr.ts';
import { webfingerController } from '@/controllers/well-known/webfinger.ts';
import { auth19, requirePubkey } from '@/middleware/auth19.ts';
import { auth98, requireProof, requireRole } from '@/middleware/auth98.ts';
import { cache } from '@/middleware/cache.ts';
import { csp } from '@/middleware/csp.ts';
import { adminRelaysController, adminSetRelaysController } from '@/controllers/api/ditto.ts';
import { storeMiddleware } from '@/middleware/store.ts';
import { auth98Middleware, requireProof, requireRole } from '@/middleware/auth98Middleware.ts';
import { cacheMiddleware } from '@/middleware/cacheMiddleware.ts';
import { cspMiddleware } from '@/middleware/cspMiddleware.ts';
import { requireSigner } from '@/middleware/requireSigner.ts';
import { signerMiddleware } from '@/middleware/signerMiddleware.ts';
import { storeMiddleware } from '@/middleware/storeMiddleware.ts';
import { blockController } from '@/controllers/api/accounts.ts';
import { unblockController } from '@/controllers/api/accounts.ts';

interface AppEnv extends HonoEnv {
  Variables: {
    /** Hex pubkey for the current user. If provided, the user is considered "logged in." */
    pubkey?: string;
    /** Hex secret key for the current user. Optional, but easiest way to use legacy Mastodon apps. */
    seckey?: Uint8Array;
    /** Signer to get the logged-in user's pubkey, relays, and to sign events, or `undefined` if the user isn't logged in. */
    signer?: NostrSigner;
    /** NIP-98 signed event proving the pubkey is owned by the user. */
    proof?: NostrEvent;
    /** User associated with the pubkey, if any. */
    user?: User;
    /** Store */
    store: NStore;
  };
@@ -112,6 +109,10 @@ const app = new Hono<AppEnv>();

const debug = Debug('ditto:http');

if (Conf.firehoseEnabled) {
  startFirehose();
}

app.use('/api/*', logger(debug));
app.use('/relay/*', logger(debug));
app.use('/.well-known/*', logger(debug));
@@ -123,7 +124,14 @@ app.get('/api/v1/streaming', streamingController);
app.get('/api/v1/streaming/', streamingController);
app.get('/relay', relayController);

app.use('*', csp(), cors({ origin: '*', exposeHeaders: ['link'] }), auth19, auth98(), storeMiddleware);
app.use(
  '*',
  cspMiddleware(),
  cors({ origin: '*', exposeHeaders: ['link'] }),
  signerMiddleware,
  auth98Middleware(),
  storeMiddleware,
);

app.get('/.well-known/webfinger', webfingerController);
app.get('/.well-known/host-meta', hostMetaController);
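This global middleware swap is mirrored throughout the route table below: `auth19`/`requirePubkey` give way to `signerMiddleware`/`requireSigner`, and controllers stop reading `c.get('pubkey')` in favour of asking the signer. A simplified sketch of a controller written against the new contract; `whoAmIController` is hypothetical, only the `signer` context variable and the `AppController` type come from the diff:

```ts
import { type AppController } from '@/app.ts';

// After signerMiddleware runs, the logged-in user's signer (if any) lives on the context.
const whoAmIController: AppController = async (c) => {
  const pubkey = await c.get('signer')?.getPublicKey();

  if (!pubkey) {
    // requireSigner normally guards routes like this before the controller runs.
    return c.json({ error: 'Unauthorized' }, 401);
  }

  return c.json({ pubkey });
};
```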
@ -134,7 +142,7 @@ app.get('/users/:username', actorController);
|
|||
|
||||
app.get('/nodeinfo/:version', nodeInfoSchemaController);
|
||||
|
||||
app.get('/api/v1/instance', cache({ cacheName: 'web', expires: Time.minutes(5) }), instanceController);
|
||||
app.get('/api/v1/instance', cacheMiddleware({ cacheName: 'web', expires: Time.minutes(5) }), instanceController);
|
||||
|
||||
app.get('/api/v1/apps/verify_credentials', appCredentialsController);
|
||||
app.post('/api/v1/apps', createAppController);
|
||||
|
@ -145,17 +153,17 @@ app.post('/oauth/authorize', oauthAuthorizeController);
|
|||
app.get('/oauth/authorize', oauthController);
|
||||
|
||||
app.post('/api/v1/accounts', requireProof({ pow: 20 }), createAccountController);
|
||||
app.get('/api/v1/accounts/verify_credentials', requirePubkey, verifyCredentialsController);
|
||||
app.patch('/api/v1/accounts/update_credentials', requirePubkey, updateCredentialsController);
|
||||
app.get('/api/v1/accounts/verify_credentials', requireSigner, verifyCredentialsController);
|
||||
app.patch('/api/v1/accounts/update_credentials', requireSigner, updateCredentialsController);
|
||||
app.get('/api/v1/accounts/search', accountSearchController);
|
||||
app.get('/api/v1/accounts/lookup', accountLookupController);
|
||||
app.get('/api/v1/accounts/relationships', requirePubkey, relationshipsController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', requirePubkey, blockController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', requirePubkey, unblockController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', requirePubkey, muteController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', requirePubkey, unmuteController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow', requirePubkey, followController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow', requirePubkey, unfollowController);
|
||||
app.get('/api/v1/accounts/relationships', requireSigner, relationshipsController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/block', requireSigner, blockController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unblock', requireSigner, unblockController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/mute', requireSigner, muteController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unmute', requireSigner, unmuteController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/follow', requireSigner, followController);
|
||||
app.post('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/unfollow', requireSigner, unfollowController);
|
||||
app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/followers', followersController);
|
||||
app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/following', followingController);
|
||||
app.get('/api/v1/accounts/:pubkey{[0-9a-f]{64}}/statuses', accountStatusesController);
|
||||
|
@ -165,21 +173,21 @@ app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/favourited_by', favouritedByControll
|
|||
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/reblogged_by', rebloggedByController);
|
||||
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}/context', contextController);
|
||||
app.get('/api/v1/statuses/:id{[0-9a-f]{64}}', statusController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', requirePubkey, favouriteController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', requirePubkey, bookmarkController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', requirePubkey, unbookmarkController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', requirePubkey, pinController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', requirePubkey, unpinController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/zap', requirePubkey, zapController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', requirePubkey, reblogStatusController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', requirePubkey, unreblogStatusController);
|
||||
app.post('/api/v1/statuses', requirePubkey, createStatusController);
|
||||
app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', requirePubkey, deleteStatusController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/favourite', requireSigner, favouriteController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/bookmark', requireSigner, bookmarkController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unbookmark', requireSigner, unbookmarkController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/pin', requireSigner, pinController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unpin', requireSigner, unpinController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/zap', requireSigner, zapController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/reblog', requireSigner, reblogStatusController);
|
||||
app.post('/api/v1/statuses/:id{[0-9a-f]{64}}/unreblog', requireSigner, unreblogStatusController);
|
||||
app.post('/api/v1/statuses', requireSigner, createStatusController);
|
||||
app.delete('/api/v1/statuses/:id{[0-9a-f]{64}}', requireSigner, deleteStatusController);
|
||||
|
||||
app.post('/api/v1/media', mediaController);
|
||||
app.post('/api/v2/media', mediaController);
|
||||
|
||||
app.get('/api/v1/timelines/home', requirePubkey, homeTimelineController);
|
||||
app.get('/api/v1/timelines/home', requireSigner, homeTimelineController);
|
||||
app.get('/api/v1/timelines/public', publicTimelineController);
|
||||
app.get('/api/v1/timelines/tag/:hashtag', hashtagTimelineController);
|
||||
|
||||
|
@ -189,17 +197,21 @@ app.get('/api/v2/search', searchController);
|
|||
|
||||
app.get('/api/pleroma/frontend_configurations', frontendConfigController);
|
||||
|
||||
app.get('/api/v1/trends/tags', cache({ cacheName: 'web', expires: Time.minutes(15) }), trendingTagsController);
|
||||
app.get('/api/v1/trends', cache({ cacheName: 'web', expires: Time.minutes(15) }), trendingTagsController);
|
||||
app.get(
|
||||
'/api/v1/trends/tags',
|
||||
cacheMiddleware({ cacheName: 'web', expires: Time.minutes(15) }),
|
||||
trendingTagsController,
|
||||
);
|
||||
app.get('/api/v1/trends', cacheMiddleware({ cacheName: 'web', expires: Time.minutes(15) }), trendingTagsController);
|
||||
|
||||
app.get('/api/v1/suggestions', suggestionsV1Controller);
|
||||
app.get('/api/v2/suggestions', suggestionsV2Controller);
|
||||
|
||||
app.get('/api/v1/notifications', requirePubkey, notificationsController);
|
||||
app.get('/api/v1/favourites', requirePubkey, favouritesController);
|
||||
app.get('/api/v1/bookmarks', requirePubkey, bookmarksController);
|
||||
app.get('/api/v1/blocks', requirePubkey, blocksController);
|
||||
app.get('/api/v1/mutes', requirePubkey, mutesController);
|
||||
app.get('/api/v1/notifications', requireSigner, notificationsController);
|
||||
app.get('/api/v1/favourites', requireSigner, favouritesController);
|
||||
app.get('/api/v1/bookmarks', requireSigner, bookmarksController);
|
||||
app.get('/api/v1/blocks', requireSigner, blocksController);
|
||||
app.get('/api/v1/mutes', requireSigner, mutesController);
|
||||
|
||||
app.get('/api/v1/markers', requireProof(), markersController);
|
||||
app.post('/api/v1/markers', requireProof(), updateMarkersController);
|
||||
|
@ -212,17 +224,17 @@ app.delete('/api/v1/pleroma/admin/statuses/:id', requireRole('admin'), pleromaAd
|
|||
app.get('/api/v1/admin/ditto/relays', requireRole('admin'), adminRelaysController);
|
||||
app.put('/api/v1/admin/ditto/relays', requireRole('admin'), adminSetRelaysController);
|
||||
|
||||
app.post('/api/v1/reports', requirePubkey, reportController);
|
||||
app.get('/api/v1/admin/reports', requirePubkey, requireRole('admin'), adminReportsController);
|
||||
app.get('/api/v1/admin/reports/:id{[0-9a-f]{64}}', requirePubkey, requireRole('admin'), adminReportController);
|
||||
app.post('/api/v1/reports', requireSigner, reportController);
|
||||
app.get('/api/v1/admin/reports', requireSigner, requireRole('admin'), adminReportsController);
|
||||
app.get('/api/v1/admin/reports/:id{[0-9a-f]{64}}', requireSigner, requireRole('admin'), adminReportController);
|
||||
app.post(
|
||||
'/api/v1/admin/reports/:id{[0-9a-f]{64}}/resolve',
|
||||
requirePubkey,
|
||||
requireSigner,
|
||||
requireRole('admin'),
|
||||
adminReportResolveController,
|
||||
);
|
||||
|
||||
app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action', requirePubkey, requireRole('admin'), adminAccountAction);
|
||||
app.post('/api/v1/admin/accounts/:id{[0-9a-f]{64}}/action', requireSigner, requireRole('admin'), adminAccountAction);
|
||||
|
||||
// Not (yet) implemented.
|
||||
app.get('/api/v1/custom_emojis', emptyArrayController);
|
||||
|
|
|
@@ -215,6 +215,10 @@ class Conf {
      return Number(Deno.env.get('PG_POOL_SIZE') ?? 10);
    },
  };
  /** Whether to enable requesting events from known relays. */
  static get firehoseEnabled(): boolean {
    return optionalBooleanSchema.parse(Deno.env.get('FIREHOSE_ENABLED')) ?? true;
  }
}

const optionalBooleanSchema = z
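`FIREHOSE_ENABLED` is parsed through `optionalBooleanSchema`, whose definition is cut off at the hunk boundary above. Purely as an illustration of the behaviour the getter relies on (an unset variable falls through to the `?? true` default), a zod schema of that name could look like this; it is an assumption, not the repository's actual definition:

```ts
import { z } from 'zod';

// Illustrative stand-in for the truncated definition: maps the env string to a boolean,
// or to undefined when the variable is unset, letting `?? true` supply the default.
const optionalBooleanSchema = z
  .enum(['true', 'false'])
  .optional()
  .transform((value) => (value === undefined ? undefined : value === 'true'));

console.log(optionalBooleanSchema.parse(undefined) ?? true); // true (default)
console.log(optionalBooleanSchema.parse('false') ?? true); // false
```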
@@ -9,7 +9,7 @@ const actorController: AppController = async (c) => {
  const username = c.req.param('username');
  const { signal } = c.req.raw;

  const pointer = await localNip05Lookup(username);
  const pointer = await localNip05Lookup(c.get('store'), username);
  if (!pointer) return notFound(c);

  const event = await getAuthor(pointer.pubkey, { signal });
@ -29,7 +29,7 @@ const createAccountSchema = z.object({
|
|||
});
|
||||
|
||||
const createAccountController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const result = createAccountSchema.safeParse(await c.req.json());
|
||||
|
||||
if (!result.success) {
|
||||
|
@ -45,7 +45,7 @@ const createAccountController: AppController = async (c) => {
|
|||
};
|
||||
|
||||
const verifyCredentialsController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
|
||||
const event = await getAuthor(pubkey, { relations: ['author_stats'] });
|
||||
if (event) {
|
||||
|
@ -94,15 +94,16 @@ const accountSearchController: AppController = async (c) => {
|
|||
}
|
||||
|
||||
const query = decodeURIComponent(q);
|
||||
const store = await Storages.search();
|
||||
|
||||
const [event, events] = await Promise.all([
|
||||
lookupAccount(query),
|
||||
Storages.search.query([{ kinds: [0], search: query, limit: 20 }], { signal: c.req.raw.signal }),
|
||||
store.query([{ kinds: [0], search: query, limit: 20 }], { signal: c.req.raw.signal }),
|
||||
]);
|
||||
|
||||
const results = await hydrateEvents({
|
||||
events: event ? [event, ...events] : events,
|
||||
storage: Storages.db,
|
||||
store,
|
||||
signal: c.req.raw.signal,
|
||||
});
|
||||
|
||||
|
@ -122,7 +123,7 @@ const accountSearchController: AppController = async (c) => {
|
|||
};
|
||||
|
||||
const relationshipsController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const ids = z.array(z.string()).safeParse(c.req.queries('id[]'));
|
||||
|
||||
if (!ids.success) {
|
||||
|
@ -147,8 +148,10 @@ const accountStatusesController: AppController = async (c) => {
|
|||
const { pinned, limit, exclude_replies, tagged } = accountStatusesQuerySchema.parse(c.req.query());
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
const store = await Storages.db();
|
||||
|
||||
if (pinned) {
|
||||
const [pinEvent] = await Storages.db.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal });
|
||||
const [pinEvent] = await store.query([{ kinds: [10001], authors: [pubkey], limit: 1 }], { signal });
|
||||
if (pinEvent) {
|
||||
const pinnedEventIds = getTagSet(pinEvent.tags, 'e');
|
||||
return renderStatuses(c, [...pinnedEventIds].reverse());
|
||||
|
@ -169,8 +172,8 @@ const accountStatusesController: AppController = async (c) => {
|
|||
filter['#t'] = [tagged];
|
||||
}
|
||||
|
||||
const events = await Storages.db.query([filter], { signal })
|
||||
.then((events) => hydrateEvents({ events, storage: Storages.db, signal }))
|
||||
const events = await store.query([filter], { signal })
|
||||
.then((events) => hydrateEvents({ events, store, signal }))
|
||||
.then((events) => {
|
||||
if (exclude_replies) {
|
||||
return events.filter((event) => !findReplyTag(event.tags));
|
||||
|
@ -178,7 +181,11 @@ const accountStatusesController: AppController = async (c) => {
|
|||
return events;
|
||||
});
|
||||
|
||||
const statuses = await Promise.all(events.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })));
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const statuses = await Promise.all(
|
||||
events.map((event) => renderStatus(event, { viewerPubkey })),
|
||||
);
|
||||
return paginated(c, events, statuses);
|
||||
};
|
||||
|
||||
|
@ -194,7 +201,7 @@ const updateCredentialsSchema = z.object({
|
|||
});
|
||||
|
||||
const updateCredentialsController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const body = await parseBody(c.req.raw);
|
||||
const result = updateCredentialsSchema.safeParse(body);
|
||||
|
||||
|
@ -236,11 +243,11 @@ const updateCredentialsController: AppController = async (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/accounts/#follow */
|
||||
const followController: AppController = async (c) => {
|
||||
const sourcePubkey = c.get('pubkey')!;
|
||||
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const targetPubkey = c.req.param('pubkey');
|
||||
|
||||
await updateListEvent(
|
||||
{ kinds: [3], authors: [sourcePubkey] },
|
||||
{ kinds: [3], authors: [sourcePubkey], limit: 1 },
|
||||
(tags) => addTag(tags, ['p', targetPubkey]),
|
||||
c,
|
||||
);
|
||||
|
@ -253,11 +260,11 @@ const followController: AppController = async (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/accounts/#unfollow */
|
||||
const unfollowController: AppController = async (c) => {
|
||||
const sourcePubkey = c.get('pubkey')!;
|
||||
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const targetPubkey = c.req.param('pubkey');
|
||||
|
||||
await updateListEvent(
|
||||
{ kinds: [3], authors: [sourcePubkey] },
|
||||
{ kinds: [3], authors: [sourcePubkey], limit: 1 },
|
||||
(tags) => deleteTag(tags, ['p', targetPubkey]),
|
||||
c,
|
||||
);
|
||||
|
@ -290,11 +297,11 @@ const unblockController: AppController = (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/accounts/#mute */
|
||||
const muteController: AppController = async (c) => {
|
||||
const sourcePubkey = c.get('pubkey')!;
|
||||
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const targetPubkey = c.req.param('pubkey');
|
||||
|
||||
await updateListEvent(
|
||||
{ kinds: [10000], authors: [sourcePubkey] },
|
||||
{ kinds: [10000], authors: [sourcePubkey], limit: 1 },
|
||||
(tags) => addTag(tags, ['p', targetPubkey]),
|
||||
c,
|
||||
);
|
||||
|
@ -305,11 +312,11 @@ const muteController: AppController = async (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/accounts/#unmute */
|
||||
const unmuteController: AppController = async (c) => {
|
||||
const sourcePubkey = c.get('pubkey')!;
|
||||
const sourcePubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const targetPubkey = c.req.param('pubkey');
|
||||
|
||||
await updateListEvent(
|
||||
{ kinds: [10000], authors: [sourcePubkey] },
|
||||
{ kinds: [10000], authors: [sourcePubkey], limit: 1 },
|
||||
(tags) => deleteTag(tags, ['p', targetPubkey]),
|
||||
c,
|
||||
);
|
||||
|
@ -319,11 +326,13 @@ const unmuteController: AppController = async (c) => {
|
|||
};
|
||||
|
||||
const favouritesController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const params = paginationSchema.parse(c.req.query());
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
const events7 = await Storages.db.query(
|
||||
const store = await Storages.db();
|
||||
|
||||
const events7 = await store.query(
|
||||
[{ kinds: [7], authors: [pubkey], ...params }],
|
||||
{ signal },
|
||||
);
|
||||
|
@ -332,10 +341,14 @@ const favouritesController: AppController = async (c) => {
|
|||
.map((event) => event.tags.find((tag) => tag[0] === 'e')?.[1])
|
||||
.filter((id): id is string => !!id);
|
||||
|
||||
const events1 = await Storages.db.query([{ kinds: [1], ids }], { signal })
|
||||
.then((events) => hydrateEvents({ events, storage: Storages.db, signal }));
|
||||
const events1 = await store.query([{ kinds: [1], ids }], { signal })
|
||||
.then((events) => hydrateEvents({ events, store, signal }));
|
||||
|
||||
const statuses = await Promise.all(events1.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })));
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const statuses = await Promise.all(
|
||||
events1.map((event) => renderStatus(event, { viewerPubkey })),
|
||||
);
|
||||
return paginated(c, events1, statuses);
|
||||
};
|
||||
|
||||
|
|
|
@ -39,12 +39,13 @@ const adminAccountsController: AppController = async (c) => {
|
|||
return c.json([]);
|
||||
}
|
||||
|
||||
const store = await Storages.db();
|
||||
const { since, until, limit } = paginationSchema.parse(c.req.query());
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
const events = await Storages.db.query([{ kinds: [30361], authors: [Conf.pubkey], since, until, limit }], { signal });
|
||||
const events = await store.query([{ kinds: [30361], authors: [Conf.pubkey], since, until, limit }], { signal });
|
||||
const pubkeys = events.map((event) => event.tags.find(([name]) => name === 'd')?.[1]!);
|
||||
const authors = await Storages.db.query([{ kinds: [0], authors: pubkeys }], { signal });
|
||||
const authors = await store.query([{ kinds: [0], authors: pubkeys }], { signal });
|
||||
|
||||
for (const event of events) {
|
||||
const d = event.tags.find(([name]) => name === 'd')?.[1];
|
||||
|
@ -78,7 +79,7 @@ const adminAccountAction: AppController = async (c) => {
|
|||
}
|
||||
|
||||
await updateListAdminEvent(
|
||||
{ kinds: [10000], authors: [Conf.pubkey] },
|
||||
{ kinds: [10000], authors: [Conf.pubkey], limit: 1 },
|
||||
(tags) => addTag(tags, ['p', authorId]),
|
||||
c,
|
||||
);
|
||||
|
|
|
@ -5,10 +5,11 @@ import { renderStatuses } from '@/views.ts';
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/bookmarks/#get */
|
||||
const bookmarksController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const store = await Storages.db();
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
const [event10003] = await Storages.db.query(
|
||||
const [event10003] = await store.query(
|
||||
[{ kinds: [10003], authors: [pubkey], limit: 1 }],
|
||||
{ signal },
|
||||
);
|
||||
|
|
|
@ -16,7 +16,9 @@ const relaySchema = z.object({
|
|||
type RelayEntity = z.infer<typeof relaySchema>;
|
||||
|
||||
export const adminRelaysController: AppController = async (c) => {
|
||||
const [event] = await Storages.db.query([
|
||||
const store = await Storages.db();
|
||||
|
||||
const [event] = await store.query([
|
||||
{ kinds: [10002], authors: [Conf.pubkey], limit: 1 },
|
||||
]);
|
||||
|
||||
|
@ -28,6 +30,7 @@ export const adminRelaysController: AppController = async (c) => {
|
|||
};
|
||||
|
||||
export const adminSetRelaysController: AppController = async (c) => {
|
||||
const store = await Storages.db();
|
||||
const relays = relaySchema.array().parse(await c.req.json());
|
||||
|
||||
const event = await new AdminSigner().signEvent({
|
||||
|
@ -37,7 +40,7 @@ export const adminSetRelaysController: AppController = async (c) => {
|
|||
created_at: Math.floor(Date.now() / 1000),
|
||||
});
|
||||
|
||||
await Storages.db.event(event);
|
||||
await store.event(event);
|
||||
|
||||
return c.json(renderRelays(event));
|
||||
};
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
import { AppController } from '@/app.ts';
|
||||
import { Conf } from '@/config.ts';
|
||||
import { Storages } from '@/storages.ts';
|
||||
import { getInstanceMetadata } from '@/utils/instance.ts';
|
||||
|
||||
const instanceController: AppController = async (c) => {
|
||||
const { host, protocol } = Conf.url;
|
||||
const meta = await getInstanceMetadata(c.req.raw.signal);
|
||||
const meta = await getInstanceMetadata(await Storages.db(), c.req.raw.signal);
|
||||
|
||||
/** Protocol to use for WebSocket URLs, depending on the protocol of the `LOCAL_DOMAIN`. */
|
||||
const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';
|
||||
|
|
|
@ -14,7 +14,7 @@ interface Marker {
|
|||
}
|
||||
|
||||
export const markersController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const timelines = c.req.queries('timeline[]') ?? [];
|
||||
|
||||
const results = await kv.getMany<Marker[]>(
|
||||
|
@ -37,7 +37,7 @@ const markerDataSchema = z.object({
|
|||
});
|
||||
|
||||
export const updateMarkersController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const record = z.record(z.enum(['home', 'notifications']), markerDataSchema).parse(await parseBody(c.req.raw));
|
||||
const timelines = Object.keys(record) as Timeline[];
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ const mediaBodySchema = z.object({
|
|||
});
|
||||
|
||||
const mediaController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const result = mediaBodySchema.safeParse(await parseBody(c.req.raw));
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
|
|
|
@ -5,10 +5,11 @@ import { renderAccounts } from '@/views.ts';
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/mutes/#get */
|
||||
const mutesController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const store = await Storages.db();
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
const [event10000] = await Storages.db.query(
|
||||
const [event10000] = await store.query(
|
||||
[{ kinds: [10000], authors: [pubkey], limit: 1 }],
|
||||
{ signal },
|
||||
);
|
||||
|
|
|
@ -5,8 +5,8 @@ import { hydrateEvents } from '@/storages/hydrate.ts';
|
|||
import { paginated, paginationSchema } from '@/utils/api.ts';
|
||||
import { renderNotification } from '@/views/mastodon/notifications.ts';
|
||||
|
||||
const notificationsController: AppController = (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const notificationsController: AppController = async (c) => {
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const { since, until } = paginationSchema.parse(c.req.query());
|
||||
|
||||
return renderNotifications(c, [{ kinds: [1, 6, 7], '#p': [pubkey], since, until }]);
|
||||
|
@ -14,13 +14,13 @@ const notificationsController: AppController = (c) => {
|
|||
|
||||
async function renderNotifications(c: AppContext, filters: NostrFilter[]) {
|
||||
const store = c.get('store');
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
const events = await store
|
||||
.query(filters, { signal })
|
||||
.then((events) => events.filter((event) => event.pubkey !== pubkey))
|
||||
.then((events) => hydrateEvents({ events, storage: store, signal }));
|
||||
.then((events) => hydrateEvents({ events, store, signal }));
|
||||
|
||||
if (!events.length) {
|
||||
return c.json([]);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { NSchema as n } from '@nostrify/nostrify';
|
||||
import { NSchema as n, NStore } from '@nostrify/nostrify';
|
||||
import { z } from 'zod';
|
||||
|
||||
import { type AppController } from '@/app.ts';
|
||||
|
@ -9,7 +9,8 @@ import { Storages } from '@/storages.ts';
|
|||
import { createAdminEvent } from '@/utils/api.ts';
|
||||
|
||||
const frontendConfigController: AppController = async (c) => {
|
||||
const configs = await getConfigs(c.req.raw.signal);
|
||||
const store = await Storages.db();
|
||||
const configs = await getConfigs(store, c.req.raw.signal);
|
||||
const frontendConfig = configs.find(({ group, key }) => group === ':pleroma' && key === ':frontend_configurations');
|
||||
|
||||
if (frontendConfig) {
|
||||
|
@ -25,7 +26,8 @@ const frontendConfigController: AppController = async (c) => {
|
|||
};
|
||||
|
||||
const configController: AppController = async (c) => {
|
||||
const configs = await getConfigs(c.req.raw.signal);
|
||||
const store = await Storages.db();
|
||||
const configs = await getConfigs(store, c.req.raw.signal);
|
||||
return c.json({ configs, need_reboot: false });
|
||||
};
|
||||
|
||||
|
@ -33,7 +35,8 @@ const configController: AppController = async (c) => {
|
|||
const updateConfigController: AppController = async (c) => {
|
||||
const { pubkey } = Conf;
|
||||
|
||||
const configs = await getConfigs(c.req.raw.signal);
|
||||
const store = await Storages.db();
|
||||
const configs = await getConfigs(store, c.req.raw.signal);
|
||||
const { configs: newConfigs } = z.object({ configs: z.array(configSchema) }).parse(await c.req.json());
|
||||
|
||||
for (const { group, key, value } of newConfigs) {
|
||||
|
@ -63,10 +66,10 @@ const pleromaAdminDeleteStatusController: AppController = async (c) => {
|
|||
return c.json({});
|
||||
};
|
||||
|
||||
async function getConfigs(signal: AbortSignal): Promise<PleromaConfig[]> {
|
||||
async function getConfigs(store: NStore, signal: AbortSignal): Promise<PleromaConfig[]> {
|
||||
const { pubkey } = Conf;
|
||||
|
||||
const [event] = await Storages.db.query([{
|
||||
const [event] = await store.query([{
|
||||
kinds: [30078],
|
||||
authors: [pubkey],
|
||||
'#d': ['pub.ditto.pleroma.config'],
|
||||
|
|
|
@ -48,16 +48,22 @@ const reportController: AppController = async (c) => {
|
|||
tags,
|
||||
}, c);
|
||||
|
||||
await hydrateEvents({ events: [event], storage: store });
|
||||
await hydrateEvents({ events: [event], store });
|
||||
return c.json(await renderReport(event));
|
||||
};
|
||||
|
||||
/** https://docs.joinmastodon.org/methods/admin/reports/#get */
|
||||
const adminReportsController: AppController = async (c) => {
|
||||
const store = c.get('store');
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const reports = await store.query([{ kinds: [1984], '#P': [Conf.pubkey] }])
|
||||
.then((events) => hydrateEvents({ storage: store, events: events, signal: c.req.raw.signal }))
|
||||
.then((events) => Promise.all(events.map((event) => renderAdminReport(event, { viewerPubkey: c.get('pubkey') }))));
|
||||
.then((events) => hydrateEvents({ store, events: events, signal: c.req.raw.signal }))
|
||||
.then((events) =>
|
||||
Promise.all(
|
||||
events.map((event) => renderAdminReport(event, { viewerPubkey })),
|
||||
)
|
||||
);
|
||||
|
||||
return c.json(reports);
|
||||
};
|
||||
|
@ -67,7 +73,7 @@ const adminReportController: AppController = async (c) => {
|
|||
const eventId = c.req.param('id');
|
||||
const { signal } = c.req.raw;
|
||||
const store = c.get('store');
|
||||
const pubkey = c.get('pubkey');
|
||||
const pubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const [event] = await store.query([{
|
||||
kinds: [1984],
|
||||
|
@ -79,7 +85,7 @@ const adminReportController: AppController = async (c) => {
|
|||
return c.json({ error: 'This action is not allowed' }, 403);
|
||||
}
|
||||
|
||||
await hydrateEvents({ events: [event], storage: store, signal });
|
||||
await hydrateEvents({ events: [event], store, signal });
|
||||
|
||||
return c.json(await renderAdminReport(event, { viewerPubkey: pubkey }));
|
||||
};
|
||||
|
@ -89,7 +95,7 @@ const adminReportResolveController: AppController = async (c) => {
|
|||
const eventId = c.req.param('id');
|
||||
const { signal } = c.req.raw;
|
||||
const store = c.get('store');
|
||||
const pubkey = c.get('pubkey');
|
||||
const pubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const [event] = await store.query([{
|
||||
kinds: [1984],
|
||||
|
@ -101,7 +107,7 @@ const adminReportResolveController: AppController = async (c) => {
|
|||
return c.json({ error: 'This action is not allowed' }, 403);
|
||||
}
|
||||
|
||||
await hydrateEvents({ events: [event], storage: store, signal });
|
||||
await hydrateEvents({ events: [event], store, signal });
|
||||
|
||||
await createAdminEvent({
|
||||
kind: 5,
|
||||
|
|
|
@ -43,6 +43,7 @@ const searchController: AppController = async (c) => {
|
|||
}
|
||||
|
||||
const results = dedupeEvents(events);
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const [accounts, statuses] = await Promise.all([
|
||||
Promise.all(
|
||||
|
@ -54,7 +55,7 @@ const searchController: AppController = async (c) => {
|
|||
Promise.all(
|
||||
results
|
||||
.filter((event) => event.kind === 1)
|
||||
.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') }))
|
||||
.map((event) => renderStatus(event, { viewerPubkey }))
|
||||
.filter(Boolean),
|
||||
),
|
||||
]);
|
||||
|
@ -77,7 +78,7 @@ const searchController: AppController = async (c) => {
|
|||
};
|
||||
|
||||
/** Get events for the search params. */
|
||||
function searchEvents({ q, type, limit, account_id }: SearchQuery, signal: AbortSignal): Promise<NostrEvent[]> {
|
||||
async function searchEvents({ q, type, limit, account_id }: SearchQuery, signal: AbortSignal): Promise<NostrEvent[]> {
|
||||
if (type === 'hashtags') return Promise.resolve([]);
|
||||
|
||||
const filter: NostrFilter = {
|
||||
|
@ -90,8 +91,10 @@ function searchEvents({ q, type, limit, account_id }: SearchQuery, signal: Abort
|
|||
filter.authors = [account_id];
|
||||
}
|
||||
|
||||
return Storages.search.query([filter], { signal })
|
||||
.then((events) => hydrateEvents({ events, storage: Storages.search, signal }));
|
||||
const store = await Storages.search();
|
||||
|
||||
return store.query([filter], { signal })
|
||||
.then((events) => hydrateEvents({ events, store, signal }));
|
||||
}
|
||||
|
||||
/** Get event kinds to search from `type` query param. */
|
||||
|
@ -109,9 +112,10 @@ function typeToKinds(type: SearchQuery['type']): number[] {
|
|||
/** Resolve a searched value into an event, if applicable. */
|
||||
async function lookupEvent(query: SearchQuery, signal: AbortSignal): Promise<NostrEvent | undefined> {
|
||||
const filters = await getLookupFilters(query, signal);
|
||||
const store = await Storages.search();
|
||||
|
||||
return Storages.search.query(filters, { limit: 1, signal })
|
||||
.then((events) => hydrateEvents({ events, storage: Storages.search, signal }))
|
||||
return store.query(filters, { limit: 1, signal })
|
||||
.then((events) => hydrateEvents({ events, store, signal }))
|
||||
.then(([event]) => event);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { NostrEvent, NostrFilter, NSchema as n } from '@nostrify/nostrify';
|
||||
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
|
||||
import ISO6391 from 'iso-639-1';
|
||||
import { z } from 'zod';
|
||||
|
||||
|
@ -47,7 +47,7 @@ const statusController: AppController = async (c) => {
|
|||
});
|
||||
|
||||
if (event) {
|
||||
return c.json(await renderStatus(event, { viewerPubkey: c.get('pubkey') }));
|
||||
return c.json(await renderStatus(event, { viewerPubkey: await c.get('signer')?.getPublicKey() }));
|
||||
}
|
||||
|
||||
return c.json({ error: 'Event not found.' }, 404);
|
||||
|
@ -89,9 +89,11 @@ const createStatusController: AppController = async (c) => {
|
|||
tags.push(['subject', data.spoiler_text]);
|
||||
}
|
||||
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
if (data.media_ids?.length) {
|
||||
const media = await getUnattachedMediaByIds(data.media_ids)
|
||||
.then((media) => media.filter(({ pubkey }) => pubkey === c.get('pubkey')))
|
||||
.then((media) => media.filter(({ pubkey }) => pubkey === viewerPubkey))
|
||||
.then((media) => media.map(({ url, data }) => ['media', url, data]));
|
||||
|
||||
tags.push(...media);
|
||||
|
@ -138,17 +140,17 @@ const createStatusController: AppController = async (c) => {
|
|||
if (data.quote_id) {
|
||||
await hydrateEvents({
|
||||
events: [event],
|
||||
storage: Storages.db,
|
||||
store: await Storages.db(),
|
||||
signal: c.req.raw.signal,
|
||||
});
|
||||
}
|
||||
|
||||
return c.json(await renderStatus({ ...event, author }, { viewerPubkey: c.get('pubkey') }));
|
||||
return c.json(await renderStatus({ ...event, author }, { viewerPubkey: await c.get('signer')?.getPublicKey() }));
|
||||
};
|
||||
|
||||
const deleteStatusController: AppController = async (c) => {
|
||||
const id = c.req.param('id');
|
||||
const pubkey = c.get('pubkey');
|
||||
const pubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const event = await getEvent(id, { signal: c.req.raw.signal });
|
||||
|
||||
|
@ -172,9 +174,12 @@ const deleteStatusController: AppController = async (c) => {
|
|||
const contextController: AppController = async (c) => {
|
||||
const id = c.req.param('id');
|
||||
const event = await getEvent(id, { kind: 1, relations: ['author', 'event_stats', 'author_stats'] });
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
async function renderStatuses(events: NostrEvent[]) {
|
||||
const statuses = await Promise.all(events.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })));
|
||||
const statuses = await Promise.all(
|
||||
events.map((event) => renderStatus(event, { viewerPubkey })),
|
||||
);
|
||||
return statuses.filter(Boolean);
|
||||
}
|
||||
|
||||
|
@ -204,7 +209,7 @@ const favouriteController: AppController = async (c) => {
|
|||
],
|
||||
}, c);
|
||||
|
||||
const status = await renderStatus(target, { viewerPubkey: c.get('pubkey') });
|
||||
const status = await renderStatus(target, { viewerPubkey: await c.get('signer')?.getPublicKey() });
|
||||
|
||||
if (status) {
|
||||
status.favourited = true;
|
||||
|
@ -243,11 +248,11 @@ const reblogStatusController: AppController = async (c) => {
|
|||
|
||||
await hydrateEvents({
|
||||
events: [reblogEvent],
|
||||
storage: Storages.db,
|
||||
store: await Storages.db(),
|
||||
signal: signal,
|
||||
});
|
||||
|
||||
const status = await renderReblog(reblogEvent, { viewerPubkey: c.get('pubkey') });
|
||||
const status = await renderReblog(reblogEvent, { viewerPubkey: await c.get('signer')?.getPublicKey() });
|
||||
|
||||
return c.json(status);
|
||||
};
|
||||
|
@ -255,23 +260,30 @@ const reblogStatusController: AppController = async (c) => {
|
|||
/** https://docs.joinmastodon.org/methods/statuses/#unreblog */
|
||||
const unreblogStatusController: AppController = async (c) => {
|
||||
const eventId = c.req.param('id');
|
||||
const pubkey = c.get('pubkey') as string;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
|
||||
const event = await getEvent(eventId, {
|
||||
kind: 1,
|
||||
});
|
||||
if (!event) return c.json({ error: 'Event not found.' }, 404);
|
||||
const event = await getEvent(eventId, { kind: 1 });
|
||||
|
||||
const filters: NostrFilter[] = [{ kinds: [6], authors: [pubkey], '#e': [event.id] }];
|
||||
const [repostedEvent] = await Storages.db.query(filters, { limit: 1 });
|
||||
if (!repostedEvent) return c.json({ error: 'Event not found.' }, 404);
|
||||
if (!event) {
|
||||
return c.json({ error: 'Event not found.' }, 404);
|
||||
}
|
||||
|
||||
const store = await Storages.db();
|
||||
|
||||
const [repostedEvent] = await store.query(
|
||||
[{ kinds: [6], authors: [pubkey], '#e': [event.id], limit: 1 }],
|
||||
);
|
||||
|
||||
if (!repostedEvent) {
|
||||
return c.json({ error: 'Event not found.' }, 404);
|
||||
}
|
||||
|
||||
await createEvent({
|
||||
kind: 5,
|
||||
tags: [['e', repostedEvent.id]],
|
||||
}, c);
|
||||
|
||||
return c.json(await renderStatus(event, {}));
|
||||
return c.json(await renderStatus(event, { viewerPubkey: pubkey }));
|
||||
};
|
||||
|
||||
const rebloggedByController: AppController = (c) => {
|
||||
|
@ -282,7 +294,7 @@ const rebloggedByController: AppController = (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/statuses/#bookmark */
|
||||
const bookmarkController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const eventId = c.req.param('id');
|
||||
|
||||
const event = await getEvent(eventId, {
|
||||
|
@ -292,7 +304,7 @@ const bookmarkController: AppController = async (c) => {
|
|||
|
||||
if (event) {
|
||||
await updateListEvent(
|
||||
{ kinds: [10003], authors: [pubkey] },
|
||||
{ kinds: [10003], authors: [pubkey], limit: 1 },
|
||||
(tags) => addTag(tags, ['e', eventId]),
|
||||
c,
|
||||
);
|
||||
|
@ -309,7 +321,7 @@ const bookmarkController: AppController = async (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/statuses/#unbookmark */
|
||||
const unbookmarkController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const eventId = c.req.param('id');
|
||||
|
||||
const event = await getEvent(eventId, {
|
||||
|
@ -319,7 +331,7 @@ const unbookmarkController: AppController = async (c) => {
|
|||
|
||||
if (event) {
|
||||
await updateListEvent(
|
||||
{ kinds: [10003], authors: [pubkey] },
|
||||
{ kinds: [10003], authors: [pubkey], limit: 1 },
|
||||
(tags) => deleteTag(tags, ['e', eventId]),
|
||||
c,
|
||||
);
|
||||
|
@ -336,7 +348,7 @@ const unbookmarkController: AppController = async (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/statuses/#pin */
|
||||
const pinController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const eventId = c.req.param('id');
|
||||
|
||||
const event = await getEvent(eventId, {
|
||||
|
@ -346,7 +358,7 @@ const pinController: AppController = async (c) => {
|
|||
|
||||
if (event) {
|
||||
await updateListEvent(
|
||||
{ kinds: [10001], authors: [pubkey] },
|
||||
{ kinds: [10001], authors: [pubkey], limit: 1 },
|
||||
(tags) => addTag(tags, ['e', eventId]),
|
||||
c,
|
||||
);
|
||||
|
@ -363,7 +375,7 @@ const pinController: AppController = async (c) => {
|
|||
|
||||
/** https://docs.joinmastodon.org/methods/statuses/#unpin */
|
||||
const unpinController: AppController = async (c) => {
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const eventId = c.req.param('id');
|
||||
const { signal } = c.req.raw;
|
||||
|
||||
|
@ -375,7 +387,7 @@ const unpinController: AppController = async (c) => {
|
|||
|
||||
if (event) {
|
||||
await updateListEvent(
|
||||
{ kinds: [10001], authors: [pubkey] },
|
||||
{ kinds: [10001], authors: [pubkey], limit: 1 },
|
||||
(tags) => deleteTag(tags, ['e', eventId]),
|
||||
c,
|
||||
);
|
||||
|
@ -423,7 +435,7 @@ const zapController: AppController = async (c) => {
|
|||
],
|
||||
}, c);
|
||||
|
||||
const status = await renderStatus(target, { viewerPubkey: c.get('pubkey') });
|
||||
const status = await renderStatus(target, { viewerPubkey: await c.get('signer')?.getPublicKey() });
|
||||
status.zapped = true;
|
||||
|
||||
return c.json(status);
|
||||
|
|
|
@@ -69,7 +69,10 @@ const streamingController: AppController = (c) => {
      if (!filter) return;

      try {
        for await (const msg of Storages.pubsub.req([filter], { signal: controller.signal })) {
        const pubsub = await Storages.pubsub();
        const optimizer = await Storages.optimizer();

        for await (const msg of pubsub.req([filter], { signal: controller.signal })) {
          if (msg[0] === 'EVENT') {
            const event = msg[2];

@@ -83,7 +86,7 @@ const streamingController: AppController = (c) => {

            await hydrateEvents({
              events: [event],
              storage: Storages.admin,
              store: optimizer,
              signal: AbortSignal.timeout(1000),
            });
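Throughout this diff, `Storages.db`, `Storages.pubsub`, and `Storages.optimizer` change from eagerly-created static properties into async accessors that are awaited at each call site. A minimal sketch of the lazy-singleton pattern those call sites imply; the implementation here is an assumption, only the `await Storages.x()` usage appears in the diff:

```ts
// Minimal sketch of the lazy async-singleton accessor implied by `await Storages.db()`.
// `Store` is a stand-in; the real stores implement NStore from '@nostrify/nostrify'.
interface Store {
  query(filters: unknown[]): Promise<unknown[]>;
}

class StoragesSketch {
  private static _db: Promise<Store> | undefined;

  static db(): Promise<Store> {
    if (!this._db) {
      // The store is opened once; every later caller shares the same promise.
      this._db = Promise.resolve({ query: async () => [] });
    }
    return this._db;
  }
}

const a = await StoragesSketch.db();
const b = await StoragesSketch.db();
console.log(a === b); // true: one shared instance
```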
@@ -40,7 +40,7 @@ async function renderSuggestedAccounts(store: NStore, signal?: AbortSignal) {
      [{ kinds: [0], authors: pubkeys, limit: pubkeys.length }],
      { signal },
    )
    .then((events) => hydrateEvents({ events, storage: store, signal }));
    .then((events) => hydrateEvents({ events, store, signal }));

  const accounts = await Promise.all(pubkeys.map((pubkey) => {
    const profile = profiles.find((event) => event.pubkey === pubkey);
@ -11,7 +11,7 @@ import { renderReblog, renderStatus } from '@/views/mastodon/statuses.ts';
|
|||
|
||||
const homeTimelineController: AppController = async (c) => {
|
||||
const params = paginationSchema.parse(c.req.query());
|
||||
const pubkey = c.get('pubkey')!;
|
||||
const pubkey = await c.get('signer')?.getPublicKey()!;
|
||||
const authors = await getFeedPubkeys(pubkey);
|
||||
return renderStatuses(c, [{ authors, kinds: [1, 6], ...params }]);
|
||||
};
|
||||
|
@ -49,24 +49,20 @@ async function renderStatuses(c: AppContext, filters: NostrFilter[]) {
|
|||
|
||||
const events = await store
|
||||
.query(filters, { signal })
|
||||
.then((events) =>
|
||||
hydrateEvents({
|
||||
events,
|
||||
storage: store,
|
||||
signal,
|
||||
})
|
||||
);
|
||||
.then((events) => hydrateEvents({ events, store, signal }));
|
||||
|
||||
if (!events.length) {
|
||||
return c.json([]);
|
||||
}
|
||||
|
||||
const viewerPubkey = await c.get('signer')?.getPublicKey();
|
||||
|
||||
const statuses = (await Promise.all(events.map((event) => {
|
||||
if (event.kind === 6) {
|
||||
return renderReblog(event, { viewerPubkey: c.get('pubkey') });
|
||||
return renderReblog(event, { viewerPubkey });
|
||||
}
|
||||
return renderStatus(event, { viewerPubkey: c.get('pubkey') });
|
||||
}))).filter((boolean) => boolean);
|
||||
return renderStatus(event, { viewerPubkey });
|
||||
}))).filter(Boolean);
|
||||
|
||||
if (!statuses.length) {
|
||||
return c.json([]);
|
||||
|
|
|
@@ -1,9 +1,11 @@
import { AppController } from '@/app.ts';
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';

const relayInfoController: AppController = async (c) => {
  const meta = await getInstanceMetadata(c.req.raw.signal);
  const store = await Storages.db();
  const meta = await getInstanceMetadata(store, c.req.raw.signal);

  return c.json({
    name: meta.name,
@@ -10,6 +10,7 @@ import {
} from '@nostrify/nostrify';
import { relayInfoController } from '@/controllers/nostr/relay-info.ts';
import * as pipeline from '@/pipeline.ts';
import { RelayError } from '@/RelayError.ts';
import { Storages } from '@/storages.ts';

import type { AppController } from '@/app.ts';

@@ -71,14 +72,17 @@ function connectStream(socket: WebSocket) {
    controllers.get(subId)?.abort();
    controllers.set(subId, controller);

    for (const event of await Storages.db.query(filters, { limit: FILTER_LIMIT })) {
    const db = await Storages.db();
    const pubsub = await Storages.pubsub();

    for (const event of await db.query(filters, { limit: FILTER_LIMIT })) {
      send(['EVENT', subId, event]);
    }

    send(['EOSE', subId]);

    try {
      for await (const msg of Storages.pubsub.req(filters, { signal: controller.signal })) {
      for await (const msg of pubsub.req(filters, { signal: controller.signal })) {
        if (msg[0] === 'EVENT') {
          send(['EVENT', subId, msg[2]]);
        }

@@ -95,7 +99,7 @@ function connectStream(socket: WebSocket) {
      await pipeline.handleEvent(event, AbortSignal.timeout(1000));
      send(['OK', event.id, true, '']);
    } catch (e) {
      if (e instanceof pipeline.RelayError) {
      if (e instanceof RelayError) {
        send(['OK', event.id, false, e.message]);
      } else {
        send(['OK', event.id, false, 'error: something went wrong']);

@@ -115,7 +119,8 @@ function connectStream(socket: WebSocket) {

  /** Handle COUNT. Return the number of events matching the filters. */
  async function handleCount([_, subId, ...rest]: NostrClientCOUNT): Promise<void> {
    const { count } = await Storages.db.count(prepareFilters(rest));
    const store = await Storages.db();
    const { count } = await store.count(prepareFilters(rest));
    send(['COUNT', subId, { count, approximate: false }]);
  }
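For context on the `handleCount` change above: COUNT is the NIP-45 verb, and Ditto mounts this WebSocket handler at `/relay` (see the route table earlier in the diff). A hedged sketch of the exchange from a client's point of view, with a placeholder relay URL:

```ts
// Placeholder relay URL; Ditto serves its relay endpoint at /relay on the configured domain.
const socket = new WebSocket('wss://ditto.example/relay');

socket.onopen = () => {
  // NIP-45 COUNT request: count kind-1 notes from one author.
  socket.send(JSON.stringify([
    'COUNT',
    'my-sub-id',
    { kinds: [1], authors: ['e4d96e951739787e62ada74ee06a9a185af22791a899a6166ec23aab58c5d700'] },
  ]));
};

socket.onmessage = (ev) => {
  const msg = JSON.parse(ev.data);
  if (msg[0] === 'COUNT' && msg[1] === 'my-sub-id') {
    console.log(msg[2]); // e.g. { count: 1, approximate: false }
  }
};
```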
@@ -12,7 +12,7 @@ const nameSchema = z.string().min(1).regex(/^\w+$/);
const nostrController: AppController = async (c) => {
  const result = nameSchema.safeParse(c.req.query('name'));
  const name = result.success ? result.data : undefined;
  const pointer = name ? await localNip05Lookup(name) : undefined;
  const pointer = name ? await localNip05Lookup(c.get('store'), name) : undefined;

  if (!name || !pointer) {
    return c.json({ names: {}, relays: {} });
@@ -45,7 +45,7 @@ async function handleAcct(c: AppContext, resource: URL): Promise<Response> {
  }

  const [username, host] = result.data;
  const pointer = await localNip05Lookup(username);
  const pointer = await localNip05Lookup(c.get('store'), username);

  if (!pointer) {
    return c.json({ error: 'Not found' }, 404);
src/db.ts
@@ -1,41 +0,0 @@
import fs from 'node:fs/promises';
import path from 'node:path';

import { FileMigrationProvider, Migrator } from 'kysely';

import { DittoDB } from '@/db/DittoDB.ts';

const db = await DittoDB.getInstance();

const migrator = new Migrator({
  db,
  provider: new FileMigrationProvider({
    fs,
    path,
    migrationFolder: new URL(import.meta.resolve('./db/migrations')).pathname,
  }),
});

/** Migrate the database to the latest version. */
async function migrate() {
  console.info('Running migrations...');
  const results = await migrator.migrateToLatest();

  if (results.error) {
    console.error(results.error);
    Deno.exit(1);
  } else {
    if (!results.results?.length) {
      console.info('Everything up-to-date.');
    } else {
      console.info('Migrations finished!');
      for (const { migrationName, status } of results.results!) {
        console.info(`  - ${migrationName}: ${status}`);
      }
    }
  }
}

await migrate();

export { db };
@@ -1,4 +1,7 @@
import { Kysely } from 'kysely';
import fs from 'node:fs/promises';
import path from 'node:path';

import { FileMigrationProvider, Kysely, Migrator } from 'kysely';

import { Conf } from '@/config.ts';
import { DittoPostgres } from '@/db/adapters/DittoPostgres.ts';

@@ -6,17 +9,63 @@ import { DittoSQLite } from '@/db/adapters/DittoSQLite.ts';
import { DittoTables } from '@/db/DittoTables.ts';

export class DittoDB {
  private static kysely: Promise<Kysely<DittoTables>> | undefined;

  static getInstance(): Promise<Kysely<DittoTables>> {
    if (!this.kysely) {
      this.kysely = this._getInstance();
    }
    return this.kysely;
  }

  static async _getInstance(): Promise<Kysely<DittoTables>> {
    const { databaseUrl } = Conf;

    let kysely: Kysely<DittoTables>;

    switch (databaseUrl.protocol) {
      case 'sqlite:':
        return DittoSQLite.getInstance();
        kysely = await DittoSQLite.getInstance();
        break;
      case 'postgres:':
      case 'postgresql:':
        return DittoPostgres.getInstance();
        kysely = await DittoPostgres.getInstance();
        break;
      default:
        throw new Error('Unsupported database URL.');
    }

    await this.migrate(kysely);

    return kysely;
  }

  /** Migrate the database to the latest version. */
  private static async migrate(kysely: Kysely<DittoTables>) {
    const migrator = new Migrator({
      db: kysely,
      provider: new FileMigrationProvider({
        fs,
        path,
        migrationFolder: new URL(import.meta.resolve('../db/migrations')).pathname,
      }),
    });

    console.info('Running migrations...');
    const results = await migrator.migrateToLatest();

    if (results.error) {
      console.error(results.error);
      Deno.exit(1);
    } else {
      if (!results.results?.length) {
        console.info('Everything up-to-date.');
      } else {
        console.info('Migrations finished!');
        for (const { migrationName, status } of results.results!) {
          console.info(`  - ${migrationName}: ${status}`);
        }
      }
    }
  }
}
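A quick note on the new `DittoDB.getInstance()` flow above: the first caller creates the Kysely instance and runs migrations, and every later caller shares the same cached promise. A minimal usage sketch, assuming only what this diff shows (the query itself is illustrative):

```ts
import { DittoDB } from '@/db/DittoDB.ts';

// First call creates the Kysely instance and runs migrations;
// later calls reuse the same cached promise.
const kysely = await DittoDB.getInstance();

const rows = await kysely
  .selectFrom('author_stats') // a table from DittoTables, used elsewhere in this diff
  .selectAll()
  .limit(10)
  .execute();
```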
@@ -1,6 +1,6 @@
import uuid62 from 'uuid62';

import { db } from '@/db.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { type MediaData } from '@/schemas/nostr.ts';

interface UnattachedMedia {

@@ -19,7 +19,8 @@ async function insertUnattachedMedia(media: Omit<UnattachedMedia, 'id' | 'upload
    ...media,
  };

  await db.insertInto('unattached_media')
  const kysely = await DittoDB.getInstance();
  await kysely.insertInto('unattached_media')
    .values({ ...result, data: JSON.stringify(media.data) })
    .execute();

@@ -27,8 +28,9 @@ async function insertUnattachedMedia(media: Omit<UnattachedMedia, 'id' | 'upload
}

/** Select query for unattached media. */
function selectUnattachedMediaQuery() {
  return db.selectFrom('unattached_media')
async function selectUnattachedMediaQuery() {
  const kysely = await DittoDB.getInstance();
  return kysely.selectFrom('unattached_media')
    .select([
      'unattached_media.id',
      'unattached_media.pubkey',

@@ -39,25 +41,27 @@ function selectUnattachedMediaQuery() {
}

/** Find attachments that exist but aren't attached to any events. */
function getUnattachedMedia(until: Date) {
  return selectUnattachedMediaQuery()
async function getUnattachedMedia(until: Date) {
  const query = await selectUnattachedMediaQuery();
  return query
    .leftJoin('tags', 'unattached_media.url', 'tags.value')
    .where('uploaded_at', '<', until.getTime())
    .execute();
}

/** Delete unattached media by URL. */
function deleteUnattachedMediaByUrl(url: string) {
  return db.deleteFrom('unattached_media')
async function deleteUnattachedMediaByUrl(url: string) {
  const kysely = await DittoDB.getInstance();
  return kysely.deleteFrom('unattached_media')
    .where('url', '=', url)
    .execute();
}

/** Get unattached media by IDs. */
// deno-lint-ignore require-await
async function getUnattachedMediaByIds(ids: string[]) {
  if (!ids.length) return [];
  return selectUnattachedMediaQuery()
  const query = await selectUnattachedMediaQuery();
  return query
    .where('id', 'in', ids)
    .execute();
}

@@ -65,7 +69,8 @@ async function getUnattachedMediaByIds(ids: string[]) {
/** Delete rows as an event with media is being created. */
async function deleteAttachedMedia(pubkey: string, urls: string[]): Promise<void> {
  if (!urls.length) return;
  await db.deleteFrom('unattached_media')
  const kysely = await DittoDB.getInstance();
  await kysely.deleteFrom('unattached_media')
    .where('pubkey', '=', pubkey)
    .where('url', 'in', urls)
    .execute();
@@ -60,7 +60,8 @@ async function findUser(user: Partial<User>, signal?: AbortSignal): Promise<User
    }
  }

  const [event] = await Storages.db.query([filter], { signal });
  const store = await Storages.db();
  const [event] = await store.query([filter], { signal });

  if (event) {
    return {
@@ -1 +0,0 @@
export { assert, assertEquals, assertRejects, assertThrows } from 'https://deno.land/std@0.198.0/assert/mod.ts';

@@ -1,4 +1,4 @@
import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';

import event0 from '~/fixtures/events/event-0.json' with { type: 'json' };
import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
@@ -1,29 +1,28 @@
import { NostrEvent } from '@nostrify/nostrify';
import Debug from '@soapbox/stickynotes/debug';
import { Stickynotes } from '@soapbox/stickynotes';

import { activeRelays, pool } from '@/pool.ts';
import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';

import * as pipeline from './pipeline.ts';

const debug = Debug('ditto:firehose');
const console = new Stickynotes('ditto:firehose');

// This file watches events on all known relays and performs
// side-effects based on them, such as trending hashtag tracking
// and storing events for notifications and the home feed.
pool.subscribe(
  [{ kinds: [0, 1, 3, 5, 6, 7, 9735, 10002], limit: 0, since: nostrNow() }],
  activeRelays,
  handleEvent,
  undefined,
  undefined,
);
/**
 * This function watches events on all known relays and performs
 * side-effects based on them, such as trending hashtag tracking
 * and storing events for notifications and the home feed.
 */
export async function startFirehose() {
  const store = await Storages.client();

/** Handle events through the firehose pipeline. */
function handleEvent(event: NostrEvent): Promise<void> {
  debug(`NostrEvent<${event.kind}> ${event.id}`);
  for await (const msg of store.req([{ kinds: [0, 1, 3, 5, 6, 7, 9735, 10002], limit: 0, since: nostrNow() }])) {
    if (msg[0] === 'EVENT') {
      const event = msg[2];
      console.debug(`NostrEvent<${event.kind}> ${event.id}`);

  return pipeline
    .handleEvent(event, AbortSignal.timeout(5000))
    .catch(() => {});
      pipeline
        .handleEvent(event, AbortSignal.timeout(5000))
        .catch(() => {});
    }
  }
}
@@ -1,49 +0,0 @@
import { HTTPException } from 'hono';
import { getPublicKey, nip19 } from 'nostr-tools';

import { type AppMiddleware } from '@/app.ts';

/** We only accept "Bearer" type. */
const BEARER_REGEX = new RegExp(`^Bearer (${nip19.BECH32_REGEX.source})$`);

/** NIP-19 auth middleware. */
const auth19: AppMiddleware = async (c, next) => {
  const authHeader = c.req.header('authorization');
  const match = authHeader?.match(BEARER_REGEX);

  if (match) {
    const [_, bech32] = match;

    try {
      const decoded = nip19.decode(bech32!);

      switch (decoded.type) {
        case 'npub':
          c.set('pubkey', decoded.data);
          break;
        case 'nprofile':
          c.set('pubkey', decoded.data.pubkey);
          break;
        case 'nsec':
          c.set('pubkey', getPublicKey(decoded.data));
          c.set('seckey', decoded.data);
          break;
      }
    } catch (_e) {
      //
    }
  }

  await next();
};

/** Throw a 401 if the pubkey isn't set. */
const requirePubkey: AppMiddleware = async (c, next) => {
  if (!c.get('pubkey')) {
    throw new HTTPException(401, { message: 'No pubkey provided' });
  }

  await next();
};

export { auth19, requirePubkey };
@@ -1,27 +1,28 @@
import { NostrEvent } from '@nostrify/nostrify';
import { HTTPException } from 'hono';

import { type AppContext, type AppMiddleware } from '@/app.ts';
import { findUser, User } from '@/db/users.ts';
import { ConnectSigner } from '@/signers/ConnectSigner.ts';
import { localRequest } from '@/utils/api.ts';
import {
  buildAuthEventTemplate,
  parseAuthRequest,
  type ParseAuthRequestOpts,
  validateAuthEvent,
} from '@/utils/nip98.ts';
import { localRequest } from '@/utils/api.ts';
import { APISigner } from '@/signers/APISigner.ts';
import { findUser, User } from '@/db/users.ts';

/**
 * NIP-98 auth.
 * https://github.com/nostr-protocol/nips/blob/master/98.md
 */
function auth98(opts: ParseAuthRequestOpts = {}): AppMiddleware {
function auth98Middleware(opts: ParseAuthRequestOpts = {}): AppMiddleware {
  return async (c, next) => {
    const req = localRequest(c);
    const result = await parseAuthRequest(req, opts);

    if (result.success) {
      c.set('pubkey', result.data.pubkey);
      c.set('signer', new ConnectSigner(result.data.pubkey));
      c.set('proof', result.data);
    }

@@ -33,9 +34,8 @@ type UserRole = 'user' | 'admin';

/** Require the user to prove their role before invoking the controller. */
function requireRole(role: UserRole, opts?: ParseAuthRequestOpts): AppMiddleware {
  return withProof(async (c, proof, next) => {
  return withProof(async (_c, proof, next) => {
    const user = await findUser({ pubkey: proof.pubkey });
    c.set('user', user);

    if (user && matchesRole(user, role)) {
      await next();

@@ -70,7 +70,7 @@ function withProof(
  opts?: ParseAuthRequestOpts,
): AppMiddleware {
  return async (c, next) => {
    const pubkey = c.get('pubkey');
    const pubkey = await c.get('signer')?.getPublicKey();
    const proof = c.get('proof') || await obtainProof(c, opts);

    // Prevent people from accidentally using the wrong account. This has no other security implications.

@@ -79,7 +79,7 @@ function withProof(
    }

    if (proof) {
      c.set('pubkey', proof.pubkey);
      c.set('signer', new ConnectSigner(proof.pubkey));
      c.set('proof', proof);
      await handler(c, proof, next);
    } else {

@@ -90,9 +90,16 @@ function withProof(

/** Get the proof over Nostr Connect. */
async function obtainProof(c: AppContext, opts?: ParseAuthRequestOpts) {
  const signer = c.get('signer');
  if (!signer) {
    throw new HTTPException(401, {
      res: c.json({ error: 'No way to sign Nostr event' }, 401),
    });
  }

  const req = localRequest(c);
  const reqEvent = await buildAuthEventTemplate(req, opts);
  const resEvent = await new APISigner(c).signEvent(reqEvent);
  const resEvent = await signer.signEvent(reqEvent);
  const result = await validateAuthEvent(req, resEvent, opts);

  if (result.success) {

@@ -100,4 +107,4 @@ async function obtainProof(c: AppContext, opts?: ParseAuthRequestOpts) {
  }
}

export { auth98, requireProof, requireRole };
export { auth98Middleware, requireProof, requireRole };
@@ -5,7 +5,7 @@ import ExpiringCache from '@/utils/expiring-cache.ts';

const debug = Debug('ditto:middleware:cache');

export const cache = (options: {
export const cacheMiddleware = (options: {
  cacheName: string;
  expires?: number;
}): MiddlewareHandler => {
@@ -1,7 +1,7 @@
import { AppMiddleware } from '@/app.ts';
import { Conf } from '@/config.ts';

const csp = (): AppMiddleware => {
export const cspMiddleware = (): AppMiddleware => {
  return async (c, next) => {
    const { host, protocol, origin } = Conf.url;
    const wsProtocol = protocol === 'http:' ? 'ws:' : 'wss:';

@@ -26,5 +26,3 @@ const csp = (): AppMiddleware => {
    await next();
  };
};

export { csp };
@@ -0,0 +1,12 @@
import { HTTPException } from 'hono';

import { AppMiddleware } from '@/app.ts';

/** Throw a 401 if a signer isn't set. */
export const requireSigner: AppMiddleware = async (c, next) => {
  if (!c.get('signer')) {
    throw new HTTPException(401, { message: 'No pubkey provided' });
  }

  await next();
};
@@ -0,0 +1,41 @@
import { NSecSigner } from '@nostrify/nostrify';
import { Stickynotes } from '@soapbox/stickynotes';
import { nip19 } from 'nostr-tools';

import { AppMiddleware } from '@/app.ts';
import { ConnectSigner } from '@/signers/ConnectSigner.ts';

const console = new Stickynotes('ditto:signerMiddleware');

/** We only accept "Bearer" type. */
const BEARER_REGEX = new RegExp(`^Bearer (${nip19.BECH32_REGEX.source})$`);

/** Make a `signer` object available to all controllers, or unset if the user isn't logged in. */
export const signerMiddleware: AppMiddleware = async (c, next) => {
  const header = c.req.header('authorization');
  const match = header?.match(BEARER_REGEX);

  if (match) {
    const [_, bech32] = match;

    try {
      const decoded = nip19.decode(bech32!);

      switch (decoded.type) {
        case 'npub':
          c.set('signer', new ConnectSigner(decoded.data));
          break;
        case 'nprofile':
          c.set('signer', new ConnectSigner(decoded.data.pubkey, decoded.data.relays));
          break;
        case 'nsec':
          c.set('signer', new NSecSigner(decoded.data));
          break;
      }
    } catch {
      console.debug('The user is not logged in');
    }
  }

  await next();
};
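The middleware above only attaches a signer to the request context; controllers read it back and derive the pubkey on demand. A hedged sketch of that pattern (the controller name and route are hypothetical, not part of this commit):

```ts
import { AppController } from '@/app.ts';

// Hypothetical controller showing the pattern used throughout this diff:
// read the signer from context, then derive the pubkey only when needed.
const whoamiController: AppController = async (c) => {
  const signer = c.get('signer');

  if (!signer) {
    return c.json({ error: 'Unauthorized' }, 401);
  }

  const pubkey = await signer.getPublicKey();
  return c.json({ pubkey });
};

export { whoamiController };
```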
@@ -3,16 +3,14 @@ import { UserStore } from '@/storages/UserStore.ts';
import { Storages } from '@/storages.ts';

/** Store middleware. */
const storeMiddleware: AppMiddleware = async (c, next) => {
  const pubkey = c.get('pubkey');
export const storeMiddleware: AppMiddleware = async (c, next) => {
  const pubkey = await c.get('signer')?.getPublicKey();

  if (pubkey) {
    const store = new UserStore(pubkey, Storages.admin);
    const store = new UserStore(pubkey, await Storages.admin());
    c.set('store', store);
  } else {
    c.set('store', Storages.admin);
    c.set('store', await Storages.admin());
  }
  await next();
};

export { storeMiddleware };

src/pipeline.ts
@@ -1,14 +1,16 @@
import { NostrEvent, NSchema as n } from '@nostrify/nostrify';
import { NostrEvent, NPolicy, NSchema as n } from '@nostrify/nostrify';
import { LNURL } from '@nostrify/nostrify/ln';
import { PipePolicy } from '@nostrify/nostrify/policies';
import Debug from '@soapbox/stickynotes/debug';
import { sql } from 'kysely';

import { Conf } from '@/config.ts';
import { db } from '@/db.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { deleteAttachedMedia } from '@/db/unattached-media.ts';
import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { isEphemeralKind } from '@/kinds.ts';
import { DVM } from '@/pipeline/DVM.ts';
import { RelayError } from '@/RelayError.ts';
import { updateStats } from '@/stats.ts';
import { hydrateEvents, purifyEvent } from '@/storages/hydrate.ts';
import { Storages } from '@/storages.ts';

@@ -21,18 +23,10 @@ import { AdminSigner } from '@/signers/AdminSigner.ts';
import { lnurlCache } from '@/utils/lnurl.ts';
import { nip05Cache } from '@/utils/nip05.ts';

import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';

const debug = Debug('ditto:pipeline');

let UserPolicy: any;

try {
  UserPolicy = (await import('../data/policy.ts')).default;
  debug('policy loaded from data/policy.ts');
} catch (_e) {
  // do nothing
  debug('policy not found');
}

/**
 * Common pipeline function to process (and maybe store) events.
 * It is idempotent, so it can be called multiple times for the same event.

@@ -41,18 +35,13 @@ async function handleEvent(event: DittoEvent, signal: AbortSignal): Promise<void
  if (!(await verifyEventWorker(event))) return;
  if (await encounterEvent(event, signal)) return;
  debug(`NostrEvent<${event.kind}> ${event.id}`);
  await hydrateEvent(event, signal);

  if (UserPolicy) {
    const result = await new UserPolicy().call(event, signal);
    debug(JSON.stringify(result));
    const [_, _eventId, ok, reason] = result;
    if (!ok) {
      const [prefix, ...rest] = reason.split(': ');
      throw new RelayError(prefix, rest.join(': '));
    }
  if (event.kind !== 24133) {
    await policyFilter(event);
  }

  await hydrateEvent(event, signal);

  await Promise.all([
    storeEvent(event, signal),
    parseMetadata(event, signal),

@@ -66,19 +55,44 @@ async function handleEvent(event: DittoEvent, signal: AbortSignal): Promise<void
  ]);
}

async function policyFilter(event: NostrEvent): Promise<void> {
  const policies: NPolicy[] = [
    new MuteListPolicy(Conf.pubkey, await Storages.admin()),
  ];

  try {
    const CustomPolicy = (await import('../data/policy.ts')).default;
    policies.push(new CustomPolicy());
  } catch (_e) {
    debug('policy not found - https://docs.soapbox.pub/ditto/policies/');
  }

  const policy = new PipePolicy(policies.reverse());

  const result = await policy.call(event);
  debug(JSON.stringify(result));
  RelayError.assert(result);
}

/** Encounter the event, and return whether it has already been encountered. */
async function encounterEvent(event: NostrEvent, signal: AbortSignal): Promise<boolean> {
  const [existing] = await Storages.cache.query([{ ids: [event.id], limit: 1 }]);
  Storages.cache.event(event);
  Storages.reqmeister.event(event, { signal });
  const cache = await Storages.cache();
  const reqmeister = await Storages.reqmeister();

  const [existing] = await cache.query([{ ids: [event.id], limit: 1 }]);

  cache.event(event);
  reqmeister.event(event, { signal });

  return !!existing;
}

/** Hydrate the event with the user, if applicable. */
async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<void> {
  await hydrateEvents({ events: [event], storage: Storages.db, signal });
  await hydrateEvents({ events: [event], store: await Storages.db(), signal });

  const domain = await db
  const kysely = await DittoDB.getInstance();
  const domain = await kysely
    .selectFrom('pubkey_domains')
    .select('domain')
    .where('pubkey', '=', event.pubkey)

@@ -90,8 +104,9 @@ async function hydrateEvent(event: DittoEvent, signal: AbortSignal): Promise<voi
/** Maybe store the event, if eligible. */
async function storeEvent(event: DittoEvent, signal?: AbortSignal): Promise<void> {
  if (isEphemeralKind(event.kind)) return;
  const store = await Storages.db();

  const [deletion] = await Storages.db.query(
  const [deletion] = await store.query(
    [{ kinds: [5], authors: [Conf.pubkey, event.pubkey], '#e': [event.id], limit: 1 }],
    { signal },
  );

@@ -99,10 +114,8 @@ async function storeEvent(event: DittoEvent, signal?: AbortSignal): Promise<void
  if (deletion) {
    return Promise.reject(new RelayError('blocked', 'event was deleted'));
  } else {
    await Promise.all([
      Storages.db.event(event, { signal }).catch(debug),
      updateStats(event).catch(debug),
    ]);
    await updateStats(event).catch(debug);
    await store.event(event, { signal }).catch(debug);
  }
}
@@ -128,6 +141,7 @@ async function parseMetadata(event: NostrEvent, signal: AbortSignal): Promise<vo

  // Track pubkey domain.
  try {
    const kysely = await DittoDB.getInstance();
    const { domain } = parseNip05(nip05);

    await sql`

@@ -137,7 +151,7 @@ async function parseMetadata(event: NostrEvent, signal: AbortSignal): Promise<vo
        domain = excluded.domain,
        last_updated_at = excluded.last_updated_at
      WHERE excluded.last_updated_at > pubkey_domains.last_updated_at
    `.execute(db);
    `.execute(kysely);
  } catch (_e) {
    // do nothing
  }

@@ -147,17 +161,18 @@ async function parseMetadata(event: NostrEvent, signal: AbortSignal): Promise<vo
async function processDeletions(event: NostrEvent, signal: AbortSignal): Promise<void> {
  if (event.kind === 5) {
    const ids = getTagSet(event.tags, 'e');
    const store = await Storages.db();

    if (event.pubkey === Conf.pubkey) {
      await Storages.db.remove([{ ids: [...ids] }], { signal });
      await store.remove([{ ids: [...ids] }], { signal });
    } else {
      const events = await Storages.db.query(
      const events = await store.query(
        [{ ids: [...ids], authors: [event.pubkey] }],
        { signal },
      );

      const deleteIds = events.map(({ id }) => id);
      await Storages.db.remove([{ ids: deleteIds }], { signal });
      await store.remove([{ ids: deleteIds }], { signal });
    }
  }
}

@@ -183,19 +198,22 @@ async function trackHashtags(event: NostrEvent): Promise<void> {

/** Queue related events to fetch. */
async function fetchRelatedEvents(event: DittoEvent) {
  const cache = await Storages.cache();
  const reqmeister = await Storages.reqmeister();

  if (!event.author) {
    const signal = AbortSignal.timeout(3000);
    Storages.reqmeister.query([{ kinds: [0], authors: [event.pubkey] }], { signal })
    reqmeister.query([{ kinds: [0], authors: [event.pubkey] }], { signal })
      .then((events) => Promise.allSettled(events.map((event) => handleEvent(event, signal))))
      .catch(() => {});
  }

  for (const [name, id] of event.tags) {
    if (name === 'e') {
      const { count } = await Storages.cache.count([{ ids: [id] }]);
      const { count } = await cache.count([{ ids: [id] }]);
      if (!count) {
        const signal = AbortSignal.timeout(3000);
        Storages.reqmeister.query([{ ids: [id] }], { signal })
        reqmeister.query([{ ids: [id] }], { signal })
          .then((events) => Promise.allSettled(events.map((event) => handleEvent(event, signal))))
          .catch(() => {});
      }

@@ -266,15 +284,9 @@ function isFresh(event: NostrEvent): boolean {
/** Distribute the event through active subscriptions. */
async function streamOut(event: NostrEvent): Promise<void> {
  if (isFresh(event)) {
    await Storages.pubsub.event(event);
    const pubsub = await Storages.pubsub();
    await pubsub.event(event);
  }
}

/** NIP-20 command line result. */
class RelayError extends Error {
  constructor(prefix: 'duplicate' | 'pow' | 'blocked' | 'rate-limited' | 'invalid' | 'error', message: string) {
    super(`${prefix}: ${message}`);
  }
}

export { handleEvent, RelayError };
export { handleEvent };
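The new `policyFilter` above composes the built-in `MuteListPolicy` with an optional custom policy loaded from `data/policy.ts` (see the linked docs). As a rough, hedged sketch of what such a file could look like — the exact `NPolicy` typings come from `@nostrify/nostrify`, and the kind-7 rule below is purely illustrative:

```ts
import { NostrEvent, NPolicy } from '@nostrify/nostrify';

// Hypothetical data/policy.ts: reject kind-7 reactions, allow everything else.
export default class ExamplePolicy implements NPolicy {
  // deno-lint-ignore require-await
  async call(event: NostrEvent): Promise<['OK', string, boolean, string]> {
    if (event.kind === 7) {
      return ['OK', event.id, false, 'blocked: reactions are not accepted here'];
    }
    return ['OK', event.id, true, ''];
  }
}
```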
@@ -34,7 +34,9 @@ export class DVM {
      return DVM.feedback(event, 'error', `Forbidden user: ${user}`);
    }

    const [label] = await Storages.db.query([{
    const store = await Storages.db();

    const [label] = await store.query([{
      kinds: [1985],
      authors: [admin],
      '#L': ['nip05'],
@@ -1,6 +1,6 @@
import { MockRelay } from '@nostrify/nostrify/test';

import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';
import { UserStore } from '@/storages/UserStore.ts';
import { MuteListPolicy } from '@/policies/MuteListPolicy.ts';

@@ -27,7 +27,7 @@ Deno.test('block event: muted user cannot post', async () => {

  const ok = await policy.call(event1authorUserMeCopy);

  assertEquals(ok, ['OK', event1authorUserMeCopy.id, false, 'You are banned in this server.']);
  assertEquals(ok, ['OK', event1authorUserMeCopy.id, false, 'blocked: Your account has been deactivated.']);
});

Deno.test('allow event: user is NOT muted because there is no muted event', async () => {
@@ -10,7 +10,7 @@ export class MuteListPolicy implements NPolicy {
    const pubkeys = getTagSet(muteList?.tags ?? [], 'p');

    if (pubkeys.has(event.pubkey)) {
      return ['OK', event.id, false, 'You are banned in this server.'];
      return ['OK', event.id, false, 'blocked: Your account has been deactivated.'];
    }

    return ['OK', event.id, true, ''];

src/pool.ts
@@ -1,34 +0,0 @@
import { RelayPoolWorker } from 'nostr-relaypool';

import { Storages } from '@/storages.ts';
import { Conf } from '@/config.ts';

const [relayList] = await Storages.db.query([
  { kinds: [10002], authors: [Conf.pubkey], limit: 1 },
]);

const tags = relayList?.tags ?? [];

const activeRelays = tags.reduce((acc, [name, url, marker]) => {
  if (name === 'r' && !marker) {
    acc.push(url);
  }
  return acc;
}, []);

console.log(`pool: connecting to ${activeRelays.length} relays.`);

const worker = new Worker('https://unpkg.com/nostr-relaypool2@0.6.34/lib/nostr-relaypool.worker.js', {
  type: 'module',
});

// @ts-ignore Wrong types.
const pool = new RelayPoolWorker(worker, activeRelays, {
  autoReconnect: true,
  // The pipeline verifies events.
  skipVerification: true,
  // The logging feature overwhelms the CPU and creates too many logs.
  logErrorsAndNotices: false,
});

export { activeRelays, pool };
@@ -25,6 +25,7 @@ const getEvent = async (
  opts: GetEventOpts = {},
): Promise<DittoEvent | undefined> => {
  debug(`getEvent: ${id}`);
  const store = await Storages.optimizer();
  const { kind, signal = AbortSignal.timeout(1000) } = opts;

  const filter: NostrFilter = { ids: [id], limit: 1 };

@@ -32,23 +33,25 @@ const getEvent = async (
    filter.kinds = [kind];
  }

  return await Storages.optimizer.query([filter], { limit: 1, signal })
    .then((events) => hydrateEvents({ events, storage: Storages.optimizer, signal }))
  return await store.query([filter], { limit: 1, signal })
    .then((events) => hydrateEvents({ events, store, signal }))
    .then(([event]) => event);
};

/** Get a Nostr `set_medatadata` event for a user's pubkey. */
const getAuthor = async (pubkey: string, opts: GetEventOpts = {}): Promise<NostrEvent | undefined> => {
  const store = await Storages.optimizer();
  const { signal = AbortSignal.timeout(1000) } = opts;

  return await Storages.optimizer.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal })
    .then((events) => hydrateEvents({ events, storage: Storages.optimizer, signal }))
  return await store.query([{ authors: [pubkey], kinds: [0], limit: 1 }], { limit: 1, signal })
    .then((events) => hydrateEvents({ events, store, signal }))
    .then(([event]) => event);
};

/** Get users the given pubkey follows. */
const getFollows = async (pubkey: string, signal?: AbortSignal): Promise<NostrEvent | undefined> => {
  const [event] = await Storages.db.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal });
  const store = await Storages.db();
  const [event] = await store.query([{ authors: [pubkey], kinds: [3], limit: 1 }], { limit: 1, signal });
  return event;
};

@@ -84,15 +87,18 @@ async function getAncestors(event: NostrEvent, result: NostrEvent[] = []): Promi
}

async function getDescendants(eventId: string, signal = AbortSignal.timeout(2000)): Promise<NostrEvent[]> {
  const events = await Storages.db.query([{ kinds: [1], '#e': [eventId] }], { limit: 200, signal });
  return hydrateEvents({ events, storage: Storages.db, signal });
  const store = await Storages.db();
  const events = await store.query([{ kinds: [1], '#e': [eventId] }], { limit: 200, signal });
  return hydrateEvents({ events, store, signal });
}

/** Returns whether the pubkey is followed by a local user. */
async function isLocallyFollowed(pubkey: string): Promise<boolean> {
  const { host } = Conf.url;

  const [event] = await Storages.db.query(
  const store = await Storages.db();

  const [event] = await store.query(
    [{ kinds: [3], '#p': [pubkey], search: `domain:${host}`, limit: 1 }],
    { limit: 1 },
  );
@@ -1,65 +0,0 @@
// deno-lint-ignore-file require-await

import { NConnectSigner, NostrEvent, NostrSigner, NSecSigner } from '@nostrify/nostrify';
import { HTTPException } from 'hono';
import { type AppContext } from '@/app.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { Storages } from '@/storages.ts';

/**
 * Sign Nostr event using the app context.
 *
 * - If a secret key is provided, it will be used to sign the event.
 * - Otherwise, it will use NIP-46 to sign the event.
 */
export class APISigner implements NostrSigner {
  private signer: NostrSigner;

  constructor(c: AppContext) {
    const seckey = c.get('seckey');
    const pubkey = c.get('pubkey');

    if (!pubkey) {
      throw new HTTPException(401, { message: 'Missing pubkey' });
    }

    if (seckey) {
      this.signer = new NSecSigner(seckey);
    } else {
      this.signer = new NConnectSigner({
        pubkey,
        relay: Storages.pubsub,
        signer: new AdminSigner(),
        timeout: 60000,
      });
    }
  }

  async getPublicKey(): Promise<string> {
    return this.signer.getPublicKey();
  }

  async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
    return this.signer.signEvent(event);
  }

  readonly nip04 = {
    encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
      return this.signer.nip04!.encrypt(pubkey, plaintext);
    },

    decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
      return this.signer.nip04!.decrypt(pubkey, ciphertext);
    },
  };

  readonly nip44 = {
    encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
      return this.signer.nip44!.encrypt(pubkey, plaintext);
    },

    decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
      return this.signer.nip44!.decrypt(pubkey, ciphertext);
    },
  };
}
@@ -0,0 +1,70 @@
// deno-lint-ignore-file require-await
import { NConnectSigner, NostrEvent, NostrSigner } from '@nostrify/nostrify';

import { AdminSigner } from '@/signers/AdminSigner.ts';
import { Storages } from '@/storages.ts';

/**
 * NIP-46 signer.
 *
 * Simple extension of nostrify's `NConnectSigner`, with our options to keep it DRY.
 */
export class ConnectSigner implements NostrSigner {
  private signer: Promise<NConnectSigner>;

  constructor(private pubkey: string, private relays?: string[]) {
    this.signer = this.init();
  }

  async init(): Promise<NConnectSigner> {
    return new NConnectSigner({
      pubkey: this.pubkey,
      // TODO: use a remote relay for `nprofile` signing (if present and `Conf.relay` isn't already in the list)
      relay: await Storages.pubsub(),
      signer: new AdminSigner(),
      timeout: 60000,
    });
  }

  async signEvent(event: Omit<NostrEvent, 'id' | 'pubkey' | 'sig'>): Promise<NostrEvent> {
    const signer = await this.signer;
    return signer.signEvent(event);
  }

  readonly nip04 = {
    encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
      const signer = await this.signer;
      return signer.nip04.encrypt(pubkey, plaintext);
    },

    decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
      const signer = await this.signer;
      return signer.nip04.decrypt(pubkey, ciphertext);
    },
  };

  readonly nip44 = {
    encrypt: async (pubkey: string, plaintext: string): Promise<string> => {
      const signer = await this.signer;
      return signer.nip44.encrypt(pubkey, plaintext);
    },

    decrypt: async (pubkey: string, ciphertext: string): Promise<string> => {
      const signer = await this.signer;
      return signer.nip44.decrypt(pubkey, ciphertext);
    },
  };

  // Prevent unnecessary NIP-46 round-trips.
  async getPublicKey(): Promise<string> {
    return this.pubkey;
  }

  /** Get the user's relays if they passed in an `nprofile` auth token. */
  async getRelays(): Promise<Record<string, { read: boolean; write: boolean }>> {
    return this.relays?.reduce<Record<string, { read: boolean; write: boolean }>>((acc, relay) => {
      acc[relay] = { read: true, write: true };
      return acc;
    }, {}) ?? {};
  }
}

src/stats.ts
@@ -1,8 +1,8 @@
import { NostrEvent } from '@nostrify/nostrify';
import { NKinds, NostrEvent } from '@nostrify/nostrify';
import Debug from '@soapbox/stickynotes/debug';
import { InsertQueryBuilder } from 'kysely';

import { db } from '@/db.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { DittoTables } from '@/db/DittoTables.ts';
import { Storages } from '@/storages.ts';
import { findReplyTag } from '@/tags.ts';

@@ -16,16 +16,16 @@ type StatDiff = AuthorStatDiff | EventStatDiff;

const debug = Debug('ditto:stats');

/** Store stats for the event in LMDB. */
/** Store stats for the event. */
async function updateStats(event: NostrEvent) {
  let prev: NostrEvent | undefined;
  const queries: InsertQueryBuilder<DittoTables, any, unknown>[] = [];

  // Kind 3 is a special case - replace the count with the new list.
  if (event.kind === 3) {
    prev = await maybeGetPrev(event);
    prev = await getPrevEvent(event);
    if (!prev || event.created_at >= prev.created_at) {
      queries.push(updateFollowingCountQuery(event));
      queries.push(await updateFollowingCountQuery(event));
    }
  }

@@ -37,8 +37,8 @@ async function updateStats(event: NostrEvent) {
    debug(JSON.stringify({ id: event.id, pubkey: event.pubkey, kind: event.kind, tags: event.tags, statDiffs }));
  }

  if (pubkeyDiffs.length) queries.push(authorStatsQuery(pubkeyDiffs));
  if (eventDiffs.length) queries.push(eventStatsQuery(eventDiffs));
  if (pubkeyDiffs.length) queries.push(await authorStatsQuery(pubkeyDiffs));
  if (eventDiffs.length) queries.push(await eventStatsQuery(eventDiffs));

  if (queries.length) {
    await Promise.all(queries.map((query) => query.execute()));

@@ -47,6 +47,7 @@ async function updateStats(event: NostrEvent) {

/** Calculate stats changes ahead of time so we can build an efficient query. */
async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Promise<StatDiff[]> {
  const store = await Storages.db();
  const statDiffs: StatDiff[] = [];

  const firstTaggedId = event.tags.find(([name]) => name === 'e')?.[1];

@@ -65,7 +66,7 @@ async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Pr
    case 5: {
      if (!firstTaggedId) break;

      const [repostedEvent] = await Storages.db.query(
      const [repostedEvent] = await store.query(
        [{ kinds: [6], ids: [firstTaggedId], authors: [event.pubkey] }],
        { limit: 1 },
      );

@@ -77,7 +78,7 @@ async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Pr
      const eventBeingRepostedPubkey = repostedEvent.tags.find(([name]) => name === 'p')?.[1];
      if (!eventBeingRepostedId || !eventBeingRepostedPubkey) break;

      const [eventBeingReposted] = await Storages.db.query(
      const [eventBeingReposted] = await store.query(
        [{ kinds: [1], ids: [eventBeingRepostedId], authors: [eventBeingRepostedPubkey] }],
        { limit: 1 },
      );

@@ -101,7 +102,7 @@ async function getStatsDiff(event: NostrEvent, prev: NostrEvent | undefined): Pr
}

/** Create an author stats query from the list of diffs. */
function authorStatsQuery(diffs: AuthorStatDiff[]) {
async function authorStatsQuery(diffs: AuthorStatDiff[]) {
  const values: DittoTables['author_stats'][] = diffs.map(([_, pubkey, stat, diff]) => {
    const row: DittoTables['author_stats'] = {
      pubkey,
@@ -113,21 +114,22 @@ function authorStatsQuery(diffs: AuthorStatDiff[]) {
    return row;
  });

  return db.insertInto('author_stats')
  const kysely = await DittoDB.getInstance();
  return kysely.insertInto('author_stats')
    .values(values)
    .onConflict((oc) =>
      oc
        .column('pubkey')
        .doUpdateSet((eb) => ({
          followers_count: eb('followers_count', '+', eb.ref('excluded.followers_count')),
          following_count: eb('following_count', '+', eb.ref('excluded.following_count')),
          notes_count: eb('notes_count', '+', eb.ref('excluded.notes_count')),
          followers_count: eb('author_stats.followers_count', '+', eb.ref('excluded.followers_count')),
          following_count: eb('author_stats.following_count', '+', eb.ref('excluded.following_count')),
          notes_count: eb('author_stats.notes_count', '+', eb.ref('excluded.notes_count')),
        }))
    );
}

/** Create an event stats query from the list of diffs. */
function eventStatsQuery(diffs: EventStatDiff[]) {
async function eventStatsQuery(diffs: EventStatDiff[]) {
  const values: DittoTables['event_stats'][] = diffs.map(([_, event_id, stat, diff]) => {
    const row: DittoTables['event_stats'] = {
      event_id,

@@ -139,37 +141,43 @@ function eventStatsQuery(diffs: EventStatDiff[]) {
    return row;
  });

  return db.insertInto('event_stats')
  const kysely = await DittoDB.getInstance();
  return kysely.insertInto('event_stats')
    .values(values)
    .onConflict((oc) =>
      oc
        .column('event_id')
        .doUpdateSet((eb) => ({
          replies_count: eb('replies_count', '+', eb.ref('excluded.replies_count')),
          reposts_count: eb('reposts_count', '+', eb.ref('excluded.reposts_count')),
          reactions_count: eb('reactions_count', '+', eb.ref('excluded.reactions_count')),
          replies_count: eb('event_stats.replies_count', '+', eb.ref('excluded.replies_count')),
          reposts_count: eb('event_stats.reposts_count', '+', eb.ref('excluded.reposts_count')),
          reactions_count: eb('event_stats.reactions_count', '+', eb.ref('excluded.reactions_count')),
        }))
    );
}

/** Get the last version of the event, if any. */
async function maybeGetPrev(event: NostrEvent): Promise<NostrEvent> {
  const [prev] = await Storages.db.query([
    { kinds: [event.kind], authors: [event.pubkey], limit: 1 },
  ]);
async function getPrevEvent(event: NostrEvent): Promise<NostrEvent | undefined> {
  if (NKinds.replaceable(event.kind) || NKinds.parameterizedReplaceable(event.kind)) {
    const store = await Storages.db();

  return prev;
    const [prev] = await store.query([
      { kinds: [event.kind], authors: [event.pubkey], limit: 1 },
    ]);

    return prev;
  }
}

/** Set the following count to the total number of unique "p" tags in the follow list. */
function updateFollowingCountQuery({ pubkey, tags }: NostrEvent) {
async function updateFollowingCountQuery({ pubkey, tags }: NostrEvent) {
  const following_count = new Set(
    tags
      .filter(([name]) => name === 'p')
      .map(([_, value]) => value),
  ).size;

  return db.insertInto('author_stats')
  const kysely = await DittoDB.getInstance();
  return kysely.insertInto('author_stats')
    .values({
      pubkey,
      following_count,

src/storages.ts
@@ -1,7 +1,9 @@
// deno-lint-ignore-file require-await
import { NCache } from '@nostrify/nostrify';
import { RelayPoolWorker } from 'nostr-relaypool';

import { Conf } from '@/config.ts';
import { db } from '@/db.ts';
import { activeRelays, pool } from '@/pool.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { EventsDB } from '@/storages/events-db.ts';
import { Optimizer } from '@/storages/optimizer.ts';
import { PoolStore } from '@/storages/pool-store.ts';
@@ -12,89 +14,130 @@ import { UserStore } from '@/storages/UserStore.ts';
import { Time } from '@/utils/time.ts';

export class Storages {
  private static _db: EventsDB | undefined;
  private static _admin: UserStore | undefined;
  private static _cache: NCache | undefined;
  private static _client: PoolStore | undefined;
  private static _optimizer: Optimizer | undefined;
  private static _reqmeister: Reqmeister | undefined;
  private static _pubsub: InternalRelay | undefined;
  private static _search: SearchStore | undefined;
  private static _db: Promise<EventsDB> | undefined;
  private static _admin: Promise<UserStore> | undefined;
  private static _cache: Promise<NCache> | undefined;
  private static _client: Promise<PoolStore> | undefined;
  private static _optimizer: Promise<Optimizer> | undefined;
  private static _reqmeister: Promise<Reqmeister> | undefined;
  private static _pubsub: Promise<InternalRelay> | undefined;
  private static _search: Promise<SearchStore> | undefined;

  /** SQLite database to store events this Ditto server cares about. */
  public static get db(): EventsDB {
  public static async db(): Promise<EventsDB> {
    if (!this._db) {
      this._db = new EventsDB(db);
      this._db = (async () => {
        const kysely = await DittoDB.getInstance();
        return new EventsDB(kysely);
      })();
    }
    return this._db;
  }

  /** Admin user storage. */
  public static get admin(): UserStore {
  public static async admin(): Promise<UserStore> {
    if (!this._admin) {
      this._admin = new UserStore(Conf.pubkey, this.db);
      this._admin = Promise.resolve(new UserStore(Conf.pubkey, await this.db()));
    }
    return this._admin;
  }

  /** Internal pubsub relay between controllers and the pipeline. */
  public static get pubsub(): InternalRelay {
  public static async pubsub(): Promise<InternalRelay> {
    if (!this._pubsub) {
      this._pubsub = new InternalRelay();
      this._pubsub = Promise.resolve(new InternalRelay());
    }
    return this._pubsub;
  }

  /** Relay pool storage. */
  public static get client(): PoolStore {
  public static async client(): Promise<PoolStore> {
    if (!this._client) {
      this._client = new PoolStore({
        pool,
        relays: activeRelays,
      });
      this._client = (async () => {
        const db = await this.db();

        const [relayList] = await db.query([
          { kinds: [10002], authors: [Conf.pubkey], limit: 1 },
        ]);

        const tags = relayList?.tags ?? [];

        const activeRelays = tags.reduce((acc, [name, url, marker]) => {
          if (name === 'r' && !marker) {
            acc.push(url);
          }
          return acc;
        }, []);

        console.log(`pool: connecting to ${activeRelays.length} relays.`);

        const worker = new Worker('https://unpkg.com/nostr-relaypool2@0.6.34/lib/nostr-relaypool.worker.js', {
          type: 'module',
        });

        // @ts-ignore Wrong types.
        const pool = new RelayPoolWorker(worker, activeRelays, {
          autoReconnect: true,
          // The pipeline verifies events.
          skipVerification: true,
          // The logging feature overwhelms the CPU and creates too many logs.
          logErrorsAndNotices: false,
        });

        return new PoolStore({
          pool,
          relays: activeRelays,
        });
      })();
    }
    return this._client;
  }

  /** In-memory data store for cached events. */
  public static get cache(): NCache {
  public static async cache(): Promise<NCache> {
    if (!this._cache) {
      this._cache = new NCache({ max: 3000 });
      this._cache = Promise.resolve(new NCache({ max: 3000 }));
    }
    return this._cache;
  }

  /** Batches requests for single events. */
  public static get reqmeister(): Reqmeister {
  public static async reqmeister(): Promise<Reqmeister> {
    if (!this._reqmeister) {
      this._reqmeister = new Reqmeister({
        client: this.client,
        delay: Time.seconds(1),
        timeout: Time.seconds(1),
      });
      this._reqmeister = Promise.resolve(
        new Reqmeister({
          client: await this.client(),
          delay: Time.seconds(1),
          timeout: Time.seconds(1),
        }),
      );
    }
    return this._reqmeister;
  }

  /** Main Ditto storage adapter */
  public static get optimizer(): Optimizer {
  public static async optimizer(): Promise<Optimizer> {
    if (!this._optimizer) {
      this._optimizer = new Optimizer({
        db: this.db,
        cache: this.cache,
        client: this.reqmeister,
      });
      this._optimizer = Promise.resolve(
        new Optimizer({
          db: await this.db(),
          cache: await this.cache(),
          client: await this.reqmeister(),
        }),
      );
    }
    return this._optimizer;
  }

  /** Storage to use for remote search. */
  public static get search(): SearchStore {
  public static async search(): Promise<SearchStore> {
    if (!this._search) {
      this._search = new SearchStore({
        relay: Conf.searchRelay,
        fallback: this.optimizer,
      });
      this._search = Promise.resolve(
        new SearchStore({
          relay: Conf.searchRelay,
          fallback: await this.optimizer(),
        }),
      );
    }
    return this._search;
  }
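Since every `Storages` accessor above is now an async method rather than a getter, call sites change from `Storages.db` to `await Storages.db()`. A minimal before/after sketch of the calling convention (the filter is illustrative):

```ts
import { Storages } from '@/storages.ts';

// Before this commit (synchronous getter):
// const events = await Storages.db.query([{ kinds: [1], limit: 20 }]);

// After this commit (async accessor; the store is created lazily on first call):
const store = await Storages.db();
const events = await store.query([{ kinds: [1], limit: 20 }]);
```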
@@ -1,6 +1,6 @@
import { MockRelay } from '@nostrify/nostrify/test';

import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';
import { UserStore } from '@/storages/UserStore.ts';

import userBlack from '~/fixtures/events/kind-0-black.json' with { type: 'json' };
@@ -4,13 +4,7 @@ import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { getTagSet } from '@/tags.ts';

export class UserStore implements NStore {
  private store: NStore;
  private pubkey: string;

  constructor(pubkey: string, store: NStore) {
    this.pubkey = pubkey;
    this.store = store;
  }
  constructor(private pubkey: string, private store: NStore) {}

  async event(event: NostrEvent, opts?: { signal?: AbortSignal }): Promise<void> {
    return await this.store.event(event, opts);

@@ -21,12 +15,11 @@ export class UserStore implements NStore {
   * https://github.com/nostr-protocol/nips/blob/master/51.md#standard-lists
   */
  async query(filters: NostrFilter[], opts: { signal?: AbortSignal; limit?: number } = {}): Promise<DittoEvent[]> {
    const allEvents = await this.store.query(filters, opts);
    const events = await this.store.query(filters, opts);
    const pubkeys = await this.getMutedPubkeys();

    const mutedPubkeys = await this.getMutedPubkeys();

    return allEvents.filter((event) => {
      return event.kind === 0 || mutedPubkeys.has(event.pubkey) === false;
    return events.filter((event) => {
      return event.kind === 0 || !pubkeys.has(event.pubkey);
    });
  }
@@ -1,12 +1,14 @@
import { db } from '@/db.ts';
import { assertEquals, assertRejects } from '@/deps-test.ts';
import { assertEquals, assertRejects } from '@std/assert';

import { DittoDB } from '@/db/DittoDB.ts';

import event0 from '~/fixtures/events/event-0.json' with { type: 'json' };
import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };

import { EventsDB } from '@/storages/events-db.ts';

const eventsDB = new EventsDB(db);
const kysely = await DittoDB.getInstance();
const eventsDB = new EventsDB(kysely);

Deno.test('count filters', async () => {
  assertEquals((await eventsDB.count([{ kinds: [1] }])).count, 0);

@@ -34,7 +36,7 @@ Deno.test('query events with domain search filter', async () => {
  assertEquals(await eventsDB.query([{ search: 'domain:localhost:8000' }]), []);
  assertEquals(await eventsDB.query([{ search: '' }]), [event1]);

  await db
  await kysely
    .insertInto('pubkey_domains')
    .values({ pubkey: event1.pubkey, domain: 'localhost:8000', last_updated_at: event1.created_at })
    .execute();
@@ -1,131 +1,135 @@
import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { MockRelay } from '@nostrify/nostrify/test';

import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { eventFixture } from '@/test.ts';

import event0madePost from '~/fixtures/events/event-0-the-one-who-post-and-users-repost.json' with { type: 'json' };
import event0madeRepost from '~/fixtures/events/event-0-the-one-who-repost.json' with { type: 'json' };
import event0madeQuoteRepost from '~/fixtures/events/event-0-the-one-who-quote-repost.json' with { type: 'json' };
import event1 from '~/fixtures/events/event-1.json' with { type: 'json' };
import event1quoteRepost from '~/fixtures/events/event-1-quote-repost.json' with { type: 'json' };
import event1futureIsMine from '~/fixtures/events/event-1-will-be-reposted-with-quote-repost.json' with {
type: 'json',
};
import event1quoteRepostLatin from '~/fixtures/events/event-1-quote-repost-will-be-reposted.json' with { type: 'json' };
import event1willBeQuoteReposted from '~/fixtures/events/event-1-that-will-be-quote-reposted.json' with {
type: 'json',
};
import event1reposted from '~/fixtures/events/event-1-reposted.json' with { type: 'json' };
import event6 from '~/fixtures/events/event-6.json' with { type: 'json' };
import event6ofQuoteRepost from '~/fixtures/events/event-6-of-quote-repost.json' with { type: 'json' };

Deno.test('hydrateEvents(): author --- WITHOUT stats', async () => {
const db = new MockRelay();

const event0 = await eventFixture('event-0');
const event1copy = structuredClone(event1);
const event1 = await eventFixture('event-1');

// Save events to database
await db.event(event0);
await db.event(event1copy);

assertEquals((event1copy as DittoEvent).author, undefined, "Event hasn't been hydrated yet");
await db.event(event1);

await hydrateEvents({
events: [event1copy],
storage: db,
events: [event1],
store: db,
});

const expectedEvent = { ...event1copy, author: event0 };
assertEquals(event1copy, expectedEvent);
const expectedEvent = { ...event1, author: event0 };
assertEquals(event1, expectedEvent);
});

Deno.test('hydrateEvents(): repost --- WITHOUT stats', async () => {
const db = new MockRelay();

const event0madePostCopy = structuredClone(event0madePost);
const event0madeRepostCopy = structuredClone(event0madeRepost);
const event1repostedCopy = structuredClone(event1reposted);
const event6copy = structuredClone(event6);
const event0madePost = await eventFixture('event-0-the-one-who-post-and-users-repost');
const event0madeRepost = await eventFixture('event-0-the-one-who-repost');
const event1reposted = await eventFixture('event-1-reposted');
const event6 = await eventFixture('event-6');

// Save events to database
await db.event(event0madePostCopy);
await db.event(event0madeRepostCopy);
await db.event(event1repostedCopy);
await db.event(event6copy);

assertEquals((event6copy as DittoEvent).author, undefined, "Event hasn't hydrated author yet");
assertEquals((event6copy as DittoEvent).repost, undefined, "Event hasn't hydrated repost yet");
await db.event(event0madePost);
await db.event(event0madeRepost);
await db.event(event1reposted);
await db.event(event6);

await hydrateEvents({
events: [event6copy],
storage: db,
events: [event6],
store: db,
});

const expectedEvent6 = {
...event6copy,
author: event0madeRepostCopy,
repost: { ...event1repostedCopy, author: event0madePostCopy },
...event6,
author: event0madeRepost,
repost: { ...event1reposted, author: event0madePost },
};
assertEquals(event6copy, expectedEvent6);
assertEquals(event6, expectedEvent6);
});

Deno.test('hydrateEvents(): quote repost --- WITHOUT stats', async () => {
const db = new MockRelay();

const event0madeQuoteRepostCopy = structuredClone(event0madeQuoteRepost);
const event0madeQuoteRepost = await eventFixture('event-0-the-one-who-quote-repost');
const event0 = await eventFixture('event-0');
const event1quoteRepostCopy = structuredClone(event1quoteRepost);
const event1willBeQuoteRepostedCopy = structuredClone(event1willBeQuoteReposted);
const event1quoteRepost = await eventFixture('event-1-quote-repost');
const event1willBeQuoteReposted = await eventFixture('event-1-that-will-be-quote-reposted');

// Save events to database
await db.event(event0madeQuoteRepostCopy);
await db.event(event0madeQuoteRepost);
await db.event(event0);
await db.event(event1quoteRepostCopy);
await db.event(event1willBeQuoteRepostedCopy);
await db.event(event1quoteRepost);
await db.event(event1willBeQuoteReposted);

await hydrateEvents({
events: [event1quoteRepostCopy],
storage: db,
events: [event1quoteRepost],
store: db,
});

const expectedEvent1quoteRepost = {
...event1quoteRepostCopy,
author: event0madeQuoteRepostCopy,
quote: { ...event1willBeQuoteRepostedCopy, author: event0 },
...event1quoteRepost,
author: event0madeQuoteRepost,
quote: { ...event1willBeQuoteReposted, author: event0 },
};

assertEquals(event1quoteRepostCopy, expectedEvent1quoteRepost);
assertEquals(event1quoteRepost, expectedEvent1quoteRepost);
});

Deno.test('hydrateEvents(): repost of quote repost --- WITHOUT stats', async () => {
const db = new MockRelay();

const author = await eventFixture('event-0-makes-repost-with-quote-repost');
const event1copy = structuredClone(event1futureIsMine);
const event1quoteCopy = structuredClone(event1quoteRepostLatin);
const event6copy = structuredClone(event6ofQuoteRepost);
const event1 = await eventFixture('event-1-will-be-reposted-with-quote-repost');
const event6 = await eventFixture('event-6-of-quote-repost');
const event1quote = await eventFixture('event-1-quote-repost-will-be-reposted');

// Save events to database
await db.event(author);
await db.event(event1copy);
await db.event(event1quoteCopy);
await db.event(event6copy);

assertEquals((event6copy as DittoEvent).author, undefined, "Event hasn't hydrated author yet");
assertEquals((event6copy as DittoEvent).repost, undefined, "Event hasn't hydrated repost yet");
await db.event(event1);
await db.event(event1quote);
await db.event(event6);

await hydrateEvents({
events: [event6copy],
storage: db,
events: [event6],
store: db,
});

const expectedEvent6 = {
...event6copy,
...event6,
author,
repost: { ...event1quoteCopy, author, quote: { author, ...event1copy } },
repost: { ...event1quote, author, quote: { author, ...event1 } },
};
assertEquals(event6copy, expectedEvent6);
assertEquals(event6, expectedEvent6);
});

Deno.test('hydrateEvents(): report pubkey and post // kind 1984 --- WITHOUT stats', async () => {
const db = new MockRelay();

const authorDictator = await eventFixture('kind-0-dictator');
const authorVictim = await eventFixture('kind-0-george-orwell');
const reportEvent = await eventFixture('kind-1984-dictator-reports-george-orwell');
const event1 = await eventFixture('kind-1-author-george-orwell');

// Save events to database
await db.event(authorDictator);
await db.event(authorVictim);
await db.event(reportEvent);
await db.event(event1);

await hydrateEvents({
events: [reportEvent],
store: db,
});

const expectedEvent: DittoEvent = {
...reportEvent,
author: authorDictator,
reported_notes: [event1],
reported_profile: authorVictim,
};
assertEquals(reportEvent, expectedEvent);
});
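A condensed sketch of the pattern these tests move to: fixtures are loaded with eventFixture() rather than structuredClone of imported JSON, and hydrateEvents() takes the relay under the renamed store option. The assertion shown is a simplified stand-in for the expectedEvent comparisons above:

import { assertEquals } from '@std/assert';
import { MockRelay } from '@nostrify/nostrify/test';

import { DittoEvent } from '@/interfaces/DittoEvent.ts';
import { hydrateEvents } from '@/storages/hydrate.ts';
import { eventFixture } from '@/test.ts';

Deno.test('hydrateEvents(): author --- sketch', async () => {
  const db = new MockRelay();

  const event0 = await eventFixture('event-0');
  const event1 = await eventFixture('event-1');

  // Save events to the mock relay, then hydrate in place.
  await db.event(event0);
  await db.event(event1);

  await hydrateEvents({ events: [event1], store: db });

  assertEquals((event1 as DittoEvent).author, event0);
});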
@@ -1,20 +1,20 @@
import { NostrEvent, NStore } from '@nostrify/nostrify';
import { matchFilter } from 'nostr-tools';

import { db } from '@/db.ts';
import { DittoDB } from '@/db/DittoDB.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';
import { DittoTables } from '@/db/DittoTables.ts';
import { Conf } from '@/config.ts';

interface HydrateOpts {
events: DittoEvent[];
storage: NStore;
store: NStore;
signal?: AbortSignal;
}

/** Hydrate events using the provided storage. */
async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {
const { events, storage, signal } = opts;
const { events, store, signal } = opts;

if (!events.length) {
return events;

@@ -22,31 +22,31 @@ async function hydrateEvents(opts: HydrateOpts): Promise<DittoEvent[]> {

const cache = [...events];

for (const event of await gatherReposts({ events: cache, storage, signal })) {
for (const event of await gatherReposts({ events: cache, store, signal })) {
cache.push(event);
}

for (const event of await gatherReacted({ events: cache, storage, signal })) {
for (const event of await gatherReacted({ events: cache, store, signal })) {
cache.push(event);
}

for (const event of await gatherQuotes({ events: cache, storage, signal })) {
for (const event of await gatherQuotes({ events: cache, store, signal })) {
cache.push(event);
}

for (const event of await gatherAuthors({ events: cache, storage, signal })) {
for (const event of await gatherAuthors({ events: cache, store, signal })) {
cache.push(event);
}

for (const event of await gatherUsers({ events: cache, storage, signal })) {
for (const event of await gatherUsers({ events: cache, store, signal })) {
cache.push(event);
}

for (const event of await gatherReportedProfiles({ events: cache, storage, signal })) {
for (const event of await gatherReportedProfiles({ events: cache, store, signal })) {
cache.push(event);
}

for (const event of await gatherReportedNotes({ events: cache, storage, signal })) {
for (const event of await gatherReportedNotes({ events: cache, store, signal })) {
cache.push(event);
}

@@ -123,7 +123,7 @@ function assembleEvents(
}

/** Collect reposts from the events. */
function gatherReposts({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherReposts({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();

for (const event of events) {

@@ -135,14 +135,14 @@ function gatherReposts({ events, storage, signal }: HydrateOpts): Promise<DittoE
}
}

return storage.query(
return store.query(
[{ ids: [...ids], limit: ids.size }],
{ signal },
);
}

/** Collect events being reacted to by the events. */
function gatherReacted({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherReacted({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();

for (const event of events) {

@@ -154,14 +154,14 @@ function gatherReacted({ events, storage, signal }: HydrateOpts): Promise<DittoE
}
}

return storage.query(
return store.query(
[{ ids: [...ids], limit: ids.size }],
{ signal },
);
}

/** Collect quotes from the events. */
function gatherQuotes({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherQuotes({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();

for (const event of events) {

@@ -173,34 +173,34 @@ function gatherQuotes({ events, storage, signal }: HydrateOpts): Promise<DittoEv
}
}

return storage.query(
return store.query(
[{ ids: [...ids], limit: ids.size }],
{ signal },
);
}

/** Collect authors from the events. */
function gatherAuthors({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherAuthors({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const pubkeys = new Set(events.map((event) => event.pubkey));

return storage.query(
return store.query(
[{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}

/** Collect users from the events. */
function gatherUsers({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherUsers({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const pubkeys = new Set(events.map((event) => event.pubkey));

return storage.query(
return store.query(
[{ kinds: [30361], authors: [Conf.pubkey], '#d': [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}

/** Collect reported notes from the events. */
function gatherReportedNotes({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherReportedNotes({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const ids = new Set<string>();
for (const event of events) {
if (event.kind === 1984) {

@@ -213,14 +213,14 @@ function gatherReportedNotes({ events, storage, signal }: HydrateOpts): Promise<
}
}

return storage.query(
return store.query(
[{ kinds: [1], ids: [...ids], limit: ids.size }],
{ signal },
);
}

/** Collect reported profiles from the events. */
function gatherReportedProfiles({ events, storage, signal }: HydrateOpts): Promise<DittoEvent[]> {
function gatherReportedProfiles({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
const pubkeys = new Set<string>();

for (const event of events) {

@@ -232,14 +232,14 @@ function gatherReportedProfiles({ events, storage, signal }: HydrateOpts): Promi
}
}

return storage.query(
return store.query(
[{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }],
{ signal },
);
}

/** Collect author stats from the events. */
function gatherAuthorStats(events: DittoEvent[]): Promise<DittoTables['author_stats'][]> {
async function gatherAuthorStats(events: DittoEvent[]): Promise<DittoTables['author_stats'][]> {
const pubkeys = new Set<string>(
events
.filter((event) => event.kind === 0)

@@ -250,7 +250,8 @@ function gatherAuthorStats(events: DittoEvent[]): Promise<DittoTables['author_st
return Promise.resolve([]);
}

return db
const kysely = await DittoDB.getInstance();
return kysely
.selectFrom('author_stats')
.selectAll()
.where('pubkey', 'in', [...pubkeys])

@@ -258,7 +259,7 @@ function gatherAuthorStats(events: DittoEvent[]): Promise<DittoTables['author_st
}

/** Collect event stats from the events. */
function gatherEventStats(events: DittoEvent[]): Promise<DittoTables['event_stats'][]> {
async function gatherEventStats(events: DittoEvent[]): Promise<DittoTables['event_stats'][]> {
const ids = new Set<string>(
events
.filter((event) => event.kind === 1)

@@ -269,7 +270,8 @@ function gatherEventStats(events: DittoEvent[]): Promise<DittoTables['event_stat
return Promise.resolve([]);
}

return db
const kysely = await DittoDB.getInstance();
return kysely
.selectFrom('event_stats')
.selectAll()
.where('event_id', 'in', [...ids])
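Taken together, these hunks rename the storage option to store and make the stats helpers resolve Kysely on demand. A sketch of the resulting shapes, trimmed to what the diff shows; the trailing .execute() call and the pubkey mapping are assumptions:

import { NStore } from '@nostrify/nostrify';
import { DittoDB } from '@/db/DittoDB.ts';
import { type DittoEvent } from '@/interfaces/DittoEvent.ts';

interface HydrateOpts {
  events: DittoEvent[];
  store: NStore;
  signal?: AbortSignal;
}

// Each gather* helper now queries the injected store.
function gatherAuthors({ events, store, signal }: HydrateOpts): Promise<DittoEvent[]> {
  const pubkeys = new Set(events.map((event) => event.pubkey));
  return store.query(
    [{ kinds: [0], authors: [...pubkeys], limit: pubkeys.size }],
    { signal },
  );
}

// The stats helpers are now async and await the shared Kysely instance per call.
async function gatherAuthorStats(events: DittoEvent[]) {
  const pubkeys = new Set<string>(
    events
      .filter((event) => event.kind === 0)
      .map((event) => event.pubkey), // assumed mapping; the diff cuts off here
  );

  if (!pubkeys.size) {
    return Promise.resolve([]);
  }

  const kysely = await DittoDB.getInstance();
  return kysely
    .selectFrom('author_stats')
    .selectAll()
    .where('pubkey', 'in', [...pubkeys])
    .execute(); // assumed terminal call
}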
@@ -13,6 +13,7 @@ import { RelayPoolWorker } from 'nostr-relaypool';
import { getFilterLimit, matchFilters } from 'nostr-tools';

import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { purifyEvent } from '@/storages/hydrate.ts';
import { abortError } from '@/utils/abort.ts';
import { getRelays } from '@/utils/outbox.ts';

@@ -35,7 +36,7 @@ class PoolStore implements NRelay {
async event(event: NostrEvent, opts: { signal?: AbortSignal } = {}): Promise<void> {
if (opts.signal?.aborted) return Promise.reject(abortError());

const relaySet = await getRelays(event.pubkey);
const relaySet = await getRelays(await Storages.db(), event.pubkey);
relaySet.delete(Conf.relay);

const relays = [...relaySet].slice(0, 4);
@@ -48,7 +48,7 @@ class SearchStore implements NStore {

return hydrateEvents({
events,
storage: this.#hydrator,
store: this.#hydrator,
signal: opts?.signal,
});
} else {
@@ -1,4 +1,4 @@
import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';

import { addTag, deleteTag, getTagSet } from './tags.ts';
@@ -9,8 +9,8 @@ import { z } from 'zod';
import { type AppContext } from '@/app.ts';
import { Conf } from '@/config.ts';
import * as pipeline from '@/pipeline.ts';
import { RelayError } from '@/RelayError.ts';
import { AdminSigner } from '@/signers/AdminSigner.ts';
import { APISigner } from '@/signers/APISigner.ts';
import { Storages } from '@/storages.ts';
import { nostrNow } from '@/utils.ts';

@@ -21,7 +21,13 @@ type EventStub = TypeFest.SetOptional<EventTemplate, 'content' | 'created_at' |

/** Publish an event through the pipeline. */
async function createEvent(t: EventStub, c: AppContext): Promise<NostrEvent> {
const signer = new APISigner(c);
const signer = c.get('signer');

if (!signer) {
throw new HTTPException(401, {
res: c.json({ error: 'No way to sign Nostr event' }, 401),
});
}

const event = await signer.signEvent({
content: '',

@@ -36,7 +42,7 @@ async function createEvent(t: EventStub, c: AppContext): Promise<NostrEvent> {
/** Filter for fetching an existing event to update. */
interface UpdateEventFilter extends NostrFilter {
kinds: [number];
limit?: 1;
limit: 1;
}

/** Fetch existing event, update it, then publish the new event. */

@@ -45,7 +51,8 @@ async function updateEvent<E extends EventStub>(
fn: (prev: NostrEvent | undefined) => E,
c: AppContext,
): Promise<NostrEvent> {
const [prev] = await Storages.db.query([filter], { limit: 1, signal: c.req.raw.signal });
const store = await Storages.db();
const [prev] = await store.query([filter], { signal: c.req.raw.signal });
return createEvent(fn(prev), c);
}

@@ -95,7 +102,8 @@ async function updateAdminEvent<E extends EventStub>(
fn: (prev: NostrEvent | undefined) => E,
c: AppContext,
): Promise<NostrEvent> {
const [prev] = await Storages.db.query([filter], { limit: 1, signal: c.req.raw.signal });
const store = await Storages.db();
const [prev] = await store.query([filter], { limit: 1, signal: c.req.raw.signal });
return createAdminEvent(fn(prev), c);
}

@@ -103,12 +111,11 @@ async function updateAdminEvent<E extends EventStub>(
async function publishEvent(event: NostrEvent, c: AppContext): Promise<NostrEvent> {
debug('EVENT', event);
try {
await Promise.all([
pipeline.handleEvent(event, c.req.raw.signal),
Storages.client.event(event),
]);
await pipeline.handleEvent(event, c.req.raw.signal);
const client = await Storages.client();
await client.event(event);
} catch (e) {
if (e instanceof pipeline.RelayError) {
if (e instanceof RelayError) {
throw new HTTPException(422, {
res: c.json({ error: e.message }, 422),
});
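For orientation, a sketch of how these helpers fit together after the change: the signer comes from the request context (rejecting with 401 when absent), and publishing runs the pipeline before forwarding to the client relay pool. Bodies are trimmed to what the diff shows; the signEvent defaults beyond content and the final return are assumptions:

async function createEvent(t: EventStub, c: AppContext): Promise<NostrEvent> {
  const signer = c.get('signer');

  if (!signer) {
    throw new HTTPException(401, {
      res: c.json({ error: 'No way to sign Nostr event' }, 401),
    });
  }

  const event = await signer.signEvent({
    content: '',
    created_at: nostrNow(), // assumed default
    tags: [],               // assumed default
    ...t,
  });

  return publishEvent(event, c);
}

async function publishEvent(event: NostrEvent, c: AppContext): Promise<NostrEvent> {
  debug('EVENT', event);
  try {
    await pipeline.handleEvent(event, c.req.raw.signal);
    const client = await Storages.client();
    await client.event(event);
  } catch (e) {
    if (e instanceof RelayError) {
      throw new HTTPException(422, {
        res: c.json({ error: e.message }, 422),
      });
    }
  }
  return event; // assumed
}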
@@ -1,4 +1,5 @@
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';

/** NIP-46 client-connect metadata. */

@@ -11,7 +12,7 @@ interface ConnectMetadata {
/** Get NIP-46 `nostrconnect://` URI for the Ditto server. */
export async function getClientConnectUri(signal?: AbortSignal): Promise<string> {
const uri = new URL('nostrconnect://');
const { name, tagline } = await getInstanceMetadata(signal);
const { name, tagline } = await getInstanceMetadata(await Storages.db(), signal);

const metadata: ConnectMetadata = {
name,
@@ -1,4 +1,4 @@
import { assert } from '@/deps-test.ts';
import { assert } from '@std/assert';

import ExpiringCache from './expiring-cache.ts';
@@ -1,8 +1,7 @@
import { NostrEvent, NostrMetadata, NSchema as n } from '@nostrify/nostrify';
import { NostrEvent, NostrMetadata, NSchema as n, NStore } from '@nostrify/nostrify';

import { Conf } from '@/config.ts';
import { serverMetaSchema } from '@/schemas/nostr.ts';
import { Storages } from '@/storages.ts';

/** Like NostrMetadata, but some fields are required and also contains some extra fields. */
export interface InstanceMetadata extends NostrMetadata {

@@ -14,8 +13,8 @@ export interface InstanceMetadata extends NostrMetadata {
}

/** Get and parse instance metadata from the kind 0 of the admin user. */
export async function getInstanceMetadata(signal?: AbortSignal): Promise<InstanceMetadata> {
const [event] = await Storages.db.query(
export async function getInstanceMetadata(store: NStore, signal?: AbortSignal): Promise<InstanceMetadata> {
const [event] = await store.query(
[{ kinds: [0], authors: [Conf.pubkey], limit: 1 }],
{ signal },
);
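getInstanceMetadata() now takes the store explicitly instead of reaching into Storages itself, so callers resolve it first. A minimal usage sketch based on the call sites in this diff; the timeout value is illustrative:

import { Storages } from '@/storages.ts';
import { getInstanceMetadata } from '@/utils/instance.ts';

// Storages.db() is awaited and the resulting store is passed in.
const store = await Storages.db();
const { name, tagline } = await getInstanceMetadata(store, AbortSignal.timeout(1000));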
@@ -1,4 +1,4 @@
import { NIP05 } from '@nostrify/nostrify';
import { NIP05, NStore } from '@nostrify/nostrify';
import Debug from '@soapbox/stickynotes/debug';
import { nip19 } from 'nostr-tools';

@@ -16,7 +16,8 @@ const nip05Cache = new SimpleLRU<string, nip19.ProfilePointer>(
const [name, domain] = key.split('@');
try {
if (domain === Conf.url.host) {
const pointer = await localNip05Lookup(name);
const store = await Storages.db();
const pointer = await localNip05Lookup(store, name);
if (pointer) {
debug(`Found: ${key} is ${pointer.pubkey}`);
return pointer;

@@ -36,8 +37,8 @@ const nip05Cache = new SimpleLRU<string, nip19.ProfilePointer>(
{ max: 500, ttl: Time.hours(1) },
);

async function localNip05Lookup(name: string): Promise<nip19.ProfilePointer | undefined> {
const [label] = await Storages.db.query([{
async function localNip05Lookup(store: NStore, name: string): Promise<nip19.ProfilePointer | undefined> {
const [label] = await store.query([{
kinds: [1985],
authors: [Conf.pubkey],
'#L': ['nip05'],
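localNip05Lookup() likewise takes the store as its first argument. A usage sketch matching the cache branch above; the username is a hypothetical example:

import { Storages } from '@/storages.ts';

const store = await Storages.db();
const pointer = await localNip05Lookup(store, 'alex'); // 'alex' is a placeholder local name
if (pointer) {
  console.log(pointer.pubkey); // ProfilePointer resolved from the kind 1985 label
}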
@@ -1,10 +1,11 @@
import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { NStore } from '@nostrify/nostrify';

export async function getRelays(pubkey: string): Promise<Set<string>> {
import { Conf } from '@/config.ts';

export async function getRelays(store: NStore, pubkey: string): Promise<Set<string>> {
const relays = new Set<`wss://${string}`>();

const events = await Storages.db.query([
const events = await store.query([
{ kinds: [10002], authors: [pubkey, Conf.pubkey], limit: 2 },
]);
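getRelays() now receives the store rather than importing Storages directly, matching the PoolStore call earlier in this diff. A usage sketch; pubkey stands for any author's hex pubkey:

import { Conf } from '@/config.ts';
import { Storages } from '@/storages.ts';
import { getRelays } from '@/utils/outbox.ts';

const store = await Storages.db();
const relaySet = await getRelays(store, pubkey); // Set of wss:// URLs from kind 10002 lists
relaySet.delete(Conf.relay); // as done in PoolStore.event()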
@@ -1,4 +1,4 @@
import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';

import { generateDateRange } from './time.ts';
src/views.ts

@@ -12,15 +12,16 @@ async function renderEventAccounts(c: AppContext, filters: NostrFilter[], signal
return c.json([]);
}

const events = await Storages.db.query(filters, { signal });
const store = await Storages.db();
const events = await store.query(filters, { signal });
const pubkeys = new Set(events.map(({ pubkey }) => pubkey));

if (!pubkeys.size) {
return c.json([]);
}

const authors = await Storages.db.query([{ kinds: [0], authors: [...pubkeys] }], { signal })
.then((events) => hydrateEvents({ events, storage: Storages.db, signal }));
const authors = await store.query([{ kinds: [0], authors: [...pubkeys] }], { signal })
.then((events) => hydrateEvents({ events, store, signal }));

const accounts = await Promise.all(
authors.map((event) => renderAccount(event)),

@@ -32,8 +33,10 @@ async function renderEventAccounts(c: AppContext, filters: NostrFilter[], signal
async function renderAccounts(c: AppContext, authors: string[], signal = AbortSignal.timeout(1000)) {
const { since, until, limit } = paginationSchema.parse(c.req.query());

const events = await Storages.db.query([{ kinds: [0], authors, since, until, limit }], { signal })
.then((events) => hydrateEvents({ events, storage: Storages.db, signal }));
const store = await Storages.db();

const events = await store.query([{ kinds: [0], authors, since, until, limit }], { signal })
.then((events) => hydrateEvents({ events, store, signal }));

const accounts = await Promise.all(
events.map((event) => renderAccount(event)),

@@ -48,10 +51,11 @@ async function renderStatuses(c: AppContext, ids: string[], signal = AbortSignal
return c.json([]);
}

const store = await Storages.db();
const { limit } = paginationSchema.parse(c.req.query());

const events = await Storages.db.query([{ kinds: [1], ids, limit }], { signal })
.then((events) => hydrateEvents({ events, storage: Storages.db, signal }));
const events = await store.query([{ kinds: [1], ids, limit }], { signal })
.then((events) => hydrateEvents({ events, store, signal }));

if (!events.length) {
return c.json([]);

@@ -59,8 +63,10 @@ async function renderStatuses(c: AppContext, ids: string[], signal = AbortSignal

const sortedEvents = [...events].sort((a, b) => ids.indexOf(a.id) - ids.indexOf(b.id));

const viewerPubkey = await c.get('signer')?.getPublicKey();

const statuses = await Promise.all(
sortedEvents.map((event) => renderStatus(event, { viewerPubkey: c.get('pubkey') })),
sortedEvents.map((event) => renderStatus(event, { viewerPubkey })),
);

// TODO: pagination with min_id and max_id based on the order of `ids`.
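The views now resolve the store once per request and reuse it for both the query and the hydration pass. A condensed sketch of the renderAccounts flow after this change; the trailing c.json(accounts) is assumed from the surrounding handler shape:

async function renderAccounts(c: AppContext, authors: string[], signal = AbortSignal.timeout(1000)) {
  const { since, until, limit } = paginationSchema.parse(c.req.query());
  const store = await Storages.db();

  const events = await store.query([{ kinds: [0], authors, since, until, limit }], { signal })
    .then((events) => hydrateEvents({ events, store, signal }));

  const accounts = await Promise.all(
    events.map((event) => renderAccount(event)),
  );

  return c.json(accounts); // assumed
}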
@@ -2,7 +2,9 @@ import { Storages } from '@/storages.ts';
import { hasTag } from '@/tags.ts';

async function renderRelationship(sourcePubkey: string, targetPubkey: string) {
const events = await Storages.db.query([
const db = await Storages.db();

const events = await db.query([
{ kinds: [3], authors: [sourcePubkey], limit: 1 },
{ kinds: [3], authors: [targetPubkey], limit: 1 },
{ kinds: [10000], authors: [sourcePubkey], limit: 1 },
@@ -22,7 +22,7 @@ interface RenderStatusOpts {
async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<any> {
const { viewerPubkey, depth = 1 } = opts;

if (depth > 2 || depth < 0) return null;
if (depth > 2 || depth < 0) return;

const note = nip19.noteEncode(event.id);

@@ -40,7 +40,10 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
),
];

const mentionedProfiles = await Storages.optimizer.query(
const db = await Storages.db();
const optimizer = await Storages.optimizer();

const mentionedProfiles = await optimizer.query(
[{ kinds: [0], authors: mentionedPubkeys, limit: mentionedPubkeys.length }],
);

@@ -53,7 +56,7 @@ async function renderStatus(event: DittoEvent, opts: RenderStatusOpts): Promise<
),
firstUrl ? unfurlCardCached(firstUrl) : null,
viewerPubkey
? await Storages.db.query([
? await db.query([
{ kinds: [6], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
{ kinds: [7], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
{ kinds: [9734], '#e': [event.id], authors: [viewerPubkey], limit: 1 },
@@ -1,4 +1,4 @@
import { assertEquals, assertRejects } from '@/deps-test.ts';
import { assertEquals, assertRejects } from '@std/assert';

import { fetchWorker } from '@/workers/fetch.ts';
@@ -1,4 +1,4 @@
import { assertEquals } from '@/deps-test.ts';
import { assertEquals } from '@std/assert';

import { TrendsWorker } from './trends.ts';