diff --git a/docs/debugging.md b/docs/debugging.md index 84e057a..6abc513 100644 --- a/docs/debugging.md +++ b/docs/debugging.md @@ -14,4 +14,14 @@ To access the debugger remotely, you can use SSH port forwarding. Run this comma ssh -L 9229:localhost:9229 @ ``` -Then, in Chromium, go to `chrome://inspect` and the Ditto server should be available. \ No newline at end of file +Then, in Chromium, go to `chrome://inspect` and the Ditto server should be available. + +## SQLite performance + +To track slow queries, first set `DEBUG=ditto:sqlite.worker` in the environment so only SQLite logs are shown. + +Then, grep for any logs above 0.01s (durations are logged with two decimal places, so faster queries all show as `0.00s`): + +```sh +journalctl -fu ditto | grep -v '(0.00s)' +``` \ No newline at end of file diff --git a/src/controllers/api/pleroma.ts b/src/controllers/api/pleroma.ts index f76df01..9425878 100644 --- a/src/controllers/api/pleroma.ts +++ b/src/controllers/api/pleroma.ts @@ -72,9 +72,12 @@ async function getConfigs(signal: AbortSignal): Promise { limit: 1, }], { signal }); - return jsonSchema.pipe(configSchema.array()).catch([]).parse( - await new AdminSigner().nip44.decrypt(Conf.pubkey, event.content).catch(() => ''), - ); + try { + const decrypted = await new AdminSigner().nip44.decrypt(Conf.pubkey, event.content); + return jsonSchema.pipe(configSchema.array()).catch([]).parse(decrypted); + } catch (_e) { + return []; + } } export { configController, frontendConfigController, pleromaAdminDeleteStatusController, updateConfigController }; diff --git a/src/db.ts b/src/db.ts index 253fed9..e1039dc 100644 --- a/src/db.ts +++ b/src/db.ts @@ -38,6 +38,7 @@ interface EventRow { created_at: number; tags: string; sig: string; + deleted_at: number | null; } interface EventFTSRow { diff --git a/src/db/migrations/011_kind_author_index.ts b/src/db/migrations/011_kind_author_index.ts new file mode 100644 index 0000000..da21988 --- /dev/null +++ b/src/db/migrations/011_kind_author_index.ts @@ -0,0 +1,13 @@ +import { Kysely } from '@/deps.ts'; + +export 
async function up(db: Kysely<any>): Promise<void> { + await db.schema + .createIndex('idx_events_kind_pubkey_created_at') + .on('events') + .columns(['kind', 'pubkey', 'created_at']) + .execute(); +} + +export async function down(db: Kysely<any>): Promise<void> { + await db.schema.dropIndex('idx_events_kind_pubkey_created_at').execute(); +} diff --git a/src/db/migrations/012_tags_composite_index.ts b/src/db/migrations/012_tags_composite_index.ts new file mode 100644 index 0000000..8769289 --- /dev/null +++ b/src/db/migrations/012_tags_composite_index.ts @@ -0,0 +1,28 @@ +import { Kysely } from '@/deps.ts'; + +export async function up(db: Kysely<any>): Promise<void> { + await db.schema.dropIndex('idx_tags_tag').execute(); + await db.schema.dropIndex('idx_tags_value').execute(); + + await db.schema + .createIndex('idx_tags_tag_value') + .on('tags') + .columns(['tag', 'value']) + .execute(); +} + +export async function down(db: Kysely<any>): Promise<void> { + await db.schema.dropIndex('idx_tags_tag_value').execute(); + + await db.schema + .createIndex('idx_tags_tag') + .on('tags') + .column('tag') + .execute(); + + await db.schema + .createIndex('idx_tags_value') + .on('tags') + .column('value') + .execute(); +} diff --git a/src/db/migrations/013_soft_deletion.ts b/src/db/migrations/013_soft_deletion.ts new file mode 100644 index 0000000..3856ca0 --- /dev/null +++ b/src/db/migrations/013_soft_deletion.ts @@ -0,0 +1,9 @@ +import { Kysely } from '@/deps.ts'; + +export async function up(db: Kysely<any>): Promise<void> { + await db.schema.alterTable('events').addColumn('deleted_at', 'integer').execute(); +} + +export async function down(db: Kysely<any>): Promise<void> { + await db.schema.alterTable('events').dropColumn('deleted_at').execute(); +} diff --git a/src/db/migrations/014_stats_indexes.ts b/src/db/migrations/014_stats_indexes.ts new file mode 100644 index 0000000..d9071c6 --- /dev/null +++ b/src/db/migrations/014_stats_indexes.ts @@ -0,0 +1,11 @@ +import { Kysely } from '@/deps.ts'; + +export async function up(db: 
Kysely<any>): Promise<void> { + await db.schema.createIndex('idx_author_stats_pubkey').on('author_stats').column('pubkey').execute(); + await db.schema.createIndex('idx_event_stats_event_id').on('event_stats').column('event_id').execute(); +} + +export async function down(db: Kysely<any>): Promise<void> { + await db.schema.dropIndex('idx_author_stats_pubkey').execute(); + await db.schema.dropIndex('idx_event_stats_event_id').execute(); +} diff --git a/src/storages/events-db.test.ts b/src/storages/events-db.test.ts index 32aee80..744935b 100644 --- a/src/storages/events-db.test.ts +++ b/src/storages/events-db.test.ts @@ -28,13 +28,6 @@ Deno.test('insert and filter events', async () => { ); }); -Deno.test('delete events', async () => { - await eventsDB.event(event1); - assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]); - await eventsDB.remove([{ kinds: [1] }]); - assertEquals(await eventsDB.query([{ kinds: [1] }]), []); -}); - Deno.test('query events with local filter', async () => { await eventsDB.event(event1); @@ -54,6 +47,13 @@ Deno.test('query events with local filter', async () => { assertEquals(await eventsDB.query([{ kinds: [1], local: false }]), []); }); +Deno.test('delete events', async () => { + await eventsDB.event(event1); + assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]); + await eventsDB.remove([{ kinds: [1] }]); + assertEquals(await eventsDB.query([{ kinds: [1] }]), []); +}); + Deno.test('inserting replaceable events', async () => { assertEquals((await eventsDB.count([{ kinds: [0], authors: [event0.pubkey] }])).count, 0); diff --git a/src/storages/events-db.ts b/src/storages/events-db.ts index b5c8ee7..304f283 100644 --- a/src/storages/events-db.ts +++ b/src/storages/events-db.ts @@ -155,6 +155,7 @@ class EventsDB implements NStore { 'events.created_at', 'events.sig', ]) + .where('events.deleted_at', 'is', null) .orderBy('events.created_at', 'desc'); for (const [key, value] of Object.entries(filter)) { @@ 
-329,12 +330,9 @@ class EventsDB implements NStore { const query = this.getEventsQuery(filters).clearSelect().select('id'); - await db.deleteFrom('events_fts') - .where('id', 'in', () => query) - .execute(); - - return db.deleteFrom('events') + return await db.updateTable('events') .where('id', 'in', () => query) + .set({ deleted_at: Math.floor(Date.now() / 1000) }) .execute(); } diff --git a/src/workers/sqlite.worker.ts b/src/workers/sqlite.worker.ts index 5ea1dd5..eb283a9 100644 --- a/src/workers/sqlite.worker.ts +++ b/src/workers/sqlite.worker.ts @@ -1,5 +1,5 @@ /// - +import { ScopedPerformance } from 'https://deno.land/x/scoped_performance@v2.0.0/mod.ts'; import { Comlink, type CompiledQuery, Debug, DenoSqlite3, type QueryResult } from '@/deps.ts'; import '@/sentry.ts'; @@ -12,12 +12,22 @@ export const SqliteWorker = { }, executeQuery<R>({ sql, parameters }: CompiledQuery): QueryResult<R> { if (!db) throw new Error('Database not open'); - debug(sql); - return { + + const perf = new ScopedPerformance(); + perf.mark('start'); + + const result = { rows: db!.prepare(sql).all(...parameters as any[]) as R[], numAffectedRows: BigInt(db!.changes), insertId: BigInt(db!.lastInsertRowId), }; + + const { duration } = perf.measure('end', 'start'); + debug(`${sql} \x1b[90m(${(duration / 1000).toFixed(2)}s)\x1b[0m`); + + perf.clearMarks(); + perf.clearMeasures(); + return result; }, destroy() { db?.close();