Merge branch 'sqlite-perf' into 'main'
SQLite performance improvements

See merge request soapbox-pub/ditto!115
Commit 07d7b3868d
@@ -15,3 +15,13 @@ ssh -L 9229:localhost:9229 <user>@<host>
 ```
 
 Then, in Chromium, go to `chrome://inspect` and the Ditto server should be available.
+
+## SQLite performance
+
+To track slow queries, first set `DEBUG=ditto:sqlite.worker` in the environment so only SQLite logs are shown.
+
+Then, grep for any queries that took 0.01s or longer:
+
+```sh
+journalctl -fu ditto | grep -v '(0.00s)'
+```
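For context: as the `SqliteWorker` change at the bottom of this diff shows, each statement is now logged with its runtime appended in seconds, e.g. `SELECT * FROM events ... (0.07s)` (a hypothetical example), so the `grep -v '(0.00s)'` simply hides every query fast enough to round down to zero.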
@@ -72,9 +72,12 @@ async function getConfigs(signal: AbortSignal): Promise<PleromaConfig[]> {
     limit: 1,
   }], { signal });
 
-  return jsonSchema.pipe(configSchema.array()).catch([]).parse(
-    await new AdminSigner().nip44.decrypt(Conf.pubkey, event.content).catch(() => ''),
-  );
+  try {
+    const decrypted = await new AdminSigner().nip44.decrypt(Conf.pubkey, event.content);
+    return jsonSchema.pipe(configSchema.array()).catch([]).parse(decrypted);
+  } catch (_e) {
+    return [];
+  }
 }
 
 export { configController, frontendConfigController, pleromaAdminDeleteStatusController, updateConfigController };
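This rewrite makes the failure path explicit: instead of coercing a failed NIP-44 decrypt to an empty string and relying on the schema's `.catch([])` fallback, any exception thrown during decryption or parsing now returns an empty config list directly.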
@@ -38,6 +38,7 @@ interface EventRow {
   created_at: number;
   tags: string;
   sig: string;
+  deleted_at: number | null;
 }
 
 interface EventFTSRow {
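The nullable `deleted_at` column backs the soft-delete behavior introduced further down in this diff: `remove()` now stamps matching rows with a unix timestamp instead of deleting them, and `getEventsQuery` gains a `deleted_at is null` guard so stamped rows disappear from reads.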
@@ -0,0 +1,13 @@
+import { Kysely } from '@/deps.ts';
+
+export async function up(db: Kysely<any>): Promise<void> {
+  await db.schema
+    .createIndex('idx_events_kind_pubkey_created_at')
+    .on('events')
+    .columns(['kind', 'pubkey', 'created_at'])
+    .execute();
+}
+
+export async function down(db: Kysely<any>): Promise<void> {
+  await db.schema.dropIndex('idx_events_kind_pubkey_created_at').execute();
+}
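This composite index matches the hot query path in `EventsDB.getEventsQuery`, which filters on kind and pubkey and orders by `created_at desc`. A minimal sketch of a lookup the index can serve entirely, assuming a `db: Kysely<any>` handle like the one the migrations use (the helper name is hypothetical):

```ts
import { Kysely } from '@/deps.ts';

// Hypothetical helper: fetch an author's newest event of a given kind.
// With (kind, pubkey, created_at) indexed, SQLite can satisfy both
// equality filters and the ORDER BY from the index alone.
async function latestByAuthor(db: Kysely<any>, kind: number, pubkey: string) {
  return await db
    .selectFrom('events')
    .selectAll()
    .where('kind', '=', kind)
    .where('pubkey', '=', pubkey)
    .orderBy('created_at', 'desc')
    .limit(1)
    .executeTakeFirst();
}
```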
@@ -0,0 +1,28 @@
+import { Kysely } from '@/deps.ts';
+
+export async function up(db: Kysely<any>): Promise<void> {
+  await db.schema.dropIndex('idx_tags_tag').execute();
+  await db.schema.dropIndex('idx_tags_value').execute();
+
+  await db.schema
+    .createIndex('idx_tags_tag_value')
+    .on('tags')
+    .columns(['tag', 'value'])
+    .execute();
+}
+
+export async function down(db: Kysely<any>): Promise<void> {
+  await db.schema.dropIndex('idx_tags_tag_value').execute();
+
+  await db.schema
+    .createIndex('idx_tags_tag')
+    .on('tags')
+    .column('tag')
+    .execute();
+
+  await db.schema
+    .createIndex('idx_tags_value')
+    .on('tags')
+    .column('value')
+    .execute();
+}
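Replacing the two single-column indexes with one composite `(tag, value)` index works because SQLite can use the leftmost prefix of a composite index: tag-only lookups still hit it, while the common tag-plus-value lookup becomes a single index probe. A value-only lookup no longer has a dedicated index, presumably because queries always supply the tag name. A sketch of the lookup shape the composite index serves, assuming the `tags` table carries an `event_id` column alongside `tag` and `value` (an assumption, not shown in this diff):

```ts
import { Kysely } from '@/deps.ts';

// Hypothetical helper: resolve event ids referenced by an `e` tag value,
// the kind of lookup a Nostr `#e` filter translates into.
async function idsTaggedWith(db: Kysely<any>, value: string) {
  return await db
    .selectFrom('tags')
    .select('event_id')
    .where('tag', '=', 'e')
    .where('value', '=', value)
    .execute();
}
```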
@@ -0,0 +1,9 @@
+import { Kysely } from '@/deps.ts';
+
+export async function up(db: Kysely<any>): Promise<void> {
+  await db.schema.alterTable('events').addColumn('deleted_at', 'integer').execute();
+}
+
+export async function down(db: Kysely<any>): Promise<void> {
+  await db.schema.alterTable('events').dropColumn('deleted_at').execute();
+}
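`deleted_at` is a plain nullable integer (unix seconds, per the `Math.floor(Date.now() / 1000)` stamp in `EventsDB.remove` below), so existing rows default to `NULL` and remain visible without any backfill.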
@@ -0,0 +1,11 @@
+import { Kysely } from '@/deps.ts';
+
+export async function up(db: Kysely<any>): Promise<void> {
+  await db.schema.createIndex('idx_author_stats_pubkey').on('author_stats').column('pubkey').execute();
+  await db.schema.createIndex('idx_event_stats_event_id').on('event_stats').column('event_id').execute();
+}
+
+export async function down(db: Kysely<any>): Promise<void> {
+  await db.schema.dropIndex('idx_author_stats_pubkey').on('author_stats').execute();
+  await db.schema.dropIndex('idx_event_stats_event_id').on('event_stats').execute();
+}
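The stats tables are keyed by `pubkey` and `event_id` respectively, and presumably get probed once per rendered account or event; without these indexes each probe would be a full table scan.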
@@ -28,13 +28,6 @@ Deno.test('insert and filter events', async () => {
   );
 });
 
-Deno.test('delete events', async () => {
-  await eventsDB.event(event1);
-  assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]);
-  await eventsDB.remove([{ kinds: [1] }]);
-  assertEquals(await eventsDB.query([{ kinds: [1] }]), []);
-});
-
 Deno.test('query events with local filter', async () => {
   await eventsDB.event(event1);
 
@@ -54,6 +47,13 @@ Deno.test('query events with local filter', async () => {
   assertEquals(await eventsDB.query([{ kinds: [1], local: false }]), []);
 });
 
+Deno.test('delete events', async () => {
+  await eventsDB.event(event1);
+  assertEquals(await eventsDB.query([{ kinds: [1] }]), [event1]);
+  await eventsDB.remove([{ kinds: [1] }]);
+  assertEquals(await eventsDB.query([{ kinds: [1] }]), []);
+});
+
 Deno.test('inserting replaceable events', async () => {
   assertEquals((await eventsDB.count([{ kinds: [0], authors: [event0.pubkey] }])).count, 0);
 
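Note that these two test hunks are a pure move: the 'delete events' test is relocated below the local-filter test with its body unchanged, and it now passes via soft deletion rather than physical row removal.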
@@ -155,6 +155,7 @@ class EventsDB implements NStore {
         'events.created_at',
         'events.sig',
       ])
+      .where('events.deleted_at', 'is', null)
       .orderBy('events.created_at', 'desc');
 
     for (const [key, value] of Object.entries(filter)) {
@@ -329,12 +330,9 @@ class EventsDB implements NStore {
 
     const query = this.getEventsQuery(filters).clearSelect().select('id');
 
-    await db.deleteFrom('events_fts')
-      .where('id', 'in', () => query)
-      .execute();
-
-    return db.deleteFrom('events')
-      .where('id', 'in', () => query)
-      .execute();
+    return await db.updateTable('events')
+      .where('id', 'in', () => query)
+      .set({ deleted_at: Math.floor(Date.now() / 1000) })
+      .execute();
   }
 
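This is the core of the soft-delete change: `remove()` becomes a single `UPDATE ... SET deleted_at = <now>` driven by the same filter-to-query translation as reads, and the `deleted_at is null` guard added to `getEventsQuery` above hides stamped rows from every query path. The explicit `events_fts` cleanup disappears, presumably because rows are no longer physically removed, so their FTS entries are simply never reached once the parent event is filtered out.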
@@ -1,5 +1,5 @@
 /// <reference lib="webworker" />
 
+import { ScopedPerformance } from 'https://deno.land/x/scoped_performance@v2.0.0/mod.ts';
 import { Comlink, type CompiledQuery, Debug, DenoSqlite3, type QueryResult } from '@/deps.ts';
 import '@/sentry.ts';
 
@@ -12,12 +12,20 @@ export const SqliteWorker = {
   },
   executeQuery<R>({ sql, parameters }: CompiledQuery): QueryResult<R> {
     if (!db) throw new Error('Database not open');
-    debug(sql);
-    return {
+
+    const perf = new ScopedPerformance();
+    perf.mark('start');
+
+    const result = {
       rows: db!.prepare(sql).all(...parameters as any[]) as R[],
       numAffectedRows: BigInt(db!.changes),
       insertId: BigInt(db!.lastInsertRowId),
     };
+
+    const { duration } = perf.measure('end', 'start');
+    debug(`${sql} \x1b[90m(${(duration / 1000).toFixed(2)}s)\x1b[0m`);
+
+    return result;
   },
   destroy() {
     db?.close();
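The timing comes from `ScopedPerformance`, a wrapper over the standard `performance.mark`/`performance.measure` API that namespaces its mark names, so reusing 'start' and 'end' across calls is safe; `measure()` returns an entry whose `duration` is in milliseconds, hence the division by 1000 before printing seconds (the `\x1b[90m ... \x1b[0m` escapes just render the timing in gray). A minimal sketch of the pattern in isolation, mirroring the worker's usage:

```ts
import { ScopedPerformance } from 'https://deno.land/x/scoped_performance@v2.0.0/mod.ts';

// Time an arbitrary block of synchronous work, as executeQuery does
// around db.prepare(sql).all(...).
const perf = new ScopedPerformance();
perf.mark('start');

let sum = 0; // stand-in for the actual SQLite call
for (let i = 0; i < 1e6; i++) sum += i;

const { duration } = perf.measure('end', 'start');
console.log(`work took ${(duration / 1000).toFixed(2)}s`);
```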