Merge remote-tracking branch 'origin/develop' into actor
commit 4e6b9f4328

@@ -14,6 +14,6 @@ lint:
   stage: test
   script: deno lint

-# test:
-#   stage: test
-#   script: deno task test
+test:
+  stage: test
+  script: deno task test
@@ -0,0 +1,2 @@
+*
+!.gitignore
@@ -2,8 +2,8 @@
   "$schema": "https://deno.land/x/deno@v1.32.3/cli/schemas/config-file.v1.json",
   "lock": false,
   "tasks": {
-    "dev": "deno run --allow-read --allow-env --allow-net --allow-ffi --unstable --watch src/server.ts",
-    "test": "deno test"
+    "dev": "deno run --allow-read --allow-write --allow-env --allow-net --allow-ffi --unstable --watch src/server.ts",
+    "test": "deno test -A --unstable src"
   },
   "imports": {
     "@/": "./src/"
@@ -1,5 +1,6 @@
 import { type Context, cors, type Handler, Hono, type HonoEnv, logger, type MiddlewareHandler } from '@/deps.ts';
 import { type Event } from '@/event.ts';
+import '@/loopback.ts';

 import {
   accountController,
@@ -26,6 +27,7 @@ import {
   statusController,
 } from './controllers/api/statuses.ts';
 import { streamingController } from './controllers/api/streaming.ts';
+import { trendingTagsController } from './controllers/api/trends.ts';
 import { indexController } from './controllers/site.ts';
 import { hostMetaController } from './controllers/well-known/host-meta.ts';
 import { nodeInfoController, nodeInfoSchemaController } from './controllers/well-known/nodeinfo.ts';
@@ -100,6 +102,9 @@ app.get('/api/v2/search', searchController);

 app.get('/api/pleroma/frontend_configurations', frontendConfigController);

+app.get('/api/v1/trends/tags', trendingTagsController);
+app.get('/api/v1/trends', trendingTagsController);
+
 // Not (yet) implemented.
 app.get('/api/v1/notifications', emptyArrayController);
 app.get('/api/v1/bookmarks', emptyArrayController);
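Once the server is running, the new routes can be smoke-tested with a plain fetch. The sketch below is illustrative only and assumes the default local domain from src/config.ts (http://localhost:8000):

// Illustrative smoke test for the new trends endpoints (not part of the commit).
const base = 'http://localhost:8000';

const res = await fetch(`${base}/api/v1/trends`);
const tags = await res.json();

// Each entry carries a name, a local tag URL, and a short history of string counts.
for (const tag of tags) {
  console.log(tag.name, tag.url, tag.history.length);
}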
@@ -172,7 +172,7 @@ function getDescendants(eventId: string): Promise<SignedEvent<1>[]> {

 /** Publish an event to the Nostr relay. */
 function publish(event: SignedEvent, relays = Conf.publishRelays): void {
-  console.log('Publishing event', event);
+  console.log('Publishing event', event, relays);
   try {
     getPool().publish(event, relays);
   } catch (e) {
@@ -29,7 +29,11 @@ const Conf = {
     );
   },
   get relay() {
-    return Deno.env.get('DITTO_RELAY');
+    const value = Deno.env.get('DITTO_RELAY');
+    if (!value) {
+      throw new Error('Missing DITTO_RELAY');
+    }
+    return value;
   },
   get localDomain() {
     return Deno.env.get('LOCAL_DOMAIN') || 'http://localhost:8000';
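The stricter relay getter matters because the new loopback module below passes Conf.relay straight into a relay pool, so a missing DITTO_RELAY now fails loudly at startup instead of silently connecting to undefined. A minimal sketch of the new failure mode (illustrative only, not part of the commit):

// Illustrative sketch: reading Conf.relay without DITTO_RELAY set now throws.
import { Conf } from '@/config.ts';

try {
  console.log('relay configured:', Conf.relay);
} catch (e) {
  console.error(e); // Error: Missing DITTO_RELAY
}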
@@ -0,0 +1,52 @@
+import { type AppController } from '@/app.ts';
+import { Conf } from '@/config.ts';
+import { z } from '@/deps.ts';
+import { trends } from '@/trends.ts';
+import { Time } from '@/utils.ts';
+import { stripTime } from '@/utils/time.ts';
+
+const limitSchema = z.coerce.number().catch(10).transform((value) => Math.min(Math.max(value, 0), 20));
+
+const trendingTagsController: AppController = (c) => {
+  const limit = limitSchema.parse(c.req.query('limit'));
+  if (limit < 1) return c.json([]);
+
+  const now = new Date();
+  const yesterday = new Date(now.getTime() - Time.days(1));
+  const lastWeek = new Date(now.getTime() - Time.days(7));
+
+  /** Most used hashtags within the past 24h. */
+  const tags = trends.getTrendingTags({
+    since: yesterday,
+    until: now,
+    limit,
+  });
+
+  return c.json(tags.map(({ name, uses, accounts }) => ({
+    name,
+    url: Conf.local(`/tags/${name}`),
+    history: [
+      // Use the full 24h query for the current day. Then use `offset: 1` to adjust for this below.
+      // This result is more accurate than what Mastodon returns.
+      {
+        day: String(Math.floor(stripTime(now).getTime() / 1000)),
+        accounts: String(accounts),
+        uses: String(uses),
+      },
+      ...trends.getTagHistory({
+        tag: name,
+        since: lastWeek,
+        until: now,
+        limit: 6,
+        offset: 1,
+      }).map((history) => ({
+        // For some reason, Mastodon wants these to be strings... oh well.
+        day: String(Math.floor(history.day.getTime() / 1000)),
+        accounts: String(history.accounts),
+        uses: String(history.uses),
+      })),
+    ],
+  })));
+};
+
+export { trendingTagsController };
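For reference, each element the controller above returns looks roughly like the sketch below. Field names are taken from the code; the type itself is illustrative and not part of the commit:

// Illustrative only: the per-tag shape emitted by trendingTagsController.
interface TrendingTag {
  name: string;
  url: string; // Conf.local(`/tags/${name}`)
  history: {
    day: string;      // unix timestamp in seconds, as a string
    accounts: string; // distinct pubkeys, as a string
    uses: string;     // total usages, as a string
  }[];
}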
@@ -1,9 +1,10 @@
 import { createPentagon, z } from '@/deps.ts';
+import { hexIdSchema } from '@/schema.ts';

 const kv = await Deno.openKv();

 const userSchema = z.object({
-  pubkey: z.string().regex(/^[0-9a-f]{64}$/).describe('primary'),
+  pubkey: hexIdSchema.describe('primary'),
   username: z.string().regex(/^\w{1,30}$/).describe('unique'),
   createdAt: z.date(),
 });
@@ -0,0 +1 @@
+export { assert, assertEquals, assertThrows } from 'https://deno.land/std@0.177.0/testing/asserts.ts';
@@ -49,3 +49,5 @@ export {
 export { generateSeededRsa } from 'https://gitlab.com/soapbox-pub/seeded-rsa/-/raw/v1.0.0/mod.ts';
 export * as secp from 'npm:@noble/secp256k1@^1.7.1';
 export { LRUCache } from 'npm:lru-cache@^10.0.0';
+export { DB as Sqlite } from 'https://deno.land/x/sqlite@v3.7.0/mod.ts';
+export { serve } from 'https://deno.land/std@0.177.0/http/server.ts';
@@ -0,0 +1,44 @@
+import { Conf } from '@/config.ts';
+import { RelayPool } from '@/deps.ts';
+import { trends } from '@/trends.ts';
+import { nostrDate, nostrNow } from '@/utils.ts';
+
+import type { Event } from '@/event.ts';
+
+const relay = new RelayPool([Conf.relay]);
+
+// This file watches all events on your Ditto relay and triggers
+// side-effects based on them. This can be used for things like
+// notifications, trending hashtag tracking, etc.
+relay.subscribe(
+  [{ kinds: [1], since: nostrNow() }],
+  [Conf.relay],
+  handleEvent,
+  undefined,
+  undefined,
+);
+
+/** Handle events through the loopback pipeline. */
+function handleEvent(event: Event): void {
+  console.info('loopback event:', event.id);
+  trackHashtags(event);
+}
+
+/** Track whenever a hashtag is used, for processing trending tags. */
+function trackHashtags(event: Event): void {
+  const date = nostrDate(event.created_at);
+
+  const tags = event.tags
+    .filter((tag) => tag[0] === 't')
+    .map((tag) => tag[1])
+    .slice(0, 5);
+
+  if (!tags.length) return;
+
+  try {
+    console.info('tracking tags:', tags);
+    trends.addTagUsages(event.pubkey, tags, date);
+  } catch (_e) {
+    // do nothing
+  }
+}
@@ -67,15 +67,15 @@ const relaySchema = z.custom<URL>((relay) => {
   }
 });

-const nostrIdSchema = z.string().regex(/^[0-9a-f]{64}$/);
+const hexIdSchema = z.string().regex(/^[0-9a-f]{64}$/);

 const eventSchema = z.object({
-  id: nostrIdSchema,
+  id: hexIdSchema,
   kind: z.number(),
   tags: z.array(z.array(z.string())),
   content: z.string(),
   created_at: z.number(),
-  pubkey: nostrIdSchema,
+  pubkey: hexIdSchema,
   sig: z.string(),
 });

@@ -95,10 +95,14 @@ const decode64Schema = z.string().transform((value, ctx) => {
   }
 });

+const hashtagSchema = z.string().regex(/^\w{1,30}$/);
+
 export {
   decode64Schema,
   emojiTagSchema,
   filteredArray,
+  hashtagSchema,
+  hexIdSchema,
   jsonSchema,
   type MetaContent,
   metaContentSchema,
@@ -1,5 +1,5 @@
 import 'https://deno.land/std@0.177.0/dotenv/load.ts';
-import { serve } from 'https://deno.land/std@0.177.0/http/server.ts';
+import { serve } from '@/deps.ts';

 import app from './app.ts';

@@ -0,0 +1,32 @@
+import { assertEquals } from '@/deps-test.ts';
+import { Sqlite } from '@/deps.ts';
+
+import { TrendsDB } from './trends.ts';
+
+const db = new Sqlite(':memory:');
+const trends = new TrendsDB(db);
+
+const p8 = (pubkey8: string) => `${pubkey8}00000000000000000000000000000000000000000000000000000000`;
+
+Deno.test('getTrendingTags', () => {
+  trends.addTagUsages(p8('00000000'), ['ditto', 'hello', 'yolo']);
+  trends.addTagUsages(p8('00000000'), ['hello']);
+  trends.addTagUsages(p8('00000001'), ['Ditto', 'hello']);
+  trends.addTagUsages(p8('00000010'), ['DITTO']);
+
+  const result = trends.getTrendingTags({
+    since: new Date('1999-01-01T00:00:00'),
+    until: new Date('2999-01-01T00:00:00'),
+    threshold: 1,
+  });
+
+  const expected = [
+    { name: 'ditto', accounts: 3, uses: 3 },
+    { name: 'hello', accounts: 2, uses: 3 },
+    { name: 'yolo', accounts: 1, uses: 1 },
+  ];
+
+  assertEquals(result, expected);
+
+  trends.cleanupTagUsages(new Date('2999-01-01T00:00:00'));
+});
@@ -0,0 +1,124 @@
+import { Sqlite } from '@/deps.ts';
+import { hashtagSchema, hexIdSchema } from '@/schema.ts';
+import { Time } from '@/utils.ts';
+import { generateDateRange } from '@/utils/time.ts';
+
+interface GetTrendingTagsOpts {
+  since: Date;
+  until: Date;
+  limit?: number;
+  threshold?: number;
+}
+
+interface GetTagHistoryOpts {
+  tag: string;
+  since: Date;
+  until: Date;
+  limit?: number;
+  offset?: number;
+}
+
+class TrendsDB {
+  #db: Sqlite;
+
+  constructor(db: Sqlite) {
+    this.#db = db;
+
+    this.#db.execute(`
+      CREATE TABLE IF NOT EXISTS tag_usages (
+        tag TEXT NOT NULL COLLATE NOCASE,
+        pubkey8 TEXT NOT NULL,
+        inserted_at DATETIME NOT NULL
+      );
+
+      CREATE INDEX IF NOT EXISTS idx_time_tag ON tag_usages(inserted_at, tag);
+    `);
+
+    const cleanup = () => {
+      console.info('Cleaning up old tag usages...');
+      const lastWeek = new Date(new Date().getTime() - Time.days(7));
+      this.cleanupTagUsages(lastWeek);
+    };
+
+    setInterval(cleanup, Time.hours(1));
+    cleanup();
+  }
+
+  /** Gets the most used hashtags between the date range. */
+  getTrendingTags({ since, until, limit = 10, threshold = 3 }: GetTrendingTagsOpts) {
+    return this.#db.query<string[]>(
+      `
+      SELECT tag, COUNT(DISTINCT pubkey8), COUNT(*)
+        FROM tag_usages
+        WHERE inserted_at >= ? AND inserted_at < ?
+        GROUP BY tag
+        HAVING COUNT(DISTINCT pubkey8) >= ?
+        ORDER BY COUNT(DISTINCT pubkey8)
+        DESC LIMIT ?;
+      `,
+      [since, until, threshold, limit],
+    ).map((row) => ({
+      name: row[0],
+      accounts: Number(row[1]),
+      uses: Number(row[2]),
+    }));
+  }
+
+  /**
+   * Gets the tag usage count for a specific tag.
+   * It returns an array with counts for each date between the range.
+   */
+  getTagHistory({ tag, since, until, limit = 7, offset = 0 }: GetTagHistoryOpts) {
+    const result = this.#db.query<string[]>(
+      `
+      SELECT date(inserted_at), COUNT(DISTINCT pubkey8), COUNT(*)
+        FROM tag_usages
+        WHERE tag = ? AND inserted_at >= ? AND inserted_at < ?
+        GROUP BY date(inserted_at)
+        ORDER BY date(inserted_at) DESC
+        LIMIT ?
+        OFFSET ?;
+      `,
+      [tag, since, until, limit, offset],
+    ).map((row) => ({
+      day: new Date(row[0]),
+      accounts: Number(row[1]),
+      uses: Number(row[2]),
+    }));
+
+    /** Full date range between `since` and `until`. */
+    const dateRange = generateDateRange(
+      new Date(since.getTime() + Time.days(1)),
+      new Date(until.getTime() - Time.days(offset)),
+    ).reverse();
+
+    // Fill in missing dates with 0 usages.
+    return dateRange.map((day) => {
+      const data = result.find((item) => item.day.getTime() === day.getTime());
+      return data || { day, accounts: 0, uses: 0 };
+    });
+  }
+
+  addTagUsages(pubkey: string, hashtags: string[], date = new Date()): void {
+    const pubkey8 = hexIdSchema.parse(pubkey).substring(0, 8);
+    const tags = hashtagSchema.array().min(1).parse(hashtags);
+
+    this.#db.query(
+      'INSERT INTO tag_usages (tag, pubkey8, inserted_at) VALUES ' + tags.map(() => '(?, ?, ?)').join(', '),
+      tags.map((tag) => [tag, pubkey8, date]).flat(),
+    );
+  }
+
+  cleanupTagUsages(until: Date): void {
+    this.#db.query(
+      'DELETE FROM tag_usages WHERE inserted_at < ?',
+      [until],
+    );
+  }
+}
+
+const trends = new TrendsDB(
+  new Sqlite('data/trends.sqlite3'),
+);
+
+export { trends, TrendsDB };
@@ -81,7 +81,7 @@ async function parseBody(req: Request): Promise<unknown> {
 const paginationSchema = z.object({
   since: z.coerce.number().optional().catch(undefined),
   until: z.lazy(() => z.coerce.number().catch(nostrNow())),
-  limit: z.coerce.number().min(0).max(40).catch(20),
+  limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
 });

 type PaginationParams = z.infer<typeof paginationSchema>;
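The new limit rule changes behavior, not just style: under .min(0).max(40).catch(20), an out-of-range value failed validation and fell back to the default of 20, whereas the clamp keeps the caller's value bounded to [0, 40]. A standalone sketch (illustrative, importing zod directly rather than via deps.ts):

import { z } from 'npm:zod';

// Old rule: out-of-range input fails validation and falls back to 20.
const oldLimit = z.coerce.number().min(0).max(40).catch(20);
// New rule: non-numeric input falls back to 20; numeric input is clamped to [0, 40].
const newLimit = z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40));

console.log(oldLimit.parse('100')); // 20 (caught)
console.log(newLimit.parse('100')); // 40 (clamped)
console.log(newLimit.parse('abc')); // 20 (caught)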
@@ -0,0 +1,23 @@
+import { assertEquals } from '@/deps-test.ts';
+
+import { generateDateRange } from './time.ts';
+
+Deno.test('generateDateRange', () => {
+  const since = new Date('2023-07-03T16:30:00.000Z');
+  const until = new Date('2023-07-07T09:01:00.000Z');
+
+  const expected = [
+    new Date('2023-07-03T00:00:00.000Z'),
+    new Date('2023-07-04T00:00:00.000Z'),
+    new Date('2023-07-05T00:00:00.000Z'),
+    new Date('2023-07-06T00:00:00.000Z'),
+    new Date('2023-07-07T00:00:00.000Z'),
+  ];
+
+  const result = generateDateRange(since, until);
+
+  assertEquals(
+    result.map((d) => d.getTime()),
+    expected.map((d) => d.getTime()),
+  );
+});
@@ -9,4 +9,24 @@ const Time = {
   years: (y: number) => y * Time.days(365),
 };

-export { Time };
+/** Strips the time off the date, giving 12am UTC. */
+function stripTime(date: Date): Date {
+  return new Date(Date.UTC(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate()));
+}
+
+/** Strips times off the dates and generates all 24h intervals between them, inclusive of both inputs. */
+function generateDateRange(since: Date, until: Date): Date[] {
+  const dates = [];
+
+  const sinceDate = stripTime(since);
+  const untilDate = stripTime(until);
+
+  while (sinceDate <= untilDate) {
+    dates.push(new Date(sinceDate));
+    sinceDate.setUTCDate(sinceDate.getUTCDate() + 1);
+  }
+
+  return dates;
+}
+
+export { generateDateRange, stripTime, Time };