Improve chart performance (#7360)
* wip
* wip
* wip
* wip
* wip
* Update chart.ts
* wip
* Improve server performance
* wip
* wip
parent 0d19c2d42e
commit 4f249159d3

migration/1615965918224-chart-v2.ts (new file, 218 lines)
@@ -0,0 +1,218 @@
import {MigrationInterface, QueryRunner} from "typeorm";

export class chartV21615965918224 implements MigrationInterface {
    name = 'chartV21615965918224'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`DELETE FROM "__chart__active_users" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__drive" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__federation" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__hashtag" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__instance" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__network" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__notes" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__per_user_drive" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__per_user_following" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__per_user_notes" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__per_user_reaction" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__test" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__test_grouped" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__test_unique" WHERE "span" = 'day'`);
        await queryRunner.query(`DELETE FROM "__chart__users" WHERE "span" = 'day'`);

        await queryRunner.query(`DROP INDEX "IDX_15e91a03aeeac9dbccdf43fc06"`);
        await queryRunner.query(`DROP INDEX "IDX_20f57cc8f142c131340ee16742"`);
        await queryRunner.query(`DROP INDEX "IDX_c26e2c1cbb6e911e0554b27416"`);
        await queryRunner.query(`DROP INDEX "IDX_3fa0d0f17ca72e3dc80999a032"`);
        await queryRunner.query(`DROP INDEX "IDX_6e1df243476e20cbf86572ecc0"`);
        await queryRunner.query(`DROP INDEX "IDX_06690fc959f1c9fdaf21928222"`);
        await queryRunner.query(`DROP INDEX "IDX_e447064455928cf627590ef527"`);
        await queryRunner.query(`DROP INDEX "IDX_2d416e6af791a82e338c79d480"`);
        await queryRunner.query(`DROP INDEX "IDX_e9cd07672b37d8966cf3709283"`);
        await queryRunner.query(`DROP INDEX "IDX_fcc181fb8283009c61cc4083ef"`);
        await queryRunner.query(`DROP INDEX "IDX_49975586f50ed7b800fdd88fbd"`);
        await queryRunner.query(`DROP INDEX "IDX_6d6f156ceefc6bc5f273a0e370"`);
        await queryRunner.query(`DROP INDEX "IDX_c12f0af4a66cdd30c2287ce8aa"`);
        await queryRunner.query(`DROP INDEX "IDX_d0a4f79af5a97b08f37b547197"`);
        await queryRunner.query(`DROP INDEX "IDX_f5448d9633cff74208d850aabe"`);
        await queryRunner.query(`DROP INDEX "IDX_f8dd01baeded2ffa833e0a610a"`);
        await queryRunner.query(`DROP INDEX "IDX_08fac0eb3b11f04c200c0b40dd"`);
        await queryRunner.query(`DROP INDEX "IDX_9ff6944f01acb756fdc92d7563"`);
        await queryRunner.query(`DROP INDEX "IDX_e69096589f11e3baa98ddd64d0"`);
        await queryRunner.query(`DROP INDEX "IDX_0c9a159c5082cbeef3ca6706b5"`);
        await queryRunner.query(`DROP INDEX "IDX_924fc196c80ca24bae01dd37e4"`);
        await queryRunner.query(`DROP INDEX "IDX_328f259961e60c4fa0bfcf55ca"`);
        await queryRunner.query(`DROP INDEX "IDX_42ea9381f0fda8dfe0fa1c8b53"`);
        await queryRunner.query(`DROP INDEX "IDX_f2aeafde2ae6fbad38e857631b"`);
        await queryRunner.query(`DROP INDEX "IDX_f92dd6d03f8d994f29987f6214"`);
        await queryRunner.query(`DROP INDEX "IDX_57b5458d0d3d6d1e7f13d4e57f"`);
        await queryRunner.query(`DROP INDEX "IDX_4db3b84c7be0d3464714f3e0b1"`);
        await queryRunner.query(`DROP INDEX "IDX_8d2cbbc8114d90d19b44d626b6"`);
        await queryRunner.query(`DROP INDEX "IDX_046feeb12e9ef5f783f409866a"`);
        await queryRunner.query(`DROP INDEX "IDX_f68a5ab958f9f5fa17a32ac23b"`);
        await queryRunner.query(`DROP INDEX "IDX_65633a106bce43fc7c5c30a5c7"`);
        await queryRunner.query(`DROP INDEX "IDX_edeb73c09c3143a81bcb34d569"`);
        await queryRunner.query(`DROP INDEX "IDX_e316f01a6d24eb31db27f88262"`);
        await queryRunner.query(`DROP INDEX "IDX_2be7ec6cebddc14dc11e206686"`);
        await queryRunner.query(`DROP INDEX "IDX_a5133470f4825902e170328ca5"`);
        await queryRunner.query(`DROP INDEX "IDX_84e661abb7bd1e51b690d4b017"`);
        await queryRunner.query(`DROP INDEX "IDX_5c73bf61da4f6e6f15bae88ed1"`);
        await queryRunner.query(`DROP INDEX "IDX_d70c86baedc68326be11f9c0ce"`);
        await queryRunner.query(`DROP INDEX "IDX_66e1e1ecd2f29e57778af35b59"`);
        await queryRunner.query(`DROP INDEX "IDX_92255988735563f0fe4aba1f05"`);
        await queryRunner.query(`DROP INDEX "IDX_c5870993e25c3d5771f91f5003"`);
        await queryRunner.query(`DROP INDEX "IDX_f170de677ea75ad4533de2723e"`);
        await queryRunner.query(`DROP INDEX "IDX_7c184198ecf66a8d3ecb253ab3"`);
        await queryRunner.query(`DROP INDEX "IDX_f091abb24193d50c653c6b77fc"`);
        await queryRunner.query(`DROP INDEX "IDX_a770a57c70e668cc61590c9161"`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__active_users_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" DROP COLUMN "___local_count"`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" DROP COLUMN "___remote_count"`);
        await queryRunner.query(`ALTER TABLE "__chart__drive" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__drive_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__drive" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__federation" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__federation_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__federation" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__hashtag_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" DROP COLUMN "___local_count"`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" DROP COLUMN "___remote_count"`);
        await queryRunner.query(`ALTER TABLE "__chart__instance" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__instance_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__instance" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__network" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__network_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__network" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__notes" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__notes_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__notes" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_drive" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__per_user_drive_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_drive" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_following" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__per_user_following_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_following" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_notes" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__per_user_notes_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_notes" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_reaction" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__per_user_reaction_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_reaction" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__test_grouped" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__test_grouped_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__test_grouped" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__test_unique_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" DROP COLUMN "___foo"`);
        await queryRunner.query(`ALTER TABLE "__chart__test" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__test_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__test" DROP COLUMN "unique"`);
        await queryRunner.query(`ALTER TABLE "__chart__users" DROP COLUMN "span"`);
        await queryRunner.query(`DROP TYPE "public"."__chart__users_span_enum"`);
        await queryRunner.query(`ALTER TABLE "__chart__users" DROP COLUMN "unique"`);
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "__chart__users" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__users_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__users" ADD "span" "__chart__users_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__test" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__test_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__test" ADD "span" "__chart__test_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" ADD "___foo" bigint NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__test_unique_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" ADD "span" "__chart__test_unique_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__test_grouped" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__test_grouped_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__test_grouped" ADD "span" "__chart__test_grouped_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_reaction" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__per_user_reaction_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_reaction" ADD "span" "__chart__per_user_reaction_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_notes" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__per_user_notes_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_notes" ADD "span" "__chart__per_user_notes_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_following" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__per_user_following_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_following" ADD "span" "__chart__per_user_following_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_drive" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__per_user_drive_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__per_user_drive" ADD "span" "__chart__per_user_drive_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__notes" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__notes_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__notes" ADD "span" "__chart__notes_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__network" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__network_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__network" ADD "span" "__chart__network_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__instance" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__instance_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__instance" ADD "span" "__chart__instance_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" ADD "___remote_count" bigint NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" ADD "___local_count" bigint NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__hashtag_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" ADD "span" "__chart__hashtag_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__federation" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__federation_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__federation" ADD "span" "__chart__federation_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__drive" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__drive_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__drive" ADD "span" "__chart__drive_span_enum" NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" ADD "___remote_count" bigint NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" ADD "___local_count" bigint NOT NULL`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" ADD "unique" jsonb NOT NULL DEFAULT '{}'`);
        await queryRunner.query(`CREATE TYPE "public"."__chart__active_users_span_enum" AS ENUM('hour', 'day')`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" ADD "span" "__chart__active_users_span_enum" NOT NULL`);
        await queryRunner.query(`CREATE INDEX "IDX_a770a57c70e668cc61590c9161" ON "__chart__users" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f091abb24193d50c653c6b77fc" ON "__chart__users" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_7c184198ecf66a8d3ecb253ab3" ON "__chart__users" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f170de677ea75ad4533de2723e" ON "__chart__test" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_c5870993e25c3d5771f91f5003" ON "__chart__test" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_92255988735563f0fe4aba1f05" ON "__chart__test" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_66e1e1ecd2f29e57778af35b59" ON "__chart__test_unique" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_d70c86baedc68326be11f9c0ce" ON "__chart__test_unique" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_5c73bf61da4f6e6f15bae88ed1" ON "__chart__test_unique" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_84e661abb7bd1e51b690d4b017" ON "__chart__test_grouped" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_a5133470f4825902e170328ca5" ON "__chart__test_grouped" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_2be7ec6cebddc14dc11e206686" ON "__chart__test_grouped" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_e316f01a6d24eb31db27f88262" ON "__chart__per_user_reaction" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_edeb73c09c3143a81bcb34d569" ON "__chart__per_user_reaction" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_65633a106bce43fc7c5c30a5c7" ON "__chart__per_user_reaction" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f68a5ab958f9f5fa17a32ac23b" ON "__chart__per_user_notes" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_046feeb12e9ef5f783f409866a" ON "__chart__per_user_notes" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_8d2cbbc8114d90d19b44d626b6" ON "__chart__per_user_notes" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_4db3b84c7be0d3464714f3e0b1" ON "__chart__per_user_following" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_57b5458d0d3d6d1e7f13d4e57f" ON "__chart__per_user_following" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f92dd6d03f8d994f29987f6214" ON "__chart__per_user_following" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f2aeafde2ae6fbad38e857631b" ON "__chart__per_user_drive" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_42ea9381f0fda8dfe0fa1c8b53" ON "__chart__per_user_drive" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_328f259961e60c4fa0bfcf55ca" ON "__chart__per_user_drive" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_924fc196c80ca24bae01dd37e4" ON "__chart__notes" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_0c9a159c5082cbeef3ca6706b5" ON "__chart__notes" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_e69096589f11e3baa98ddd64d0" ON "__chart__notes" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_9ff6944f01acb756fdc92d7563" ON "__chart__network" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_08fac0eb3b11f04c200c0b40dd" ON "__chart__network" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f8dd01baeded2ffa833e0a610a" ON "__chart__network" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_f5448d9633cff74208d850aabe" ON "__chart__instance" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_d0a4f79af5a97b08f37b547197" ON "__chart__instance" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_c12f0af4a66cdd30c2287ce8aa" ON "__chart__instance" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_6d6f156ceefc6bc5f273a0e370" ON "__chart__hashtag" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_49975586f50ed7b800fdd88fbd" ON "__chart__hashtag" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_fcc181fb8283009c61cc4083ef" ON "__chart__hashtag" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_e9cd07672b37d8966cf3709283" ON "__chart__federation" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_2d416e6af791a82e338c79d480" ON "__chart__federation" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_e447064455928cf627590ef527" ON "__chart__federation" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_06690fc959f1c9fdaf21928222" ON "__chart__drive" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_6e1df243476e20cbf86572ecc0" ON "__chart__drive" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_3fa0d0f17ca72e3dc80999a032" ON "__chart__drive" ("span")`);
        await queryRunner.query(`CREATE INDEX "IDX_c26e2c1cbb6e911e0554b27416" ON "__chart__active_users" ("date", "group", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_20f57cc8f142c131340ee16742" ON "__chart__active_users" ("date", "span")`);
        await queryRunner.query(`CREATE INDEX "IDX_15e91a03aeeac9dbccdf43fc06" ON "__chart__active_users" ("span")`);
    }

}
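How these migration classes get executed is not part of this diff. A minimal sketch, assuming TypeORM's standard bootstrap API and placeholder connection options (host, database and so on are illustrative, not values from this commit):

import { createConnection } from 'typeorm';
import { chartV21615965918224 } from './migration/1615965918224-chart-v2';

async function migrate() {
	// Register the migration class and run pending migrations.
	const conn = await createConnection({
		type: 'postgres',
		host: 'localhost',
		database: 'misskey',
		migrations: [chartV21615965918224],
	});
	await conn.runMigrations();        // applies up()
	// conn.undoLastMigration() would invoke down() and restore the span/unique columns.
	await conn.close();
}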
migration/1615966519402-chart-v2-2.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import {MigrationInterface, QueryRunner} from "typeorm";

export class chartV221615966519402 implements MigrationInterface {
    name = 'chartV221615966519402'

    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "__chart__active_users" ADD "___local_users" character varying array NOT NULL DEFAULT '{}'::varchar[]`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" ADD "___remote_users" character varying array NOT NULL DEFAULT '{}'::varchar[]`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" ADD "___local_users" character varying array NOT NULL DEFAULT '{}'::varchar[]`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" ADD "___remote_users" character varying array NOT NULL DEFAULT '{}'::varchar[]`);
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" ADD "___foo" character varying array NOT NULL DEFAULT '{}'::varchar[]`);
    }

    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "__chart__test_unique" DROP COLUMN "___foo"`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" DROP COLUMN "___remote_users"`);
        await queryRunner.query(`ALTER TABLE "__chart__hashtag" DROP COLUMN "___local_users"`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" DROP COLUMN "___remote_users"`);
        await queryRunner.query(`ALTER TABLE "__chart__active_users" DROP COLUMN "___local_users"`);
    }

}
@@ -47,7 +47,7 @@
 		"@koa/router": "9.0.1",
 		"@sentry/browser": "5.29.2",
 		"@sentry/tracing": "5.29.2",
-		"@sinonjs/fake-timers": "6.0.1",
+		"@sinonjs/fake-timers": "7.0.2",
 		"@syuilo/aiscript": "0.11.1",
 		"@types/bcryptjs": "2.4.2",
 		"@types/bull": "3.15.0",
@@ -1,5 +1,5 @@
 import Xev from 'xev';
-import { deliverQueue, inboxQueue } from '../queue';
+import { deliverQueue, inboxQueue } from '../queue/queues';

 const ev = new Xev();

@@ -1,3 +1,7 @@
+// https://github.com/typeorm/typeorm/issues/2400
+const types = require('pg').types;
+types.setTypeParser(20, Number);
+
 import { createConnection, Logger, getConnection } from 'typeorm';
 import config from '../config';
 import { entities as charts } from '../services/chart/entities';
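For context (not stated in the diff itself): node-postgres returns bigint columns (type OID 20) as strings to avoid precision loss, so the new bigint chart columns would otherwise surface as strings in JavaScript. A hedged sketch of what the override above changes:

import { types } from 'pg';

// Without the override, `SELECT count(*) ...` style bigint results arrive as "42" (string).
// With it, OID 20 (int8/bigint) values are parsed with Number:
types.setTypeParser(20, Number);

// Caveat (an assumption, not discussed in the commit): values beyond
// Number.MAX_SAFE_INTEGER (2^53 - 1) would lose precision once parsed as Number.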
src/global.d.ts (new file, vendored, 1 line)
@@ -0,0 +1 @@
type FIXME = any;
src/misc/before-shutdown.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
// https://gist.github.com/nfantone/1eaa803772025df69d07f4dbf5df7e58

'use strict';

/**
 * @callback BeforeShutdownListener
 * @param {string} [signalOrEvent] The exit signal or event name received on the process.
 */

/**
 * System signals the app will listen to initiate shutdown.
 * @const {string[]}
 */
const SHUTDOWN_SIGNALS = ['SIGINT', 'SIGTERM'];

/**
 * Time in milliseconds to wait before forcing shutdown.
 * @const {number}
 */
const SHUTDOWN_TIMEOUT = 15000;

/**
 * A queue of listener callbacks to execute before shutting
 * down the process.
 * @type {BeforeShutdownListener[]}
 */
const shutdownListeners = [];

/**
 * Listen for signals and execute given `fn` function once.
 * @param {string[]} signals System signals to listen to.
 * @param {function(string)} fn Function to execute on shutdown.
 */
const processOnce = (signals, fn) => {
	return signals.forEach(sig => process.once(sig, fn));
};

/**
 * Sets a forced shutdown mechanism that will exit the process after `timeout` milliseconds.
 * @param {number} timeout Time to wait before forcing shutdown (milliseconds)
 */
const forceExitAfter = timeout => () => {
	setTimeout(() => {
		// Force shutdown after timeout
		console.warn(`Could not close resources gracefully after ${timeout}ms: forcing shutdown`);
		return process.exit(1);
	}, timeout).unref();
};

/**
 * Main process shutdown handler. Will invoke every previously registered async shutdown listener
 * in the queue and exit with a code of `0`. Any `Promise` rejections from any listener will
 * be logged out as a warning, but won't prevent other callbacks from executing.
 * @param {string} signalOrEvent The exit signal or event name received on the process.
 */
async function shutdownHandler(signalOrEvent) {
	console.warn(`Shutting down: received [${signalOrEvent}] signal`);

	for (const listener of shutdownListeners) {
		try {
			await listener(signalOrEvent);
		} catch (err) {
			console.warn(`A shutdown handler failed before completing with: ${err.message || err}`);
		}
	}

	return process.exit(0);
}

/**
 * Registers a new shutdown listener to be invoked before exiting
 * the main process. Listener handlers are guaranteed to be called in the order
 * they were registered.
 * @param {BeforeShutdownListener} listener The shutdown listener to register.
 * @returns {BeforeShutdownListener} Echoes back the supplied `listener`.
 */
export function beforeShutdown(listener) {
	shutdownListeners.push(listener);
	return listener;
}

// Register shutdown callback that kills the process after `SHUTDOWN_TIMEOUT` milliseconds
// This prevents custom shutdown handlers from hanging the process indefinitely
processOnce(SHUTDOWN_SIGNALS, forceExitAfter(SHUTDOWN_TIMEOUT));

// Register process shutdown callback
// Will listen to incoming signal events and execute all registered handlers in the stack
processOnce(SHUTDOWN_SIGNALS, shutdownHandler);
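A hedged usage sketch of the helper above; the import paths and the cleanup step are illustrative, not taken from this commit:

import { beforeShutdown } from './misc/before-shutdown';
import { deliverQueue } from './queue/queues';

// Handlers run in registration order on SIGINT/SIGTERM, then the process exits with 0;
// after SHUTDOWN_TIMEOUT (15 s) the forced exit fires regardless of unfinished handlers.
beforeShutdown(async () => {
	await deliverQueue.close(); // hypothetical cleanup: flush/close a Bull queue before exit
});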
@@ -1,4 +1,3 @@
-import * as Queue from 'bull';
 import * as httpSignature from 'http-signature';

 import config from '../config';
@@ -13,22 +12,7 @@ import { queueLogger } from './logger';
 import { DriveFile } from '../models/entities/drive-file';
 import { getJobInfo } from './get-job-info';
 import { IActivity } from '../remote/activitypub/type';
+import { dbQueue, deliverQueue, inboxQueue, objectStorageQueue } from './queues';
-
-function initializeQueue(name: string, limitPerSec = -1) {
-	return new Queue(name, {
-		redis: {
-			port: config.redis.port,
-			host: config.redis.host,
-			password: config.redis.pass,
-			db: config.redis.db || 0,
-		},
-		prefix: config.redis.prefix ? `${config.redis.prefix}:queue` : 'queue',
-		limiter: limitPerSec > 0 ? {
-			max: limitPerSec * 5,
-			duration: 5000
-		} : undefined
-	});
-}

 export type InboxJobData = {
 	activity: IActivity,
@@ -44,11 +28,6 @@ function renderError(e: Error): any {
 	};
 }

-export const deliverQueue = initializeQueue('deliver', config.deliverJobPerSec || 128);
-export const inboxQueue = initializeQueue('inbox', config.inboxJobPerSec || 16);
-export const dbQueue = initializeQueue('db');
-export const objectStorageQueue = initializeQueue('objectStorage');
-
 const deliverLogger = queueLogger.createSubLogger('deliver');
 const inboxLogger = queueLogger.createSubLogger('inbox');
 const dbLogger = queueLogger.createSubLogger('db');
src/queue/initialize.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import * as Queue from 'bull';
import config from '../config';

export function initialize(name: string, limitPerSec = -1) {
	return new Queue(name, {
		redis: {
			port: config.redis.port,
			host: config.redis.host,
			password: config.redis.pass,
			db: config.redis.db || 0,
		},
		prefix: config.redis.prefix ? `${config.redis.prefix}:queue` : 'queue',
		limiter: limitPerSec > 0 ? {
			max: limitPerSec * 5,
			duration: 5000
		} : undefined
	});
}
src/queue/queues.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import config from '../config';
import { initialize as initializeQueue } from './initialize';

export const deliverQueue = initializeQueue('deliver', config.deliverJobPerSec || 128);
export const inboxQueue = initializeQueue('inbox', config.inboxJobPerSec || 16);
export const dbQueue = initializeQueue('db');
export const objectStorageQueue = initializeQueue('objectStorage');
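Splitting queue construction (initialize.ts) from the queue instances (queues.ts) lets consumers such as the stats daemon above import the Bull queues without pulling in the job processors in queue/index.ts. A minimal usage sketch; the payload shape and job options are illustrative assumptions, not values from this commit:

import { deliverQueue } from './queues';

// Enqueue a delivery job on the shared Bull queue.
deliverQueue.add(
	{ to: 'https://example.com/inbox', content: '...' }, // hypothetical payload
	{ attempts: 8, backoff: { type: 'exponential', delay: 1000 }, removeOnComplete: true },
);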
@@ -17,6 +17,18 @@ export default class ActiveUsersChart extends Chart<ActiveUsersLog> {
 		return {};
 	}

+	@autobind
+	protected aggregate(logs: ActiveUsersLog[]): ActiveUsersLog {
+		return {
+			local: {
+				users: logs.reduce((a, b) => a.concat(b.local.users), [] as ActiveUsersLog['local']['users']),
+			},
+			remote: {
+				users: logs.reduce((a, b) => a.concat(b.remote.users), [] as ActiveUsersLog['remote']['users']),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<ActiveUsersLog>> {
 		return {};
@@ -25,11 +37,11 @@ export default class ActiveUsersChart extends Chart<ActiveUsersLog> {
 	@autobind
 	public async update(user: User) {
 		const update: Obj = {
-			count: 1
+			users: [user.id]
 		};

-		await this.incIfUnique({
+		await this.inc({
 			[Users.isLocalUser(user) ? 'local' : 'remote']: update
-		}, 'users', user.id);
+		});
 	}
 }
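With chart v2 the per-row `span`/`unique` bookkeeping is gone: `update()` now hands the user ID to `inc()` as an array diff, and `aggregate()` merges buffered logs by concatenating those arrays. A hedged sketch of the resulting behaviour (the chart instance name is hypothetical; the buffering and SQL generation live in core.ts further down):

// Three updates landing in the same bucket...
await activeUsersChart.update(userA); // inc({ local: { users: [userA.id] } })
await activeUsersChart.update(userA); // inc({ local: { users: [userA.id] } })
await activeUsersChart.update(userB); // inc({ remote: { users: [userB.id] } })

// ...are appended to varchar[] columns via array_cat(). Duplicates stay in the column;
// countUniqueFields() in core.ts (Array.from(new Set(v)).length) is what reduces them
// to a distinct-user count when the log is read, so the old incIfUnique jsonb lookup
// per update is no longer needed.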
@@ -27,6 +27,28 @@ export default class DriveChart extends Chart<DriveLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: DriveLog[]): DriveLog {
+		return {
+			local: {
+				totalCount: logs[0].local.totalCount,
+				totalSize: logs[0].local.totalSize,
+				incCount: logs.reduce((a, b) => a + b.local.incCount, 0),
+				incSize: logs.reduce((a, b) => a + b.local.incSize, 0),
+				decCount: logs.reduce((a, b) => a + b.local.decCount, 0),
+				decSize: logs.reduce((a, b) => a + b.local.decSize, 0),
+			},
+			remote: {
+				totalCount: logs[0].remote.totalCount,
+				totalSize: logs[0].remote.totalSize,
+				incCount: logs.reduce((a, b) => a + b.remote.incCount, 0),
+				incSize: logs.reduce((a, b) => a + b.remote.incSize, 0),
+				decCount: logs.reduce((a, b) => a + b.remote.decCount, 0),
+				decSize: logs.reduce((a, b) => a + b.remote.decSize, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<DriveLog>> {
 		const [localCount, remoteCount, localSize, remoteSize] = await Promise.all([
@@ -20,6 +20,17 @@ export default class FederationChart extends Chart<FederationLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: FederationLog[]): FederationLog {
+		return {
+			instance: {
+				total: logs[0].instance.total,
+				inc: logs.reduce((a, b) => a + b.instance.inc, 0),
+				dec: logs.reduce((a, b) => a + b.instance.dec, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<FederationLog>> {
 		const [total] = await Promise.all([
@@ -17,6 +17,18 @@ export default class HashtagChart extends Chart<HashtagLog> {
 		return {};
 	}

+	@autobind
+	protected aggregate(logs: HashtagLog[]): HashtagLog {
+		return {
+			local: {
+				users: logs.reduce((a, b) => a.concat(b.local.users), [] as HashtagLog['local']['users']),
+			},
+			remote: {
+				users: logs.reduce((a, b) => a.concat(b.remote.users), [] as HashtagLog['remote']['users']),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<HashtagLog>> {
 		return {};
@@ -25,11 +37,11 @@ export default class HashtagChart extends Chart<HashtagLog> {
 	@autobind
 	public async update(hashtag: string, user: User) {
 		const update: Obj = {
-			count: 1
+			users: [user.id]
 		};

-		await this.incIfUnique({
+		await this.inc({
 			[Users.isLocalUser(user) ? 'local' : 'remote']: update
-		}, 'users', user.id, hashtag);
+		}, hashtag);
 	}
 }
@@ -36,6 +36,50 @@ export default class InstanceChart extends Chart<InstanceLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: InstanceLog[]): InstanceLog {
+		return {
+			requests: {
+				failed: logs.reduce((a, b) => a + b.requests.failed, 0),
+				succeeded: logs.reduce((a, b) => a + b.requests.succeeded, 0),
+				received: logs.reduce((a, b) => a + b.requests.received, 0),
+			},
+			notes: {
+				total: logs[0].notes.total,
+				inc: logs.reduce((a, b) => a + b.notes.inc, 0),
+				dec: logs.reduce((a, b) => a + b.notes.dec, 0),
+				diffs: {
+					reply: logs.reduce((a, b) => a + b.notes.diffs.reply, 0),
+					renote: logs.reduce((a, b) => a + b.notes.diffs.renote, 0),
+					normal: logs.reduce((a, b) => a + b.notes.diffs.normal, 0),
+				},
+			},
+			users: {
+				total: logs[0].users.total,
+				inc: logs.reduce((a, b) => a + b.users.inc, 0),
+				dec: logs.reduce((a, b) => a + b.users.dec, 0),
+			},
+			following: {
+				total: logs[0].following.total,
+				inc: logs.reduce((a, b) => a + b.following.inc, 0),
+				dec: logs.reduce((a, b) => a + b.following.dec, 0),
+			},
+			followers: {
+				total: logs[0].followers.total,
+				inc: logs.reduce((a, b) => a + b.followers.inc, 0),
+				dec: logs.reduce((a, b) => a + b.followers.dec, 0),
+			},
+			drive: {
+				totalFiles: logs[0].drive.totalFiles,
+				totalUsage: logs[0].drive.totalUsage,
+				incFiles: logs.reduce((a, b) => a + b.drive.incFiles, 0),
+				incUsage: logs.reduce((a, b) => a + b.drive.incUsage, 0),
+				decFiles: logs.reduce((a, b) => a + b.drive.decFiles, 0),
+				decUsage: logs.reduce((a, b) => a + b.drive.decUsage, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(group: string): Promise<DeepPartial<InstanceLog>> {
 		const [
@@ -15,6 +15,17 @@ export default class NetworkChart extends Chart<NetworkLog> {
 		return {};
 	}

+	@autobind
+	protected aggregate(logs: NetworkLog[]): NetworkLog {
+		return {
+			incomingRequests: logs.reduce((a, b) => a + b.incomingRequests, 0),
+			outgoingRequests: logs.reduce((a, b) => a + b.outgoingRequests, 0),
+			totalTime: logs.reduce((a, b) => a + b.totalTime, 0),
+			incomingBytes: logs.reduce((a, b) => a + b.incomingBytes, 0),
+			outgoingBytes: logs.reduce((a, b) => a + b.outgoingBytes, 0),
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<NetworkLog>> {
 		return {};
@@ -25,6 +25,32 @@ export default class NotesChart extends Chart<NotesLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: NotesLog[]): NotesLog {
+		return {
+			local: {
+				total: logs[0].local.total,
+				inc: logs.reduce((a, b) => a + b.local.inc, 0),
+				dec: logs.reduce((a, b) => a + b.local.dec, 0),
+				diffs: {
+					reply: logs.reduce((a, b) => a + b.local.diffs.reply, 0),
+					renote: logs.reduce((a, b) => a + b.local.diffs.renote, 0),
+					normal: logs.reduce((a, b) => a + b.local.diffs.normal, 0),
+				},
+			},
+			remote: {
+				total: logs[0].remote.total,
+				inc: logs.reduce((a, b) => a + b.remote.inc, 0),
+				dec: logs.reduce((a, b) => a + b.remote.dec, 0),
+				diffs: {
+					reply: logs.reduce((a, b) => a + b.remote.diffs.reply, 0),
+					renote: logs.reduce((a, b) => a + b.remote.diffs.renote, 0),
+					normal: logs.reduce((a, b) => a + b.remote.diffs.normal, 0),
+				},
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<NotesLog>> {
 		const [localCount, remoteCount] = await Promise.all([
@@ -20,6 +20,18 @@ export default class PerUserDriveChart extends Chart<PerUserDriveLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: PerUserDriveLog[]): PerUserDriveLog {
+		return {
+			totalCount: logs[0].totalCount,
+			totalSize: logs[0].totalSize,
+			incCount: logs.reduce((a, b) => a + b.incCount, 0),
+			incSize: logs.reduce((a, b) => a + b.incSize, 0),
+			decCount: logs.reduce((a, b) => a + b.decCount, 0),
+			decSize: logs.reduce((a, b) => a + b.decSize, 0),
+		};
+	}
+
 	@autobind
 	protected async fetchActual(group: string): Promise<DeepPartial<PerUserDriveLog>> {
 		const [count, size] = await Promise.all([
@@ -35,6 +35,36 @@ export default class PerUserFollowingChart extends Chart<PerUserFollowingLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: PerUserFollowingLog[]): PerUserFollowingLog {
+		return {
+			local: {
+				followings: {
+					total: logs[0].local.followings.total,
+					inc: logs.reduce((a, b) => a + b.local.followings.inc, 0),
+					dec: logs.reduce((a, b) => a + b.local.followings.dec, 0),
+				},
+				followers: {
+					total: logs[0].local.followers.total,
+					inc: logs.reduce((a, b) => a + b.local.followers.inc, 0),
+					dec: logs.reduce((a, b) => a + b.local.followers.dec, 0),
+				},
+			},
+			remote: {
+				followings: {
+					total: logs[0].remote.followings.total,
+					inc: logs.reduce((a, b) => a + b.remote.followings.inc, 0),
+					dec: logs.reduce((a, b) => a + b.remote.followings.dec, 0),
+				},
+				followers: {
+					total: logs[0].remote.followers.total,
+					inc: logs.reduce((a, b) => a + b.remote.followers.inc, 0),
+					dec: logs.reduce((a, b) => a + b.remote.followers.dec, 0),
+				},
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(group: string): Promise<DeepPartial<PerUserFollowingLog>> {
 		const [
@@ -20,6 +20,20 @@ export default class PerUserNotesChart extends Chart<PerUserNotesLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: PerUserNotesLog[]): PerUserNotesLog {
+		return {
+			total: logs[0].total,
+			inc: logs.reduce((a, b) => a + b.inc, 0),
+			dec: logs.reduce((a, b) => a + b.dec, 0),
+			diffs: {
+				reply: logs.reduce((a, b) => a + b.diffs.reply, 0),
+				renote: logs.reduce((a, b) => a + b.diffs.renote, 0),
+				normal: logs.reduce((a, b) => a + b.diffs.normal, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(group: string): Promise<DeepPartial<PerUserNotesLog>> {
 		const [count] = await Promise.all([
@@ -18,6 +18,18 @@ export default class PerUserReactionsChart extends Chart<PerUserReactionsLog> {
 		return {};
 	}

+	@autobind
+	protected aggregate(logs: PerUserReactionsLog[]): PerUserReactionsLog {
+		return {
+			local: {
+				count: logs.reduce((a, b) => a + b.local.count, 0),
+			},
+			remote: {
+				count: logs.reduce((a, b) => a + b.remote.count, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(group: string): Promise<DeepPartial<PerUserReactionsLog>> {
 		return {};
@@ -21,6 +21,17 @@ export default class TestGroupedChart extends Chart<TestGroupedLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: TestGroupedLog[]): TestGroupedLog {
+		return {
+			foo: {
+				total: logs[0].foo.total,
+				inc: logs.reduce((a, b) => a + b.foo.inc, 0),
+				dec: logs.reduce((a, b) => a + b.foo.dec, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(group: string): Promise<DeepPartial<TestGroupedLog>> {
 		return {
@@ -15,6 +15,13 @@ export default class TestUniqueChart extends Chart<TestUniqueLog> {
 		return {};
 	}

+	@autobind
+	protected aggregate(logs: TestUniqueLog[]): TestUniqueLog {
+		return {
+			foo: logs.reduce((a, b) => a.concat(b.foo), [] as TestUniqueLog['foo']),
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<TestUniqueLog>> {
 		return {};
@@ -22,8 +29,8 @@ export default class TestUniqueChart extends Chart<TestUniqueLog> {

 	@autobind
 	public async uniqueIncrement(key: string) {
-		await this.incIfUnique({
-			foo: 1
-		}, 'foos', key);
+		await this.inc({
+			foo: [key]
+		});
 	}
 }
@@ -21,6 +21,17 @@ export default class TestChart extends Chart<TestLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: TestLog[]): TestLog {
+		return {
+			foo: {
+				total: logs[0].foo.total,
+				inc: logs.reduce((a, b) => a + b.foo.inc, 0),
+				dec: logs.reduce((a, b) => a + b.foo.dec, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<TestLog>> {
 		return {
@@ -25,6 +25,22 @@ export default class UsersChart extends Chart<UsersLog> {
 		};
 	}

+	@autobind
+	protected aggregate(logs: UsersLog[]): UsersLog {
+		return {
+			local: {
+				total: logs[0].local.total,
+				inc: logs.reduce((a, b) => a + b.local.inc, 0),
+				dec: logs.reduce((a, b) => a + b.local.dec, 0),
+			},
+			remote: {
+				total: logs[0].remote.total,
+				inc: logs.reduce((a, b) => a + b.remote.inc, 0),
+				dec: logs.reduce((a, b) => a + b.remote.dec, 0),
+			},
+		};
+	}
+
 	@autobind
 	protected async fetchActual(): Promise<DeepPartial<UsersLog>> {
 		const [localCount, remoteCount] = await Promise.all([
@@ -1,11 +1,15 @@
 export const logSchema = {
 	/**
-	 * アクティブユーザー数
+	 * アクティブユーザー
 	 */
-	count: {
-		type: 'number' as const,
+	users: {
+		type: 'array' as const,
 		optional: false as const, nullable: false as const,
-		description: 'アクティブユーザー数',
+		description: 'アクティブユーザー',
+		items: {
+			type: 'string' as const,
+			optional: false as const, nullable: false as const,
+		}
 	},
 };
@@ -1,11 +1,15 @@
 export const logSchema = {
 	/**
-	 * 投稿された数
+	 * 投稿したユーザー
 	 */
-	count: {
-		type: 'number' as const,
+	users: {
+		type: 'array' as const,
 		optional: false as const, nullable: false as const,
-		description: '投稿された数',
+		description: '投稿したユーザー',
+		items: {
+			type: 'string' as const,
+			optional: false as const, nullable: false as const,
+		}
 	},
 };
@@ -3,9 +3,12 @@ export const schema = {
 	optional: false as const, nullable: false as const,
 	properties: {
 		foo: {
-			type: 'number' as const,
+			type: 'array' as const,
 			optional: false as const, nullable: false as const,
-			description: ''
+			items: {
+				type: 'string' as const,
+				optional: false as const, nullable: false as const,
+			}
 		},
 	}
 };
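In core.ts below, convertQuery() turns a numeric diff into an additive SET expression and an array diff into an array_cat() append. A hedged sketch of what it produces; the column names follow the ___-prefix convention visible in the migrations, but this exact diff object is illustrative:

// diff = { '___local_users': ['a', 'b'], '___local_inc': 3 }
// generated SET fragments (per the convertQuery() change below):
//   "___local_inc"   ->  "___local_inc" + 3
//   "___local_users" ->  array_cat("___local_users", '{"a","b"}'::varchar[])
// Duplicates are kept in the stored array; uniqueness is only applied on read,
// via countUniqueFields() (Array.from(new Set(v)).length).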
@ -24,8 +24,6 @@ type ArrayValue<T> = {
|
|||||||
[P in keyof T]: T[P] extends number ? T[P][] : ArrayValue<T[P]>;
|
[P in keyof T]: T[P] extends number ? T[P][] : ArrayValue<T[P]>;
|
||||||
};
|
};
|
||||||
|
|
||||||
type Span = 'day' | 'hour';
|
|
||||||
|
|
||||||
type Log = {
|
type Log = {
|
||||||
id: number;
|
id: number;
|
||||||
|
|
||||||
@ -38,22 +36,14 @@ type Log = {
|
|||||||
* 集計日時のUnixタイムスタンプ(秒)
|
* 集計日時のUnixタイムスタンプ(秒)
|
||||||
*/
|
*/
|
||||||
date: number;
|
date: number;
|
||||||
|
|
||||||
/**
|
|
||||||
* 集計期間
|
|
||||||
*/
|
|
||||||
span: Span;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* ユニークインクリメント用
|
|
||||||
*/
|
|
||||||
unique?: Record<string, any>;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const camelToSnake = (str: string) => {
|
const camelToSnake = (str: string) => {
|
||||||
return str.replace(/([A-Z])/g, s => '_' + s.charAt(0).toLowerCase());
|
return str.replace(/([A-Z])/g, s => '_' + s.charAt(0).toLowerCase());
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const removeDuplicates = (array: any[]) => Array.from(new Set(array));
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* 様々なチャートの管理を司るクラス
|
* 様々なチャートの管理を司るクラス
|
||||||
*/
|
*/
|
||||||
@ -62,10 +52,21 @@ export default abstract class Chart<T extends Record<string, any>> {
|
|||||||
private static readonly columnDot = '_';
|
private static readonly columnDot = '_';
|
||||||
|
|
||||||
private name: string;
|
private name: string;
|
||||||
|
private queue: {
|
||||||
|
diff: DeepPartial<T>;
|
||||||
|
group: string | null;
|
||||||
|
}[] = [];
|
||||||
public schema: Schema;
|
public schema: Schema;
|
||||||
protected repository: Repository<Log>;
|
protected repository: Repository<Log>;
|
||||||
|
|
||||||
protected abstract genNewLog(latest: T): DeepPartial<T>;
|
protected abstract genNewLog(latest: T): DeepPartial<T>;
|
||||||
protected abstract async fetchActual(group: string | null): Promise<DeepPartial<T>>;
|
|
||||||
|
/**
|
||||||
|
* @param logs 日時が新しい方が先頭
|
||||||
|
*/
|
||||||
|
protected abstract aggregate(logs: T[]): T;
|
||||||
|
|
||||||
|
protected abstract fetchActual(group: string | null): Promise<DeepPartial<T>>;
|
||||||
|
|
||||||
@autobind
|
@autobind
|
||||||
private static convertSchemaToFlatColumnDefinitions(schema: Schema) {
|
private static convertSchemaToFlatColumnDefinitions(schema: Schema) {
|
||||||
@ -75,10 +76,15 @@ export default abstract class Chart<T extends Record<string, any>> {
|
|||||||
const p = path ? `${path}${this.columnDot}${k}` : k;
|
const p = path ? `${path}${this.columnDot}${k}` : k;
|
||||||
if (v.type === 'object') {
|
if (v.type === 'object') {
|
||||||
flatColumns(v.properties, p);
|
flatColumns(v.properties, p);
|
||||||
} else {
|
} else if (v.type === 'number') {
|
||||||
columns[this.columnPrefix + p] = {
|
columns[this.columnPrefix + p] = {
|
||||||
type: 'bigint',
|
type: 'bigint',
|
||||||
};
|
};
|
||||||
|
} else if (v.type === 'array' && v.items.type === 'string') {
|
||||||
|
columns[this.columnPrefix + p] = {
|
||||||
|
type: 'varchar',
|
||||||
|
array: true,
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -99,11 +105,11 @@ export default abstract class Chart<T extends Record<string, any>> {
 
 	@autobind
 	private static convertObjectToFlattenColumns(x: Record<string, any>) {
-		const columns = {} as Record<string, number>;
+		const columns = {} as Record<string, number | unknown[]>;
 		const flatten = (x: Obj, path?: string) => {
 			for (const [k, v] of Object.entries(x)) {
 				const p = path ? `${path}${this.columnDot}${k}` : k;
-				if (typeof v === 'object') {
+				if (typeof v === 'object' && !Array.isArray(v)) {
 					flatten(v, p);
 				} else {
 					columns[this.columnPrefix + p] = v;
@@ -115,14 +121,37 @@ export default abstract class Chart<T extends Record<string, any>> {
 	}
 
 	@autobind
-	private static convertQuery(x: Record<string, any>) {
+	private static countUniqueFields(x: Record<string, any>) {
+		const exec = (x: Obj) => {
+			const res = {} as Record<string, any>;
+			for (const [k, v] of Object.entries(x)) {
+				if (typeof v === 'object' && !Array.isArray(v)) {
+					res[k] = exec(v);
+				} else if (Array.isArray(v)) {
+					res[k] = Array.from(new Set(v)).length;
+				} else {
+					res[k] = v;
+				}
+			}
+			return res;
+		};
+		return exec(x);
+	}
+
+	@autobind
+	private static convertQuery(diff: Record<string, number | unknown[]>) {
 		const query: Record<string, Function> = {};
 
-		const columns = Chart.convertObjectToFlattenColumns(x);
-		for (const [k, v] of Object.entries(columns)) {
-			if (v > 0) query[k] = () => `"${k}" + ${v}`;
-			if (v < 0) query[k] = () => `"${k}" - ${Math.abs(v)}`;
+		for (const [k, v] of Object.entries(diff)) {
+			if (typeof v === 'number') {
+				if (v > 0) query[k] = () => `"${k}" + ${v}`;
+				if (v < 0) query[k] = () => `"${k}" - ${Math.abs(v)}`;
+			} else if (Array.isArray(v)) {
+				// TODO: also handle items that are not strings
+				// TODO: SQL-escape the items
+				const items = v.map(item => `"${item}"`).join(',');
+				query[k] = () => `array_cat("${k}", '{${items}}'::varchar[])`;
+			}
 		}
 
 		return query;
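Both helpers are private statics, so the calls below are purely illustrative (hypothetical values), but they show the intended data flow: countUniqueFields collapses every array into its number of distinct elements for read-side output, while convertQuery turns a flattened diff into SQL fragments, adding or subtracting numbers and appending array values with array_cat.

	// Sketch only:
	Chart.countUniqueFields({ local: { users: ['a', 'a', 'b'] }, total: 3 });
	// => { local: { users: 2 }, total: 3 }

	Chart.convertQuery({ ___local_total: 2, ___local_users: ['a', 'b'] });
	// => {
	//   ___local_total: () => '"___local_total" + 2',
	//   ___local_users: () => 'array_cat("___local_users", \'{"a","b"}\'::varchar[])'
	// }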
@@ -169,28 +198,14 @@ export default abstract class Chart<T extends Record<string, any>> {
 				length: 128,
 				nullable: true
 			},
-			span: {
-				type: 'enum',
-				enum: ['hour', 'day']
-			},
-			unique: {
-				type: 'jsonb',
-				default: {}
-			},
 			...Chart.convertSchemaToFlatColumnDefinitions(schema)
 		},
 		indices: [{
 			columns: ['date']
-		}, {
-			columns: ['span']
 		}, {
 			columns: ['group']
-		}, {
-			columns: ['span', 'date']
 		}, {
 			columns: ['date', 'group']
-		}, {
-			columns: ['span', 'date', 'group']
 		}]
 		});
 	}
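With the span enum and the unique jsonb columns removed, every row is an hourly log and unique values live in the flattened varchar[] columns, so only the date, group and (date, group) indices remain. A condensed sketch of what schemaToEntity now generates, assuming the length-128 nullable column shown at the top of this hunk is the group column (the id and date definitions are untouched by this commit and elided here):

	// Sketch only, not verbatim code:
	const entityOptions = {
		columns: {
			group: { type: 'varchar', length: 128, nullable: true },
			...Chart.convertSchemaToFlatColumnDefinitions(schema), // bigint / varchar[] chart columns
		},
		indices: [
			{ columns: ['date'] },
			{ columns: ['group'] },
			{ columns: ['date', 'group'] },
		],
	};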
@@ -200,7 +215,7 @@ export default abstract class Chart<T extends Record<string, any>> {
 		this.schema = schema;
 		const entity = Chart.schemaToEntity(name, schema);
 
-		const keys = ['span', 'date'];
+		const keys = ['date'];
 		if (grouped) keys.push('group');
 
 		entity.options.uniques = [{
@@ -220,7 +235,8 @@ export default abstract class Chart<T extends Record<string, any>> {
 				flatColumns(v.properties, p);
 			} else {
 				if (nestedProperty.get(log, p) == null) {
-					nestedProperty.set(log, p, 0);
+					const emptyValue = v.type === 'number' ? 0 : [];
+					nestedProperty.set(log, p, emptyValue);
 				}
 			}
 		}
@@ -230,10 +246,9 @@ export default abstract class Chart<T extends Record<string, any>> {
 	}
 
 	@autobind
-	private getLatestLog(span: Span, group: string | null = null): Promise<Log | null> {
+	private getLatestLog(group: string | null = null): Promise<Log | null> {
 		return this.repository.findOne({
 			group: group,
-			span: span
 		}, {
 			order: {
 				date: -1
@@ -242,17 +257,13 @@ export default abstract class Chart<T extends Record<string, any>> {
 	}
 
 	@autobind
-	private async getCurrentLog(span: Span, group: string | null = null): Promise<Log> {
+	private async getCurrentLog(group: string | null = null): Promise<Log> {
 		const [y, m, d, h] = Chart.getCurrentDate();
 
-		const current =
-			span == 'day' ? dateUTC([y, m, d, 0]) :
-			span == 'hour' ? dateUTC([y, m, d, h]) :
-			null as never;
+		const current = dateUTC([y, m, d, h]);
 
-		// The current log (today's or this hour's)
+		// The current (= this hour's) log
 		const currentLog = await this.repository.findOne({
-			span: span,
 			date: Chart.dateToTimestamp(current),
 			...(group ? { group: group } : {})
 		});
@@ -271,7 +282,7 @@ export default abstract class Chart<T extends Record<string, any>> {
 		// * If nothing that would update the chart happened yesterday,
 		// * the log may never have been created and the document may not exist,
 		// * so we fetch the "most recent" log rather than assuming "yesterday's"
-		const latest = await this.getLatestLog(span, group);
+		const latest = await this.getLatestLog(group);
 
 		if (latest != null) {
 			const obj = Chart.convertFlattenColumnsToObject(
@@ -286,17 +297,16 @@ export default abstract class Chart<T extends Record<string, any>> {
 			// Create the initial log data
 			data = this.getNewLog(null);
 
-			logger.info(`${this.name + (group ? `:${group}` : '')} (${span}): Initial commit created`);
+			logger.info(`${this.name + (group ? `:${group}` : '')}: Initial commit created`);
 		}
 
 		const date = Chart.dateToTimestamp(current);
-		const lockKey = `${this.name}:${date}:${group}:${span}`;
+		const lockKey = `${this.name}:${date}:${group}`;
 
 		const unlock = await getChartInsertLock(lockKey);
 		try {
 			// Check once more while holding the lock
 			const currentLog = await this.repository.findOne({
-				span: span,
 				date: date,
 				...(group ? { group: group } : {})
 			});
@@ -307,12 +317,11 @@ export default abstract class Chart<T extends Record<string, any>> {
 			// Insert a new log
 			log = await this.repository.save({
 				group: group,
-				span: span,
 				date: date,
 				...Chart.convertObjectToFlattenColumns(data)
 			});
 
-			logger.info(`${this.name + (group ? `:${group}` : '')} (${span}): New commit created`);
+			logger.info(`${this.name + (group ? `:${group}` : '')}: New commit created`);
 
 			return log;
 		} finally {
@@ -321,25 +330,45 @@ export default abstract class Chart<T extends Record<string, any>> {
 	}
 
 	@autobind
-	protected commit(query: Record<string, Function>, group: string | null = null, uniqueKey?: string, uniqueValue?: string): Promise<any> {
-		const update = async (log: Log) => {
-			// For a unique increment, skip if the value already exists under the given key
-			if (
-				uniqueKey && log.unique &&
-				log.unique[uniqueKey] &&
-				log.unique[uniqueKey].includes(uniqueValue)
-			) return;
-
-			// Add the value under the unique increment's specified key
-			if (uniqueKey && log.unique) {
-				if (log.unique[uniqueKey]) {
-					const sql = `jsonb_set("unique", '{${uniqueKey}}', ("unique"->>'${uniqueKey}')::jsonb || '["${uniqueValue}"]'::jsonb)`;
-					query['unique'] = () => sql;
-				} else {
-					const sql = `jsonb_set("unique", '{${uniqueKey}}', '["${uniqueValue}"]')`;
-					query['unique'] = () => sql;
-				}
-			}
+	protected commit(diff: DeepPartial<T>, group: string | null = null): void {
+		this.queue.push({
+			diff, group,
+		});
+	}
+
+	@autobind
+	public async save() {
+		if (this.queue.length === 0) {
+			logger.info(`${this.name}: Write skipped`);
+			return;
+		}
+
+		// TODO: handle the case where the queue still contains a log for a previous hour.
+		// For example, suppose save runs every 20 minutes and the previous run was at 01:50.
+		// The next save will then run at 02:10, but if a new entry was queued at 01:55,
+		// that entry should really be stored as part of the 01:00- log, yet it gets counted in the 02:00- log.
+		// A fix for this looks complicated to implement, so it is deferred for now.
+
+		const update = async (log: Log) => {
+			const finalDiffs = {} as Record<string, number | unknown[]>;
+
+			for (const diff of this.queue.filter(q => q.group === log.group).map(q => q.diff)) {
+				const columns = Chart.convertObjectToFlattenColumns(diff);
+
+				for (const [k, v] of Object.entries(columns)) {
+					if (finalDiffs[k] == null) {
+						finalDiffs[k] = v;
+					} else {
+						if (typeof finalDiffs[k] === 'number') {
+							(finalDiffs[k] as number) += v as number;
+						} else {
+							(finalDiffs[k] as unknown[]) = (finalDiffs[k] as unknown[]).concat(v);
+						}
+					}
+				}
+			}
+
+			const query = Chart.convertQuery(finalDiffs);
 
 			// Update the log
 			await this.repository.createQueryBuilder()
@@ -347,12 +376,16 @@ export default abstract class Chart<T extends Record<string, any>> {
 				.set(query)
 				.where('id = :id', { id: log.id })
 				.execute();
+
+			logger.info(`${this.name + (log.group ? `:${log.group}` : '')}: Updated`);
+
+			// TODO: make sure entries queued after this whole process started are not dropped
+			this.queue = this.queue.filter(q => q.group !== log.group);
 		};
 
-		return Promise.all([
-			this.getCurrentLog('day', group).then(log => update(log)),
-			this.getCurrentLog('hour', group).then(log => update(log)),
-		]);
+		const groups = removeDuplicates(this.queue.map(log => log.group));
+		await Promise.all(groups.map(group => this.getCurrentLog(group).then(log => update(log))));
 	}
 
 	@autobind
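The net effect of the two hunks above: commit() no longer touches the database at all; it only pushes a diff onto this.queue, and save() later merges every queued diff per group (numbers summed, arrays concatenated) and issues one UPDATE per group against the current hour's row, which getCurrentLog creates first if needed. The test file further down in this diff shows the resulting calling pattern; roughly:

	// Buffered write pattern (mirrors the updated tests):
	await testChart.increment(); // commit() only queues a diff in memory
	await testChart.increment(); // still nothing written to the DB
	await testChart.save();      // queued diffs are merged and written in one UPDATE for this hour's row
	const chartHours = await testChart.getChart('hour', 3, null);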
@@ -367,39 +400,30 @@ export default abstract class Chart<T extends Record<string, any>> {
 				.execute();
 		};
 
-		return Promise.all([
-			this.getCurrentLog('day', group).then(log => update(log)),
-			this.getCurrentLog('hour', group).then(log => update(log)),
-		]);
+		return this.getCurrentLog(group).then(log => update(log));
 	}
 
 	@autobind
 	protected async inc(inc: DeepPartial<T>, group: string | null = null): Promise<void> {
-		await this.commit(Chart.convertQuery(inc as any), group);
+		await this.commit(inc, group);
 	}
 
 	@autobind
-	protected async incIfUnique(inc: DeepPartial<T>, key: string, value: string, group: string | null = null): Promise<void> {
-		await this.commit(Chart.convertQuery(inc as any), group, key, value);
-	}
-
-	@autobind
-	public async getChart(span: Span, amount: number, begin: Date | null, group: string | null = null): Promise<ArrayValue<T>> {
-		const [y, m, d, h, _m, _s, _ms] = begin ? Chart.parseDate(subtractTime(addTime(begin, 1, span), 1)) : Chart.getCurrentDate();
-		const [y2, m2, d2, h2] = begin ? Chart.parseDate(addTime(begin, 1, span)) : [] as never;
+	public async getChart(span: 'hour' | 'day', amount: number, cursor: Date | null, group: string | null = null): Promise<ArrayValue<T>> {
+		const [y, m, d, h, _m, _s, _ms] = cursor ? Chart.parseDate(subtractTime(addTime(cursor, 1, span), 1)) : Chart.getCurrentDate();
+		const [y2, m2, d2, h2] = cursor ? Chart.parseDate(addTime(cursor, 1, span)) : [] as never;
 
 		const lt = dateUTC([y, m, d, h, _m, _s, _ms]);
 
 		const gt =
-			span === 'day' ? subtractTime(begin ? dateUTC([y2, m2, d2, 0]) : dateUTC([y, m, d, 0]), amount - 1, 'day') :
-			span === 'hour' ? subtractTime(begin ? dateUTC([y2, m2, d2, h2]) : dateUTC([y, m, d, h]), amount - 1, 'hour') :
+			span === 'day' ? subtractTime(cursor ? dateUTC([y2, m2, d2, 0]) : dateUTC([y, m, d, 0]), amount - 1, 'day') :
+			span === 'hour' ? subtractTime(cursor ? dateUTC([y2, m2, d2, h2]) : dateUTC([y, m, d, h]), amount - 1, 'hour') :
 			null as never;
 
 		// Fetch the logs
 		let logs = await this.repository.find({
 			where: {
 				group: group,
-				span: span,
 				date: Between(Chart.dateToTimestamp(gt), Chart.dateToTimestamp(lt))
 			},
 			order: {
@@ -413,7 +437,6 @@ export default abstract class Chart<T extends Record<string, any>> {
 			// (we need at least one log, otherwise the gaps cannot be padded)
 			const recentLog = await this.repository.findOne({
 				group: group,
-				span: span
 			}, {
 				order: {
 					date: -1
@@ -430,7 +453,6 @@ export default abstract class Chart<T extends Record<string, any>> {
 			// (otherwise the gaps cannot be padded)
 			const outdatedLog = await this.repository.findOne({
 				group: group,
-				span: span,
 				date: LessThan(Chart.dateToTimestamp(gt))
 			}, {
 				order: {
@@ -445,23 +467,56 @@ export default abstract class Chart<T extends Record<string, any>> {
 
 		const chart: T[] = [];
 
-		// Format the results
-		for (let i = (amount - 1); i >= 0; i--) {
-			const current =
-				span === 'day' ? subtractTime(dateUTC([y, m, d, 0]), i, 'day') :
-				span === 'hour' ? subtractTime(dateUTC([y, m, d, h]), i, 'hour') :
-				null as never;
-
-			const log = logs.find(l => isTimeSame(new Date(l.date * 1000), current));
-
-			if (log) {
-				const data = Chart.convertFlattenColumnsToObject(log as Record<string, any>);
-				chart.unshift(data);
-			} else {
-				// Pad the gap
-				const latest = logs.find(l => isTimeBefore(new Date(l.date * 1000), current));
-				const data = latest ? Chart.convertFlattenColumnsToObject(latest as Record<string, any>) : null;
-				chart.unshift(this.getNewLog(data));
+		if (span === 'hour') {
+			for (let i = (amount - 1); i >= 0; i--) {
+				const current = subtractTime(dateUTC([y, m, d, h]), i, 'hour');
+
+				const log = logs.find(l => isTimeSame(new Date(l.date * 1000), current));
+
+				if (log) {
+					const data = Chart.convertFlattenColumnsToObject(log as Record<string, any>);
+					chart.unshift(Chart.countUniqueFields(data));
+				} else {
+					// Pad the gap
+					const latest = logs.find(l => isTimeBefore(new Date(l.date * 1000), current));
+					const data = latest ? Chart.convertFlattenColumnsToObject(latest as Record<string, any>) : null;
+					chart.unshift(Chart.countUniqueFields(this.getNewLog(data)));
+				}
+			}
+		} else if (span === 'day') {
+			const logsForEachDays: T[][] = [];
+			let currentDay = -1;
+			let currentDayIndex = -1;
+			for (let i = ((amount - 1) * 24) + h; i >= 0; i--) {
+				const current = subtractTime(dateUTC([y, m, d, h]), i, 'hour');
+				const _currentDay = Chart.parseDate(current)[2];
+				if (currentDay != _currentDay) currentDayIndex++;
+				currentDay = _currentDay;
+
+				const log = logs.find(l => isTimeSame(new Date(l.date * 1000), current));
+
+				if (log) {
+					if (logsForEachDays[currentDayIndex]) {
+						logsForEachDays[currentDayIndex].unshift(Chart.convertFlattenColumnsToObject(log));
+					} else {
+						logsForEachDays[currentDayIndex] = [Chart.convertFlattenColumnsToObject(log)];
+					}
+				} else {
+					// Pad the gap
+					const latest = logs.find(l => isTimeBefore(new Date(l.date * 1000), current));
+					const data = latest ? Chart.convertFlattenColumnsToObject(latest as Record<string, any>) : null;
+					const newLog = this.getNewLog(data);
+					if (logsForEachDays[currentDayIndex]) {
+						logsForEachDays[currentDayIndex].unshift(newLog);
+					} else {
+						logsForEachDays[currentDayIndex] = [newLog];
+					}
+				}
+			}
+
+			for (const logs of logsForEachDays) {
+				const log = this.aggregate(logs);
+				chart.unshift(Chart.countUniqueFields(log));
 			}
 		}
 
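Because only hourly rows exist now, getChart(span = 'day') walks backwards hour by hour, buckets the hourly logs into logsForEachDays, and reduces each bucket with the new abstract aggregate() hook (logs are passed newest-first, per the @param note on the abstract method). For the foo.inc/dec/total shape used by the test chart further down, an aggregate() implementation might look like the following; this is a method-body sketch inside a hypothetical Chart<TestLog> subclass, not code from this commit, and the TestLog alias is invented for the example.

	// Sketch only:
	type TestLog = { foo: { total: number; inc: number; dec: number } };

	protected aggregate(logs: TestLog[]): TestLog {
		return {
			foo: {
				total: logs[0].foo.total,                             // newest cumulative value wins
				inc: logs.reduce((sum, log) => sum + log.foo.inc, 0), // per-hour deltas are summed
				dec: logs.reduce((sum, log) => sum + log.foo.dec, 0),
			},
		};
	}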
@@ -473,20 +528,19 @@ export default abstract class Chart<T extends Record<string, any>> {
 		 * { foo: [1, 2, 3], bar: [5, 6, 7] }
 		 * (i.e. turn it into the shape above)
 		 */
-		const dive = (x: Obj, path?: string) => {
+		const compact = (x: Obj, path?: string) => {
 			for (const [k, v] of Object.entries(x)) {
 				const p = path ? `${path}.${k}` : k;
-				if (typeof v == 'object') {
-					dive(v, p);
+				if (typeof v === 'object' && !Array.isArray(v)) {
+					compact(v, p);
 				} else {
-					const values = chart.map(s => nestedProperty.get(s, p))
-						.map(v => parseInt(v, 10)); // due to a TypeORM bug(?) numeric column values come back as strings, so convert them back to numbers
+					const values = chart.map(s => nestedProperty.get(s, p));
 					nestedProperty.set(res, p, values);
 				}
 			}
 		};
 
-		dive(chart[0]);
+		compact(chart[0]);
 
 		return res;
 	}
@@ -10,6 +10,7 @@ import PerUserReactionsChart from './charts/classes/per-user-reactions';
 import HashtagChart from './charts/classes/hashtag';
 import PerUserFollowingChart from './charts/classes/per-user-following';
 import PerUserDriveChart from './charts/classes/per-user-drive';
+import { beforeShutdown } from '../../misc/before-shutdown';
 
 export const federationChart = new FederationChart();
 export const notesChart = new NotesChart();
@@ -23,3 +24,27 @@ export const perUserReactionsChart = new PerUserReactionsChart();
 export const hashtagChart = new HashtagChart();
 export const perUserFollowingChart = new PerUserFollowingChart();
 export const perUserDriveChart = new PerUserDriveChart();
+
+const charts = [
+	federationChart,
+	notesChart,
+	usersChart,
+	networkChart,
+	activeUsersChart,
+	instanceChart,
+	perUserNotesChart,
+	driveChart,
+	perUserReactionsChart,
+	hashtagChart,
+	perUserFollowingChart,
+	perUserDriveChart,
+];
+
+// Write the in-memory data to the DB every 20 minutes
+setInterval(() => {
+	for (const chart of charts) {
+		chart.save();
+	}
+}, 1000 * 60 * 20);
+
+beforeShutdown(() => Promise.all(charts.map(chart => chart.save())));
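Every chart instance is now collected into the charts array so the buffered diffs can be flushed on a 20-minute interval and once more via beforeShutdown. Anything that needs chart writes to be durable immediately (scripts, tests) has to trigger the flush itself, since save() is the only path that reaches the database; a minimal sketch, with an illustrative import path:

	// Force an immediate flush instead of waiting for the 20-minute interval:
	import { notesChart } from './services/chart'; // path shown for illustration only
	await notesChart.save();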
@@ -72,7 +72,7 @@ describe('Chart', () => {
 		testUniqueChart = new TestUniqueChart();
 
 		clock = lolex.install({
-			now: new Date('2000-01-01 00:00:00')
+			now: new Date(Date.UTC(2000, 0, 1, 0, 0, 0))
 		});
 		done();
 	});
@@ -85,6 +85,7 @@ describe('Chart', () => {
 
 	it('Can updates', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, null);
 		const chartDays = await testChart.getChart('day', 3, null);
@@ -108,6 +109,7 @@ describe('Chart', () => {
 
 	it('Can updates (dec)', async(async () => {
 		await testChart.decrement();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, null);
 		const chartDays = await testChart.getChart('day', 3, null);
@@ -154,6 +156,7 @@ describe('Chart', () => {
 		await testChart.increment();
 		await testChart.increment();
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, null);
 		const chartDays = await testChart.getChart('day', 3, null);
@@ -177,10 +180,12 @@ describe('Chart', () => {
 
 	it('Can updates at different times', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('01:00:00');
 
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, null);
 		const chartDays = await testChart.getChart('day', 3, null);
@@ -202,12 +207,45 @@ describe('Chart', () => {
 		});
 	}));
 
+	// Ideally this is how it should behave, but it looks hard to implement, so it is skipped
+	/*
+	it('Can updates at different times without save', async(async () => {
+		await testChart.increment();
+
+		clock.tick('01:00:00');
+
+		await testChart.increment();
+		await testChart.save();
+
+		const chartHours = await testChart.getChart('hour', 3, null);
+		const chartDays = await testChart.getChart('day', 3, null);
+
+		assert.deepStrictEqual(chartHours, {
+			foo: {
+				dec: [0, 0, 0],
+				inc: [1, 1, 0],
+				total: [2, 1, 0]
+			},
+		});
+
+		assert.deepStrictEqual(chartDays, {
+			foo: {
+				dec: [0, 0, 0],
+				inc: [2, 0, 0],
+				total: [2, 0, 0]
+			},
+		});
+	}));
+	*/
+
 	it('Can padding', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('02:00:00');
 
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, null);
 		const chartDays = await testChart.getChart('day', 3, null);
@@ -232,6 +270,7 @@ describe('Chart', () => {
 	// Padding works even when there is not a single log in the requested range
 	it('Can padding from past range', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('05:00:00');
 
@@ -259,8 +298,12 @@ describe('Chart', () => {
 	// Issue #3190
 	it('Can padding from past range 2', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('05:00:00');
 
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, null);
 		const chartDays = await testChart.getChart('day', 3, null);
@@ -284,10 +327,12 @@ describe('Chart', () => {
 
 	it('Can specify offset', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('01:00:00');
 
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, new Date(Date.UTC(2000, 0, 1, 0, 0, 0)));
 		const chartDays = await testChart.getChart('day', 3, new Date(Date.UTC(2000, 0, 1, 0, 0, 0)));
@@ -313,10 +358,12 @@ describe('Chart', () => {
 		clock.tick('00:30:00');
 
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('01:30:00');
 
 		await testChart.increment();
+		await testChart.save();
 
 		const chartHours = await testChart.getChart('hour', 3, new Date(Date.UTC(2000, 0, 1, 0, 0, 0)));
 		const chartDays = await testChart.getChart('day', 3, new Date(Date.UTC(2000, 0, 1, 0, 0, 0)));
@@ -341,6 +388,7 @@ describe('Chart', () => {
 	describe('Grouped', () => {
 		it('Can updates', async(async () => {
 			await testGroupedChart.increment('alice');
+			await testGroupedChart.save();
 
 			const aliceChartHours = await testGroupedChart.getChart('hour', 3, null, 'alice');
 			const aliceChartDays = await testGroupedChart.getChart('day', 3, null, 'alice');
@@ -386,6 +434,7 @@ describe('Chart', () => {
 			await testUniqueChart.uniqueIncrement('alice');
 			await testUniqueChart.uniqueIncrement('alice');
 			await testUniqueChart.uniqueIncrement('bob');
+			await testUniqueChart.save();
 
 			const chartHours = await testUniqueChart.getChart('hour', 3, null);
 			const chartDays = await testUniqueChart.getChart('day', 3, null);
@@ -428,6 +477,7 @@ describe('Chart', () => {
 
 	it('Can resync (2)', async(async () => {
 		await testChart.increment();
+		await testChart.save();
 
 		clock.tick('01:00:00');
 
@@ -358,10 +358,10 @@
     dependencies:
       type-detect "4.0.8"
 
-"@sinonjs/fake-timers@6.0.1":
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40"
-  integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==
+"@sinonjs/fake-timers@7.0.2":
+  version "7.0.2"
+  resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.0.2.tgz#a53e71d4154ee704ea9b36a6d0b0780e246fadd1"
+  integrity sha512-dF84L5YC90gIOegPDCYymPIsDmwMWWSh7BwfDXQYePi8lVIEp7IZ1UVGkME8FjXOsDPxan12x4aaK+Lo6wVh9A==
   dependencies:
     "@sinonjs/commons" "^1.7.0"