feat: Cache count of group members (#7377)

This commit is contained in:
Tom Moor
2024-08-17 17:34:12 -04:00
committed by GitHub
parent c9d4f5038b
commit 0ab8b52582
11 changed files with 183 additions and 150 deletions
@@ -394,7 +394,7 @@ export default class DeliverWebhookTask extends BaseTask<Props> {
subscription,
payload: {
id: event.modelId,
model: model && presentGroup(model),
model: model && (await presentGroup(model)),
},
});
}
@@ -417,7 +417,7 @@ export default class DeliverWebhookTask extends BaseTask<Props> {
payload: {
id: `${event.userId}-${event.modelId}`,
model: model && presentGroupUser(model),
group: model && presentGroup(model.group),
group: model && (await presentGroup(model.group)),
user: model && presentUser(model.user),
},
});
@@ -510,7 +510,7 @@ export default class DeliverWebhookTask extends BaseTask<Props> {
id: event.modelId,
model: model && presentGroupMembership(model),
collection,
group: model && presentGroup(model.group),
group: model && (await presentGroup(model.group)),
},
});
}
@@ -613,7 +613,7 @@ export default class DeliverWebhookTask extends BaseTask<Props> {
id: event.modelId,
model: model && presentDocumentGroupMembership(model),
document,
group: model && presentGroup(model.group),
group: model && (await presentGroup(model.group)),
},
});
}
@@ -4,7 +4,9 @@ import DeleteAttachmentTask from "@server/queues/tasks/DeleteAttachmentTask";
import { buildAttachment, buildDocument } from "@server/test/factories";
import documentPermanentDeleter from "./documentPermanentDeleter";
jest.mock("@server/queues/tasks/DeleteAttachmentTask");
jest.mock("@server/queues/tasks/DeleteAttachmentTask", () => ({
schedule: jest.fn(),
}));
describe("documentPermanentDeleter", () => {
it("should destroy documents", async () => {
+9 -39
View File
@@ -1,13 +1,11 @@
import { InferAttributes, InferCreationAttributes, Op } from "sequelize";
import {
AfterDestroy,
BelongsTo,
Column,
ForeignKey,
Table,
HasMany,
BelongsToMany,
DefaultScope,
DataType,
Scopes,
} from "sequelize-typescript";
@@ -16,30 +14,19 @@ import GroupUser from "./GroupUser";
import Team from "./Team";
import User from "./User";
import ParanoidModel from "./base/ParanoidModel";
import { CounterCache } from "./decorators/CounterCache";
import Fix from "./decorators/Fix";
import Length from "./validators/Length";
import NotContainsUrl from "./validators/NotContainsUrl";
@DefaultScope(() => ({
include: [
{
association: "groupUsers",
required: false,
},
],
}))
@Scopes(() => ({
withMember: (memberId: string) => ({
withMembership: (userId: string) => ({
include: [
{
association: "groupUsers",
required: true,
},
{
association: "members",
required: true,
where: {
userId: memberId,
userId,
},
},
],
@@ -78,35 +65,15 @@ class Group extends ParanoidModel<
@Column
name: string;
// hooks
@AfterDestroy
static async deleteGroupUsers(model: Group) {
if (!model.deletedAt) {
return;
}
await GroupUser.destroy({
where: {
groupId: model.id,
},
});
await GroupMembership.destroy({
where: {
groupId: model.id,
},
});
}
static filterByMember(memberId: string | undefined) {
return memberId
? this.scope({ method: ["withMember", memberId] })
static filterByMember(userId: string | undefined) {
return userId
? this.scope({ method: ["withMembership", userId] })
: this.scope("defaultScope");
}
// associations
@HasMany(() => GroupUser, "groupId")
@HasMany(() => GroupUser, { as: "members", foreignKey: "groupId" })
groupUsers: GroupUser[];
@HasMany(() => GroupMembership, "groupId")
@@ -128,6 +95,9 @@ class Group extends ParanoidModel<
@BelongsToMany(() => User, () => GroupUser)
users: User[];
@CounterCache(() => GroupUser, { as: "members", foreignKey: "groupId" })
memberCount: Promise<number>;
}
export default Group;
+74
View File
@@ -0,0 +1,74 @@
import { InferAttributes } from "sequelize";
import { ModelClassGetter } from "sequelize-typescript";
import { CacheHelper } from "@server/utils/CacheHelper";
import Model from "../base/Model";

type RelationOptions = {
  /** Reference name used in cache key. */
  as: string;
  /** The foreign key to use for the relationship query. */
  foreignKey: string;
};

/**
 * A decorator that caches the count of a relationship and registers model lifecycle hooks
 * to invalidate the cache when models are added or removed from the relationship.
 *
 * The decorated property resolves lazily: reading it returns a `Promise<number>` that is
 * served from the cache when possible and otherwise computed with a `count` query and
 * written back to the cache.
 *
 * @param classResolver Lazily resolves the related model class (avoids circular imports
 *   at module-evaluation time).
 * @param options The relation name (`as`, used in the cache key) and `foreignKey` used
 *   to scope the count query.
 */
export function CounterCache<
  TCreationAttributes extends InferAttributes<Model>,
  TModelAttributes extends InferAttributes<Model>,
  T extends typeof Model
>(
  classResolver: ModelClassGetter<TCreationAttributes, TModelAttributes>,
  options: RelationOptions
) {
  return function (target: InstanceType<T>, _propertyKey: string) {
    const modelClass = classResolver() as typeof Model;
    // NOTE: `target.constructor.name` keys the cache by the owning class name — this
    // assumes class names are stable (e.g. not minified) across processes sharing the cache.
    const cacheKeyPrefix = `count:${target.constructor.name}:${options.as}`;

    // Add hooks after model is added to the sequelize instance
    setImmediate(() => {
      const recalculateCache =
        (offset: number) => async (model: InstanceType<T>) => {
          const cacheKey = `${cacheKeyPrefix}:${model[options.foreignKey]}`;
          const count = await modelClass.count({
            where: {
              [options.foreignKey]: model[options.foreignKey],
            },
          });
          await CacheHelper.setData(cacheKey, count + offset);
        };

      // Because the transaction is not complete until after the response is sent, we need to
      // offset the count by 1 to account for the record. TODO: Need to find a better way to handle
      // this as a rollback would not decrement the count.
      modelClass.addHook("afterCreate", recalculateCache(1));
      modelClass.addHook("afterDestroy", recalculateCache(-1));
    });

    // Replace the decorated property with a lazy getter (returned as a property
    // descriptor; `as any` is required because the decorator's declared return type
    // does not model descriptors).
    return {
      get() {
        const cacheKey = `${cacheKeyPrefix}:${this.id}`;
        return CacheHelper.getData<number>(cacheKey).then((value) => {
          // CacheHelper.getData resolves to `undefined` on a cache miss, so a strict
          // `value !== null` check would return `undefined` here and the count would
          // never be computed. Use a nullish check so both `null` and `undefined`
          // fall through to the database query below.
          if (value != null) {
            return value;
          }

          // calculate and cache count
          return modelClass
            .count({
              where: {
                [options.foreignKey]: this.id,
              },
            })
            .then((count) => {
              // Fire-and-forget cache write; the count is returned regardless of
              // whether the write succeeds.
              void CacheHelper.setData(cacheKey, count);
              return count;
            });
        });
      },
    } as any;
  };
}
+2 -2
View File
@@ -1,10 +1,10 @@
import Group from "@server/models/Group";
export default function presentGroup(group: Group) {
export default async function presentGroup(group: Group) {
return {
id: group.id,
name: group.name,
memberCount: group.groupUsers.length,
memberCount: await group.memberCount,
createdAt: group.createdAt,
updatedAt: group.updatedAt,
};
+12 -15
View File
@@ -296,14 +296,13 @@ export default class WebsocketsProcessor {
}
case "collections.add_group": {
const group = await Group.findByPk(event.modelId);
if (!group) {
return;
}
const groupUsers = await GroupUser.findAll({
where: { groupId: event.modelId },
});
// the users being added are not yet in the websocket channel for the collection
// so they need to be notified separately
for (const groupUser of group.groupUsers) {
for (const groupUser of groupUsers) {
socketio.to(`user-${groupUser.userId}`).emit("collections.add_user", {
event: event.name,
userId: groupUser.userId,
@@ -320,16 +319,14 @@ export default class WebsocketsProcessor {
}
case "collections.remove_group": {
const group = await Group.findByPk(event.modelId);
if (!group) {
return;
}
const [groupUsers, membershipUserIds] = await Promise.all([
GroupUser.findAll({
where: { groupId: event.modelId },
}),
Collection.membershipUserIds(event.collectionId),
]);
const membershipUserIds = await Collection.membershipUserIds(
event.collectionId
);
for (const groupUser of group.groupUsers) {
for (const groupUser of groupUsers) {
if (membershipUserIds.includes(groupUser.userId)) {
// the user still has access through some means...
// treat this like an add, so that the client re-syncs policies
@@ -482,7 +479,7 @@ export default class WebsocketsProcessor {
}
return socketio
.to(`team-${group.teamId}`)
.emit(event.name, presentGroup(group));
.emit(event.name, await presentGroup(group));
}
case "groups.add_user": {
+3 -1
View File
@@ -379,7 +379,9 @@ router.post(
// `collectionGroupMemberships` retained for backwards compatibility remove after version v0.79.0
collectionGroupMemberships: groupMemberships,
groupMemberships,
groups: memberships.map((membership) => presentGroup(membership.group)),
groups: await Promise.all(
memberships.map((membership) => presentGroup(membership.group))
),
},
};
}
+51 -49
View File
@@ -60,14 +60,21 @@ router.post(
ctx.body = {
pagination: ctx.state.pagination,
data: {
groups: groups.map(presentGroup),
groupMemberships: groups
.map((group) =>
group.groupUsers
.filter((groupUser) => !!groupUser.user)
.slice(0, MAX_AVATAR_DISPLAY)
groups: await Promise.all(groups.map(presentGroup)),
groupMemberships: (
await Promise.all(
groups.map((group) =>
GroupUser.findAll({
where: {
groupId: group.id,
},
limit: MAX_AVATAR_DISPLAY,
})
)
)
)
.flat()
.filter((groupUser) => groupUser.user)
.map((groupUser) =>
presentGroupUser(groupUser, { includeUser: true })
),
@@ -89,7 +96,7 @@ router.post(
authorize(user, "read", group);
ctx.body = {
data: presentGroup(group),
data: await presentGroup(group),
policies: presentPolicies(user, [group]),
};
}
@@ -106,7 +113,7 @@ router.post(
const { user } = ctx.state.auth;
const { transaction } = ctx.state;
authorize(user, "createGroup", user.team);
const g = await Group.create(
const group = await Group.create(
{
name,
teamId: user.teamId,
@@ -115,12 +122,6 @@ router.post(
{ transaction }
);
// reload to get default scope
const group = await Group.findByPk(g.id, {
transaction,
rejectOnEmpty: true,
});
await Event.createFromContext(
ctx,
{
@@ -134,7 +135,7 @@ router.post(
);
ctx.body = {
data: presentGroup(group),
data: await presentGroup(group),
policies: presentPolicies(user, [group]),
};
}
@@ -171,7 +172,7 @@ router.post(
}
ctx.body = {
data: presentGroup(group),
data: await presentGroup(group),
policies: presentPolicies(user, [group]),
};
}
@@ -272,7 +273,7 @@ router.post(
const user = await User.findByPk(userId, { transaction });
authorize(actor, "read", user);
let group = await Group.findByPk(id, { transaction });
const group = await Group.findByPk(id, { transaction });
authorize(actor, "update", group);
let groupUser = await GroupUser.findOne({
@@ -284,24 +285,17 @@ router.post(
});
if (!groupUser) {
await group.$add("user", user, {
through: {
groupUser = await GroupUser.create(
{
groupId: group.id,
userId,
createdById: actor.id,
},
transaction,
});
// reload to get default scope
groupUser = await GroupUser.findOne({
where: {
groupId: id,
userId,
},
rejectOnEmpty: true,
transaction,
});
// reload to get default scope
group = await Group.findByPk(id, { transaction, rejectOnEmpty: true });
{
transaction,
}
);
groupUser.user = user;
await Event.createFromContext(
ctx,
@@ -321,7 +315,7 @@ router.post(
data: {
users: [presentUser(user)],
groupMemberships: [presentGroupUser(groupUser, { includeUser: true })],
groups: [presentGroup(group)],
groups: [await presentGroup(group)],
},
};
}
@@ -337,32 +331,40 @@ router.post(
const actor = ctx.state.auth.user;
const { transaction } = ctx.state;
let group = await Group.findByPk(id, { transaction });
const group = await Group.findByPk(id, { transaction });
authorize(actor, "update", group);
const user = await User.findByPk(userId, { transaction });
authorize(actor, "read", user);
await group.$remove("user", user, { transaction });
await Event.createFromContext(
ctx,
{
name: "groups.remove_user",
const groupUser = await GroupUser.unscoped().findOne({
where: {
groupId: id,
userId,
modelId: group.id,
data: {
name: user.name,
},
},
{ transaction }
);
transaction,
lock: transaction.LOCK.UPDATE,
});
// reload to get default scope
group = await Group.findByPk(id, { transaction, rejectOnEmpty: true });
if (groupUser) {
await groupUser.destroy({ transaction });
await Event.createFromContext(
ctx,
{
name: "groups.remove_user",
userId,
modelId: group.id,
data: {
name: user.name,
},
},
{ transaction }
);
}
ctx.body = {
data: {
groups: [presentGroup(group)],
groups: [await presentGroup(group)],
},
};
}
+4 -4
View File
@@ -12,7 +12,7 @@ import validate from "@server/middlewares/validate";
import { Document, Share, Team, User } from "@server/models";
import { authorize } from "@server/policies";
import presentUnfurl from "@server/presenters/unfurl";
import { APIContext } from "@server/types";
import { APIContext, Unfurl } from "@server/types";
import { CacheHelper } from "@server/utils/CacheHelper";
import { Hook, PluginManager } from "@server/utils/PluginManager";
import { RateLimiterStrategy } from "@server/utils/RateLimiter";
@@ -84,7 +84,7 @@ router.post(
}
// External resources
const cachedData = await CacheHelper.getData(
const cachedData = await CacheHelper.getData<Unfurl>(
CacheHelper.getUnfurlKey(actor.teamId, url)
);
if (cachedData) {
@@ -138,11 +138,11 @@ router.post(
let addresses;
try {
addresses = await new Promise<string[]>((resolve, reject) => {
dns.resolveCname(hostname, (err, addresses) => {
dns.resolveCname(hostname, (err, res) => {
if (err) {
return reject(err);
}
return resolve(addresses);
return resolve(res);
});
});
} catch (err) {
+2 -12
View File
@@ -1,9 +1,7 @@
import path from "path";
import { InferAttributes, InferCreationAttributes } from "sequelize";
import { Sequelize } from "sequelize-typescript";
import { Umzug, SequelizeStorage, MigrationError } from "umzug";
import env from "@server/env";
import Model from "@server/models/base/Model";
import Logger from "../logging/Logger";
import * as models from "../models";
@@ -13,15 +11,7 @@ const poolMin = env.DATABASE_CONNECTION_POOL_MIN ?? 0;
const url = env.DATABASE_CONNECTION_POOL_URL || env.DATABASE_URL;
const schema = env.DATABASE_SCHEMA;
export function createDatabaseInstance(
url: string,
models: {
[key: string]: typeof Model<
InferAttributes<Model>,
InferCreationAttributes<Model>
>;
}
) {
export function createDatabaseInstance() {
return new Sequelize(url, {
logging: (msg) =>
process.env.DEBUG?.includes("database") && Logger.debug("database", msg),
@@ -115,7 +105,7 @@ export function createMigrationRunner(
});
}
export const sequelize = createDatabaseInstance(url, models);
export const sequelize = createDatabaseInstance();
export const migrations = createMigrationRunner(sequelize, [
"migrations/*.js",
+19 -23
View File
@@ -1,12 +1,12 @@
import { Day } from "@shared/utils/time";
import Logger from "@server/logging/Logger";
import Redis from "@server/storage/redis";
import { Unfurl, UnfurlSignature } from "@server/types";
/**
* A Helper class for server-side cache management
*/
export class CacheHelper {
// Default expiry time for cache data in ms
private static defaultDataExpiry = Day;
/**
@@ -14,19 +14,17 @@ export class CacheHelper {
*
* @param key Key against which data will be accessed
*/
public static async getData(key: string): ReturnType<UnfurlSignature> {
public static async getData<T>(key: string): Promise<T | undefined> {
try {
const data = await Redis.defaultClient.get(key);
if (data) {
if (data !== null) {
return JSON.parse(data);
}
} catch (err) {
// just log it, response can still be obtained using the fetch call
return Logger.error(
`Could not fetch cached response against ${key}`,
err
);
Logger.error(`Could not fetch cached response against ${key}`, err);
}
return;
}
/**
@@ -36,17 +34,13 @@ export class CacheHelper {
* @param data Data to be saved against the key
* @param expiry Cache data expiry
*/
public static async setData(key: string, data: Unfurl, expiry?: number) {
if ("error" in data) {
return;
}
public static async setData<T>(key: string, data: T, expiry?: number) {
try {
await Redis.defaultClient.set(
key,
JSON.stringify(data),
"EX",
expiry || this.defaultDataExpiry
expiry || CacheHelper.defaultDataExpiry
);
} catch (err) {
// just log it, can skip caching and directly return response
@@ -54,16 +48,6 @@ export class CacheHelper {
}
}
/**
* Gets key against which unfurl response for the given url is stored
*
* @param teamId The team ID to generate a key for
* @param url The url to generate a key for
*/
public static getUnfurlKey(teamId: string, url = "") {
return `unfurl:${teamId}:${url}`;
}
/**
* Clears all cache data with the given prefix
*
@@ -78,4 +62,16 @@ export class CacheHelper {
})
);
}
// keys
/**
* Gets key against which unfurl response for the given url is stored
*
* @param teamId The team ID to generate a key for
* @param url The url to generate a key for
*/
public static getUnfurlKey(teamId: string, url = "") {
return `unfurl:${teamId}:${url}`;
}
}