Mirror of https://github.com/makeplane/plane.git (synced 2026-01-26 16:19:43 -06:00)
* use common getIssues from issue service instead of multiple different services for modules and cycles
* Use SQLite to store issues locally and load issues from it.
* Fix incorrect total count and filtering on assignees.
* enable parallel API calls
* chore: deleted issue list
* Handle local mutations; implement getting the updates; use SWR to update/sync data
* Wait for sync to complete in get issues
* Fix build errors
* Fix build issue
* Sync updates to local-db; fall back to server when the local data is loading; wait when the updates are being fetched
* Add issues in batches
* Disable skeleton loaders for first 10 issues
* Load issues in bulk
* working version of SQLite with grouped issues
* Use window queries for group by
* Fix sort by date fields; fix the total count
* Fix grouping by created by; fix order by and limit
* fix pagination
* Fix sorting on issue priority
* Add secondary sort order; fix group by priority
* chore: added timestamp filter for deleted issues
* Extract local DB into its own class; implement sorting by label names
* Implement subgroup by
* sub group by changes
* Refactor query constructor
* Insert or update issues instead of directly adding them.
* Segregated queries. Not working though!!
* Get filtered issues and then group them; clean up code; implement order by labels.
* Fix build issues
* Remove debuggers
* remove loaders while changing sorting or applying filters
* fix loader while clearing all filters
* Fix issue with project being synced twice
* Improve project sync
* Optimize the queries
* Make create dummy data more realistic
* dev: added total pages in the global paginator
* chore: updated total_paged count
* chore: added state_group in the issues pagination
* chore: removed deleted_at from the issue pagination payload
* chore: replaced state_group with state__group
* Integrate new getIssues API, and fix sync issues bug.
* Fix issue with SWR running twice in workspace wrapper
* Fix DB initialization called when opening project for the first time.
* Add all the tables required for sorting
* Exclude description from getIssues
* Add getIssue function.
* Add only selected fields to get query.
* Fix the count query
* Minor query optimization when no joins are required.
* fetch issue description from local db
* clear local db on signout
* Correct dummy data creation
* Fix sort by assignee
* sync to local changes
* chore: added archived issues in the deleted endpoint
* Sync deletes to local db.
* Add missing indexes for tables used in sorting in spreadsheet layout; add options table
* Make fallback optional in getOption
* Kanban column virtualization
* persist project sync readiness to sqlite and use that as the source of truth for the project issues to be ready
* fix build errors
* Fix calendar view
* fetch slimmed-down version of modules in project wrapper
* fetch toned-down modules and then fetch complete modules
* Fix multi value order by in spreadsheet layout
* Fix sort by
* Fix the query when ordering by multi field names
* Remove unused import
* Fix sort by multi value fields
* Format queries and fix order by
* fix order by for multi issue
* fix loaders for spreadsheet
* Fall back to manual order when moving away from spreadsheet layout
* fix minor bug
* Move fix for order_by when switching from spreadsheet layout to translateQueryParams
* fix default rendering of kanban groups
* Fix none priority being saved as null
* Remove debugger statement
* Fix issue load
* chore: updated issue paginated query from to
* Fix sub issues and start and target date filters
* Fix active and backlog filter
* Add default order by
* Update the Query param to match with backend.
* local sqlite db versioning
* When window is hidden, do not perform any db versioning
* fix error handling and fall back to server when database errors out
* Add ability to disable local db cache
* remove db version check from getIssues function
* change db version to number and remove workspaceInitPromise in storage.sqlite
* Sync the entire workspace in the background; add get sub issue method with distribution
* Make changes to get issues for sync to match backend.
* chore: handled workspace and project in v2 paginated issues
* disable issue description and title until fetched from server
* sync issues post bulk operations
* fix server error
* fix front end build
* Remove full workspace sync
* Remove the toast message on sync; update the disable local message.
* Add hardcoded constant to disable the local db caching
* fix lint errors
* Fix order by in grouping
* update yarn lock
* fix build
* fix plane-web imports
* address review comments

--------

Co-authored-by: rahulramesha <rahulramesham@gmail.com>
Co-authored-by: NarayanBavisetti <narayan3119@gmail.com>
Co-authored-by: gurusainath <gurusainath007@gmail.com>
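These changes land in the storage.sqlite module shown below, which wraps a sqlite-wasm database persisted in OPFS behind a Storage class and a shared persistence instance. A minimal usage sketch, assuming a caller that already has the workspace slug, project id, and issue query params, and assuming the import path (the real alias in the app may differ):

    // Illustrative only; mirrors the exports visible in this file, not a documented public API.
    import { persistence } from "@/local-db/storage.sqlite"; // assumed path

    const ready = await persistence.initialize(workspaceSlug); // no-op when the tab is hidden or local DB caching is disabled
    if (ready) await persistence.syncProject(projectId); // loads or incrementally syncs the project's issues into SQLite
    const page = await persistence.getIssues(workspaceSlug, projectId, queries, config); // falls back to the server while loading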
421 lines · 13 KiB · TypeScript
import set from "lodash/set";
// plane
import { EIssueGroupBYServerToProperty } from "@plane/constants";
import { TIssue } from "@plane/types";
// lib
import { rootStore } from "@/lib/store-context";
// services
import { IssueService } from "@/services/issue/issue.service";
//
import { ARRAY_FIELDS } from "./utils/constants";
import { getSubIssuesWithDistribution } from "./utils/data.utils";
import createIndexes from "./utils/indexes";
import { addIssuesBulk, syncDeletesToLocal } from "./utils/load-issues";
import { loadWorkSpaceData } from "./utils/load-workspace";
import { issueFilterCountQueryConstructor, issueFilterQueryConstructor } from "./utils/query-constructor";
import { runQuery } from "./utils/query-executor";
import { createTables } from "./utils/tables";
import { getGroupedIssueResults, getSubGroupedIssueResults } from "./utils/utils";

declare module "@sqlite.org/sqlite-wasm" {
  export function sqlite3Worker1Promiser(...args: any): any;
}

const DB_VERSION = 1;
const PAGE_SIZE = 1000;
const BATCH_SIZE = 200;
const log = console.log;
const error = console.error;
const info = console.info;

type TProjectStatus = {
  issues: { status: undefined | "loading" | "ready" | "error" | "syncing"; sync: Promise<void> | undefined };
};

type TDBStatus = "initializing" | "ready" | "error" | undefined;

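/**
 * Workspace-scoped persistence layer backed by SQLite (sqlite-wasm) running in a
 * worker, with the database stored in OPFS. Issues are synced from the server per
 * project and then served locally for listing, grouping, and counting; callers
 * fall back to the server while a project is still loading or when the local DB
 * cache is disabled.
 */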
export class Storage {
  db: any;
  status: TDBStatus = undefined;
  dbName = "plane";
  projectStatus: Record<string, TProjectStatus> = {};
  workspaceSlug: string = "";

  constructor() {
    this.db = null;
  }

  reset = () => {
    this.db = null;
    this.status = undefined;
    this.projectStatus = {};
    this.workspaceSlug = "";
  };

  clearStorage = async () => {
    try {
      const storageManager = window.navigator.storage;
      const fileSystemDirectoryHandle = await storageManager.getDirectory();
      //@ts-expect-error
      await fileSystemDirectoryHandle.remove({ recursive: true });
    } catch (e) {
      console.error("Error clearing sqlite sync storage", e);
    }
  };

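  /**
   * Public entry point for opening the workspace database. Skips work when the
   * tab is hidden or the local DB cache is disabled, resets state when the
   * workspace changes, and records an "error" status if initialization throws.
   */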
  initialize = async (workspaceSlug: string): Promise<boolean> => {
    // bail out when the window is hidden or the local DB cache is disabled
    if (document.hidden || !rootStore.user.localDBEnabled) return false;

    if (workspaceSlug !== this.workspaceSlug) {
      this.reset();
    }
    try {
      await this._initialize(workspaceSlug);
      return true;
    } catch (err) {
      error(err);
      this.status = "error";
      return false;
    }
  };

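  /**
   * Performs the actual SQLite setup: loads the sqlite-wasm worker, opens an
   * OPFS-backed database named after the workspace, wraps the worker promiser's
   * `exec` call, clears and re-initializes storage when the persisted DB_VERSION
   * does not match, then creates the tables and records the current version.
   */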
  _initialize = async (workspaceSlug: string): Promise<boolean> => {
    if (this.status === "initializing") {
      console.warn(`Initialization already in progress for workspace ${workspaceSlug}`);
      return false;
    }
    if (this.status === "ready") {
      console.warn(`Already initialized for workspace ${workspaceSlug}`);
      return true;
    }
    if (this.status === "error") {
      console.warn(`Initialization failed for workspace ${workspaceSlug}`);
      return false;
    }

    info("Loading and initializing SQLite3 module...");

    this.workspaceSlug = workspaceSlug;
    this.dbName = workspaceSlug;
    const { sqlite3Worker1Promiser } = await import("@sqlite.org/sqlite-wasm");

    try {
      const promiser: any = await new Promise((resolve) => {
        const _promiser = sqlite3Worker1Promiser({
          onready: () => resolve(_promiser),
        });
      });

      const configResponse = await promiser("config-get", {});
      log("Running SQLite3 version", configResponse.result.version.libVersion);

      const openResponse = await promiser("open", {
        filename: `file:${this.dbName}.sqlite3?vfs=opfs`,
      });
      const { dbId } = openResponse;
      this.db = {
        dbId,
        exec: async (val: any) => {
          if (typeof val === "string") {
            val = { sql: val };
          }
          return promiser("exec", { dbId, ...val });
        },
      };

      // clear the local DB and re-initialize when the stored DB version does not match
      const dbVersion = await this.getOption("DB_VERSION");
      if (dbVersion !== "" && parseInt(dbVersion) !== DB_VERSION) {
        await this.clearStorage();
        this.reset();
        await this._initialize(workspaceSlug);
        return false;
      }

      log(
        "OPFS is available, created persisted database at",
        openResponse.result.filename.replace(/^file:(.*?)\?vfs=opfs$/, "$1")
      );
      this.status = "ready";

      await createTables();

      await this.setOption("DB_VERSION", DB_VERSION.toString());
    } catch (err) {
      error(err);
      throw err;
    }

    return true;
  };

  syncWorkspace = async () => {
    // bail out when the window is hidden or the local DB cache is disabled
    if (document.hidden || !rootStore.user.localDBEnabled) return;
    loadWorkSpaceData(this.workspaceSlug);
  };

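  /**
   * Syncs a single project's issues into the local DB. The commented-out block
   * below appears to be an earlier attempt at syncing the remaining projects of
   * the workspace in the background and is currently disabled.
   */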
  syncProject = async (projectId: string) => {
    // bail out when the window is hidden or the local DB cache is disabled
    if (document.hidden || !rootStore.user.localDBEnabled) return false;

    // Load labels, members, states, modules, cycles
    await this.syncIssues(projectId);

    // // Sync rest of the projects
    // const projects = await getProjectIds();

    // // Exclude the one we just synced
    // const projectsToSync = projects.filter((p: string) => p !== projectId);
    // for (const project of projectsToSync) {
    //   await delay(8000);
    //   await this.syncIssues(project);
    // }
    // this.setOption("workspace_synced_at", new Date().toISOString());
  };

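  /**
   * Tracks the in-flight `_syncIssues` promise for the project and marks the
   * project status as "error" if the sync fails.
   */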
  syncIssues = async (projectId: string) => {
    // bail out when the window is hidden or the local DB cache is disabled
    if (document.hidden || !rootStore.user.localDBEnabled) return false;

    try {
      const sync = this._syncIssues(projectId);
      this.setSync(projectId, sync);
      await sync;
    } catch (e) {
      this.setStatus(projectId, "error");
    }
  };

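  /**
   * Incremental issue sync for a project:
   * - skips when a load/sync is already in progress,
   * - pages through the sync endpoint in PAGE_SIZE chunks (scoped with
   *   `updated_at__gt` when a previous sync time exists) and writes issues in
   *   BATCH_SIZE batches,
   * - mirrors server-side deletions for incremental syncs,
   * - creates indexes after the first full load and marks the project "ready".
   */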
  _syncIssues = async (projectId: string) => {
    console.log("### Sync started");
    let status = this.getStatus(projectId);
    if (status === "loading" || status === "syncing") {
      info(`Project ${projectId} is already loading or syncing`);
      return;
    }
    const syncPromise = this.getSync(projectId);

    if (syncPromise) {
      // Redundant check?
      return;
    }

    const queryParams: { cursor: string; updated_at__gt?: string; description: boolean } = {
      cursor: `${PAGE_SIZE}:0:0`,
      description: true,
    };

    const syncedAt = await this.getLastSyncTime(projectId);
    const projectSync = await this.getOption(projectId);

    if (syncedAt) {
      queryParams["updated_at__gt"] = syncedAt;
    }

    this.setStatus(projectId, projectSync === "ready" ? "syncing" : "loading");
    status = this.getStatus(projectId);

    log(`### ${projectSync === "ready" ? "Syncing" : "Loading"} issues to local db for project ${projectId}`);

    const start = performance.now();
    const issueService = new IssueService();

    const response = await issueService.getIssuesForSync(this.workspaceSlug, projectId, queryParams);
    addIssuesBulk(response.results, BATCH_SIZE);

    if (response.total_pages > 1) {
      const promiseArray = [];
      for (let i = 1; i < response.total_pages; i++) {
        queryParams.cursor = `${PAGE_SIZE}:${i}:0`;
        promiseArray.push(issueService.getIssuesForSync(this.workspaceSlug, projectId, queryParams));
      }
      const pages = await Promise.all(promiseArray);
      for (const page of pages) {
        await addIssuesBulk(page.results, BATCH_SIZE);
      }
    }

    if (syncedAt) {
      await syncDeletesToLocal(this.workspaceSlug, projectId, { updated_at__gt: syncedAt });
    }
    console.log("### Time taken to add issues", performance.now() - start);

    if (status === "loading") {
      await createIndexes();
    }
    this.setOption(projectId, "ready");
    this.setStatus(projectId, "ready");
    this.setSync(projectId, undefined);
  };

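  // Lightweight reads over the local `issues` table. The last sync time is derived
  // from the most recently updated issue stored locally and is used as the
  // `updated_at__gt` watermark for incremental syncs.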
  getIssueCount = async (projectId: string) => {
    const count = await runQuery(`select count(*) as count from issues where project_id='${projectId}'`);
    return count[0]["count"];
  };

  getLastUpdatedIssue = async (projectId: string) => {
    const lastUpdatedIssue = await runQuery(
      `select id, name, updated_at, sequence_id from issues where project_id='${projectId}' order by datetime(updated_at) desc limit 1`
    );

    if (lastUpdatedIssue.length) {
      return lastUpdatedIssue[0];
    }
    return;
  };

  getLastSyncTime = async (projectId: string) => {
    const issue = await this.getLastUpdatedIssue(projectId);
    if (!issue) {
      return false;
    }
    return issue.updated_at;
  };

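  /**
   * Local-first issue listing. Falls back to the server when the DB or this
   * project's issues are not ready (or the local DB cache is disabled).
   * Otherwise runs the constructed filter and count queries in parallel,
   * formats the rows, and groups/sub-groups the first page so the response
   * shape matches the paginated server API.
   */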
  getIssues = async (workspaceSlug: string, projectId: string, queries: any, config: any) => {
    console.log("#### Queries", queries);

    const currentProjectStatus = this.getStatus(projectId);
    if (
      !currentProjectStatus ||
      this.status !== "ready" ||
      currentProjectStatus === "loading" ||
      currentProjectStatus === "error" ||
      !rootStore.user.localDBEnabled
    ) {
      info(`Project ${projectId} is loading, falling back to server`);
      const issueService = new IssueService();
      return await issueService.getIssuesFromServer(workspaceSlug, projectId, queries);
    }

    const { cursor, group_by, sub_group_by } = queries;

    const query = issueFilterQueryConstructor(this.workspaceSlug, projectId, queries);
    const countQuery = issueFilterCountQueryConstructor(this.workspaceSlug, projectId, queries);
    const start = performance.now();
    const [issuesRaw, count] = await Promise.all([runQuery(query), runQuery(countQuery)]);
    // const issuesRaw = await runQuery(query);
    const end = performance.now();

    const { total_count } = count[0];
    // const total_count = 2300;

    const [pageSize, page, offset] = cursor.split(":");

    const groupByProperty: string =
      EIssueGroupBYServerToProperty[group_by as keyof typeof EIssueGroupBYServerToProperty];
    const subGroupByProperty =
      EIssueGroupBYServerToProperty[sub_group_by as keyof typeof EIssueGroupBYServerToProperty];

    const parsingStart = performance.now();
    let issueResults = issuesRaw.map((issue: any) => formatLocalIssue(issue));

    console.log("#### Issue Results", issueResults.length);

    const parsingEnd = performance.now();

    const grouping = performance.now();
    if (groupByProperty && page === "0") {
      if (subGroupByProperty) {
        issueResults = getSubGroupedIssueResults(issueResults);
      } else {
        issueResults = getGroupedIssueResults(issueResults);
      }
    }
    const groupingEnd = performance.now();

    const times = {
      IssueQuery: end - start,
      Parsing: parsingEnd - parsingStart,
      Grouping: groupingEnd - grouping,
    };
    console.log(issueResults);
    console.table(times);

    const total_pages = Math.ceil(total_count / Number(pageSize));
    const next_page_results = total_pages > parseInt(page) + 1;

    const out = {
      results: issueResults,
      next_cursor: `${pageSize}:${parseInt(page) + 1}:${Number(offset) + Number(pageSize)}`,
      prev_cursor: `${pageSize}:${parseInt(page) - 1}:${Number(offset) - Number(pageSize)}`,
      total_results: total_count,
      total_count,
      next_page_results,
      total_pages,
    };

    return out;
  };

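  /**
   * Returns a single issue from the local DB by id, or undefined when the local
   * DB cache is disabled, the issue is not cached, or the query fails.
   */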
  getIssue = async (issueId: string) => {
    try {
      if (!rootStore.user.localDBEnabled) return;

      const issues = await runQuery(`select * from issues where id='${issueId}'`);
      if (issues.length) {
        return formatLocalIssue(issues[0]);
      }
    } catch (err) {
      console.warn("unable to fetch issue from local db");
    }

    return;
  };

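  /**
   * Sub-issues are served from the local DB only once a full workspace sync has
   * been recorded via the `workspace_synced_at` option; until then the request
   * is proxied to the server.
   */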
  getSubIssues = async (workspaceSlug: string, projectId: string, issueId: string) => {
    const workspace_synced_at = await this.getOption("workspace_synced_at");
    if (!workspace_synced_at) {
      const issueService = new IssueService();
      return await issueService.subIssues(workspaceSlug, projectId, issueId);
    }
    return await getSubIssuesWithDistribution(issueId);
  };

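  // Per-project in-memory status and in-flight sync promise, kept on `projectStatus`.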
  getStatus = (projectId: string) => this.projectStatus[projectId]?.issues?.status || undefined;
  setStatus = (projectId: string, status: "loading" | "ready" | "error" | "syncing" | undefined = undefined) => {
    set(this.projectStatus, `${projectId}.issues.status`, status);
  };

  getSync = (projectId: string) => this.projectStatus[projectId]?.issues?.sync;
  setSync = (projectId: string, sync: Promise<void> | undefined) => {
    set(this.projectStatus, `${projectId}.issues.sync`, sync);
  };

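  // Simple key/value helpers over the `options` table (used for DB_VERSION,
  // per-project sync readiness, and the workspace sync timestamp).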
  getOption = async (key: string, fallback = "") => {
    try {
      const options = await runQuery(`select * from options where key='${key}'`);
      if (options.length) {
        return options[0].value;
      }

      return fallback;
    } catch (e) {
      return fallback;
    }
  };
  setOption = async (key: string, value: string) => {
    await runQuery(`insert or replace into options (key, value) values ('${key}', '${value}')`);
  };

  getOptions = async (keys: string[]) => {
    const options = await runQuery(`select * from options where key in ('${keys.join("','")}')`);
    return options.reduce((acc: any, option: any) => {
      acc[option.key] = option.value;
      return acc;
    }, {});
  };
}

export const persistence = new Storage();

/**
 * Formats an issue row fetched from the local DB into a TIssue by parsing the
 * JSON-encoded ARRAY_FIELDS columns back into arrays.
 * @param issue raw row returned by the SQLite query
 * @returns the issue shaped as a TIssue
 */
export const formatLocalIssue = (issue: any) => {
  const currIssue = issue;
  ARRAY_FIELDS.forEach((field: string) => {
    currIssue[field] = currIssue[field] ? JSON.parse(currIssue[field]) : [];
  });
  return currIssue as TIssue;
};