mirror of https://github.com/trailbaseio/trailbase.git
synced 2025-12-30 14:19:43 -06:00
Minor: Allow clients to override fetch params, fix resizable overflow, and some minor drive-by cleanups.
This commit is contained in:
@@ -1,13 +0,0 @@
-// .prettierrc.mjs
-/** @type {import("prettier").Config} */
-export default {
-  plugins: ['prettier-plugin-astro'],
-  overrides: [
-    {
-      files: '*.astro',
-      options: {
-        parser: 'astro',
-      },
-    },
-  ],
-};
@@ -20,21 +20,20 @@ export function createWindowWidth(): Accessor<number> {
   return width;
 }
 
-function setSizes(v: number[] | ((prev: number[]) => number[])) {
+function setSizes(next: number[]) {
   const prev = $sizes.get();
-  const next: number[] = typeof v === "function" ? v(prev) : v;
   const width = window.innerWidth;
 
   // This is a bit hacky. On destruction Corvu pops panes and removes sizes one by one.
   // So switching between pages we'd always start with empty sizes. We basically just avoid
   // shrinking the array. We also make sure the new relative dimension for element[0] is
   // within range.
-  if (
-    next.length >= prev.length &&
-    next[0] >= minSizePx / width &&
-    next[0] < maxSizePx / width
-  ) {
-    return $sizes.set(next);
+  if (next.length >= prev.length && next.length > 0) {
+    const min = minSizePx / width;
+    const max = maxSizePx / width;
+    const first = Math.min(max, Math.max(min, next[0]));
+
+    return $sizes.set([first, ...next.slice(1)]);
   }
   return prev;
 }
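The comment above explains the intent of the rewritten guard: never shrink the sizes array, and clamp the first pane's relative width into range instead of rejecting out-of-range updates outright. A minimal sketch of that clamping step in isolation, assuming the minSizePx = 160 / maxSizePx = 400 constants from this commit; clampFirst is a hypothetical helper, not code from the repository:

// Clamp the first pane's relative size; the remaining sizes pass through unchanged.
const minSizePx = 160;
const maxSizePx = 400;

function clampFirst(sizes: number[], windowWidth: number): number[] {
  if (sizes.length === 0) return sizes;
  const min = minSizePx / windowWidth; // smallest allowed relative width
  const max = maxSizePx / windowWidth; // largest allowed relative width
  const first = Math.min(max, Math.max(min, sizes[0]));
  return [first, ...sizes.slice(1)];
}

// With a 1000px window, a proposed [0.05, 0.95] becomes [0.16, 0.95].
console.log(clampFirst([0.05, 0.95], 1000));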
@@ -62,7 +61,7 @@ export function SplitView(props: {
       onSizesChange={setSizes}
       orientation="horizontal"
     >
-      <ResizablePanel>
+      <ResizablePanel class="overflow-hidden">
         <props.first horizontal={true} />
       </ResizablePanel>
 
@@ -83,7 +82,7 @@ export function SplitView(props: {
 }
 
 const minSizePx = 160;
-const maxSizePx = 300;
+const maxSizePx = 400;
 
 function initialSize(): number[] {
   const width = window.innerWidth;
@@ -31,79 +31,18 @@ import { Checkbox } from "@/components/ui/checkbox";
 import { DataTable } from "@/components/Table";
 import { Label } from "@/components/ui/label";
 import { AddUser } from "@/components/auth/AddUser";
-import { deleteUser, updateUser } from "@/lib/user";
-import type {
-  UpdateUserRequest,
-  UserJson,
-  ListUsersResponse,
-} from "@/lib/bindings";
+import {
+  deleteUser,
+  updateUser,
+  fetchUsers,
+  type FetchUsersArgs,
+} from "@/lib/user";
+import type { UpdateUserRequest, UserJson } from "@/lib/bindings";
 import {
   buildTextFormField,
   buildOptionalTextFormField,
 } from "@/components/FormFields";
 import { SafeSheet, SheetContainer } from "@/components/SafeSheet";
-import { adminFetch } from "@/lib/fetch";
-
-type FetchArgs = {
-  filter: string | undefined;
-  pageSize: number;
-  pageIndex: number;
-  cursors: string[];
-};
-
-export async function fetchUsers(
-  source: FetchArgs,
-  { value }: { value: ListUsersResponse | undefined },
-): Promise<ListUsersResponse> {
-  const pageIndex = source.pageIndex;
-  const limit = source.pageSize;
-  const cursors = source.cursors;
-
-  const filter = source.filter ?? "";
-  const filterQuery = filter
-    .split("AND")
-    .map((frag) => frag.trim().replaceAll(" ", ""))
-    .join("&");
-
-  console.log("QUERY: ", filterQuery);
-
-  const params = new URLSearchParams(filterQuery);
-  params.set("limit", limit.toString());
-
-  // Build the next UUIDv7 "cursor" from previous response and update local
-  // cursor stack. If we're paging forward we add new cursors, otherwise we're
-  // re-using previously seen cursors for consistency. We reset if we go back
-  // to the start.
-  if (pageIndex === 0) {
-    cursors.length = 0;
-  } else {
-    const index = pageIndex - 1;
-    if (index < cursors.length) {
-      // Already known page
-      params.set("cursor", cursors[index]);
-    } else {
-      // New page case: use cursor from previous response or fall back to more
-      // expensive and inconsistent offset-based pagination.
-      const cursor = value?.cursor;
-      if (cursor) {
-        cursors.push(cursor);
-        params.set("cursor", cursor);
-      } else {
-        params.set("offset", `${pageIndex * source.pageSize}`);
-      }
-    }
-  }
-
-  try {
-    const response = await adminFetch(`/user?${params}`);
-    return await response.json();
-  } catch (err) {
-    if (value) {
-      return value;
-    }
-    throw err;
-  }
-}
 
 const columnHelper = createColumnHelper<UserJson>();
 
@@ -114,12 +53,15 @@ function buildColumns(
   return [
     {
       header: "id",
      accessorFn: ({ id }) => id,
      accessorKey: "id",
    },
-    {
-      header: "email",
-      accessorKey: "email",
-    },
+    columnHelper.accessor("email", { header: "email" }) as ColumnDef<UserJson>,
     {
       header: "verified",
       accessorFn: ({ verified }) => Boolean(verified),
       accessorKey: "verified",
     },
+    columnHelper.accessor("id", {
+      header: "Admin",
@@ -257,7 +199,7 @@ export function UserTable() {
   });
   const cursors: string[] = [];
 
-  const buildFetchArgs = (): FetchArgs => ({
+  const buildFetchArgs = (): FetchUsersArgs => ({
     pageSize: pagination().pageSize,
     pageIndex: pagination().pageIndex,
     cursors: cursors,
@@ -1,4 +1,8 @@
-import type { UpdateUserRequest, CreateUserRequest } from "@/lib/bindings";
+import type {
+  UpdateUserRequest,
+  CreateUserRequest,
+  ListUsersResponse,
+} from "@/lib/bindings";
 import { adminFetch } from "@/lib/fetch";
 
 export async function createUser(request: CreateUserRequest) {
@@ -33,3 +37,64 @@ export async function updateUser(request: UpdateUserRequest) {
     body: JSON.stringify(request),
   });
 }
+
+export type FetchUsersArgs = {
+  filter: string | undefined;
+  pageSize: number;
+  pageIndex: number;
+  cursors: string[];
+};
+
+export async function fetchUsers(
+  source: FetchUsersArgs,
+  { value }: { value: ListUsersResponse | undefined },
+): Promise<ListUsersResponse> {
+  const pageIndex = source.pageIndex;
+  const limit = source.pageSize;
+  const cursors = source.cursors;
+
+  const filter = source.filter ?? "";
+  const filterQuery = filter
+    .split("AND")
+    .map((frag) => frag.trim().replaceAll(" ", ""))
+    .join("&");
+
+  console.log("QUERY: ", filterQuery);
+
+  const params = new URLSearchParams(filterQuery);
+  params.set("limit", limit.toString());
+
+  // Build the next UUIDv7 "cursor" from previous response and update local
+  // cursor stack. If we're paging forward we add new cursors, otherwise we're
+  // re-using previously seen cursors for consistency. We reset if we go back
+  // to the start.
+  if (pageIndex === 0) {
+    cursors.length = 0;
+  } else {
+    const index = pageIndex - 1;
+    if (index < cursors.length) {
+      // Already known page
+      params.set("cursor", cursors[index]);
+    } else {
+      // New page case: use cursor from previous response or fall back to more
+      // expensive and inconsistent offset-based pagination.
+      const cursor = value?.cursor;
+      if (cursor) {
+        cursors.push(cursor);
+        params.set("cursor", cursor);
+      } else {
+        params.set("offset", `${pageIndex * source.pageSize}`);
+      }
+    }
+  }
+
+  try {
+    const response = await adminFetch(`/user?${params}`);
+    return await response.json();
+  } catch (err) {
+    if (value) {
+      return value;
+    }
+    throw err;
+  }
+}
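The cursor-stack comment above is the heart of the pagination scheme that moved into this module. A minimal sketch of how a caller might drive fetchUsers across page changes, using the FetchUsersArgs shape added in this commit; the driver loop and literal values are illustrative only, not repository code:

import { fetchUsers, type FetchUsersArgs } from "@/lib/user";
import type { ListUsersResponse } from "@/lib/bindings";

async function pageThroughUsers() {
  const cursors: string[] = [];
  let previous: ListUsersResponse | undefined = undefined;

  // Paging forward pushes the cursor from the previous response onto the
  // stack; going back reuses cursors[pageIndex - 1]; pageIndex 0 clears it.
  for (const pageIndex of [0, 1, 2, 1, 0]) {
    const source: FetchUsersArgs = {
      filter: undefined,
      pageSize: 20,
      pageIndex,
      cursors,
    };
    previous = await fetchUsers(source, { value: previous });
  }
}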
@@ -271,9 +271,9 @@ class ThinClient {
     }
 
     const response = await fetch(`${this.site}/${path}`, {
-      ...init,
       credentials: isDev ? "include" : "same-origin",
      headers: tokenState.headers,
+      ...init,
    });
 
    return response;
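Moving ...init after the defaults is what the commit title means by letting clients override fetch params: with object spread, later entries win. A minimal sketch of that spread-order effect; the default and override values below are illustrative only:

// Spread order decides which side wins when keys collide.
const defaults: RequestInit = {
  credentials: "same-origin",
  headers: { "Content-Type": "application/json" },
};
const init: RequestInit = { credentials: "include" };

const before: RequestInit = { ...init, ...defaults }; // credentials stays "same-origin" (default wins)
const after: RequestInit = { ...defaults, ...init };  // credentials becomes "include" (caller wins)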
@@ -439,6 +439,8 @@ export class Client {
   public async refreshAuthToken(): Promise<void> {
     const refreshToken = Client.shouldRefresh(this._tokenState);
     if (refreshToken) {
+      // TODO: Unset token state if refresh fails with unauthorized status.
+      // TODO: In either case we should call the authChange, e.g. so that users can persist the new token.
       this._tokenState = await this.refreshTokensImpl(refreshToken);
     }
   }
@@ -516,14 +518,11 @@ function _isDev(): boolean {
 const isDev = _isDev();
 
 export function headers(tokens?: Tokens): HeadersInit {
-  const base = {
-    "Content-Type": "application/json",
-  };
-
   if (tokens) {
     const { auth_token, refresh_token, csrf_token } = tokens;
     return {
-      ...base,
+      "Content-Type": "application/json",
+
       ...(auth_token && {
         Authorization: `Bearer ${auth_token}`,
       }),
@@ -536,7 +535,9 @@ export function headers(tokens?: Tokens): HeadersInit {
     };
   }
 
-  return base;
+  return {
+    "Content-Type": "application/json",
+  };
 }
 
 export function textEncode(s: string): Uint8Array {