fix(notifications): resolve ID mismatches, counter bugs, and pagination drift

This commit addresses several critical stability issues in the notification system spanning the legacy PHP script, the Node.js API, and the Vue frontend.

**Backend / API:**
- **Refactor `notify` script (PHP):** Added `-u` flag to accept a custom filename/ID from the caller. Added filename length sanitization (255 chars) to prevent filesystem errors.
- **Fix ID Mismatch:** The Node API now generates a UUID and passes it to the `notify` script via `-u`. This guarantees that the API, Frontend, and Filesystem all reference the same ID, removing the need for the special-case "risky" notification handling (titles over 100 chars or containing non-ASCII characters).
- **Fix Counter Bugs:**
    - `handleNotificationAdd` no longer ignores duplicate files in the archive.
    - `archiveNotification` now checks if a file exists in the archive before moving. If it exists, it simply deletes the unread copy without double-counting.
    - `archiveAll` now delegates each item to `archiveNotification`, so the archive-exists check and counter updates are applied consistently instead of going through the separate bulk-move path.

**Frontend (Web):**
- **Fix Infinite Scroll "Drift":**
    - Switched `List.vue` to use a **Debounced Refetch** (500ms) for subscription updates instead of manual cache manipulation. This handles rapid-fire events (mass adds) without corruption.
    - Increased `pageSize` to `50` to minimize race conditions where new items shift pagination offsets.
    - Added **Drift Detection**: If `fetchMore` returns a full page of duplicates (indicating the list has shifted), the component now automatically triggers a full refetch to self-heal.
This commit is contained in:
Ajit Mehrotra
2025-12-11 14:46:31 -05:00
parent 6a04a06a72
commit e3bf57184b
2 changed files with 164 additions and 63 deletions

View File

@@ -114,16 +114,9 @@ export class NotificationsService {
const type = path.includes('/unread/') ? NotificationType.UNREAD : NotificationType.ARCHIVE;
this.logger.debug(`[handleNotificationAdd] Adding ${type} Notification: ${path}`);
// If it's an archive notification, and we have the same notification in unread, ignore it
// This prevents double counting when the legacy script writes to both locations
if (type === NotificationType.ARCHIVE) {
const id = this.getIdFromPath(path);
const unreadPath = join(this.paths().UNREAD, id);
if (await fileExists(unreadPath)) {
this.logger.debug(`[handleNotificationAdd] Ignoring ARCHIVE duplicate: ${path}`);
return;
}
}
// Note: We intentionally track duplicate files (files in both unread and archive)
// because the frontend relies on (Archive Total - Unread Total) to calculate the
// "Archived Only" count. If we ignore duplicates here, the math breaks.
let notification: Notification | undefined;
let lastError: unknown;
@@ -161,6 +154,10 @@ export class NotificationsService {
});
void this.publishWarningsAndAlerts();
}
// Also publish overview updates for archive adds, so counts stay in sync
if (type === NotificationType.ARCHIVE) {
this.publishOverview();
}
}
/**
@@ -261,28 +258,8 @@ export class NotificationsService {
const fileData = this.makeNotificationFileData(data);
try {
// For risky notifications (long titles, unicode), the legacy script might fail silently
// or produce unexpected filenames/collisions.
// We use the legacy script for email/agents, but handle unread file creation ourselves.
const hasNonAscii = (str: string) => [...str].some((char) => char.charCodeAt(0) >= 0x80);
const isRisky =
data.title.length > 100 || hasNonAscii(data.title) || hasNonAscii(data.subject);
const [command, args] = this.getLegacyScriptArgs(fileData);
if (isRisky) {
// Pass -b to suppress browser notification (unread file) creation in legacy script
args.push('-b');
}
const [command, args] = this.getLegacyScriptArgs(fileData, id);
await execa(command, args);
// If risky, we MUST create the file manually (since we suppressed it)
if (isRisky) {
const path = join(this.paths().UNREAD, id);
const ini = encodeIni(fileData);
await writeFile(path, ini);
}
} catch (error) {
// manually write the file if the script fails entirely
this.logger.debug(`[createNotification] legacy notifier failed: ${error}`);
@@ -308,7 +285,7 @@ export class NotificationsService {
* @param notification The notification to be converted to command line arguments.
* @returns A 2-element tuple containing the legacy notifier command and arguments.
*/
public getLegacyScriptArgs(notification: NotificationIni): [string, string[]] {
public getLegacyScriptArgs(notification: NotificationIni, id?: string): [string, string[]] {
const { event, subject, description, link, importance } = notification;
const args = [
['-i', importance],
@@ -319,6 +296,9 @@ export class NotificationsService {
if (link) {
args.push(['-l', link]);
}
if (id) {
args.push(['-u', id]);
}
return ['/usr/local/emhttp/webGui/scripts/notify', args.flat()];
}
@@ -496,6 +476,7 @@ export class NotificationsService {
public async archiveNotification({ id }: Pick<Notification, 'id'>): Promise<Notification> {
const unreadPath = join(this.paths().UNREAD, id);
const archivePath = join(this.paths().ARCHIVE, id);
// We expect to only archive 'unread' notifications, but it's possible that the notification
// has already been archived or deleted (e.g. retry logic, spike in network latency).
@@ -515,12 +496,32 @@ export class NotificationsService {
*------------------------**/
const snapshot = this.getOverview();
const notification = await this.loadNotificationFile(unreadPath, NotificationType.UNREAD);
const moveToArchive = this.moveNotification({
from: NotificationType.UNREAD,
to: NotificationType.ARCHIVE,
snapshot,
});
await moveToArchive(notification);
// Update stats
this.decrement(notification.importance, NotificationsService.overview.unread);
if (snapshot) {
this.decrement(notification.importance, snapshot.unread);
}
if (await fileExists(archivePath)) {
// File already in archive, just delete the unread one
await unlink(unreadPath);
// CRITICAL FIX: If the file already existed in archive, it should have been counted
// by handleNotificationAdd (since we removed the ignore logic).
// Therefore, we do NOT increment the archive count here to avoid double counting.
} else {
// File not in archive, move it there
await rename(unreadPath, archivePath);
// We moved a file to archive that wasn't there.
// We DO need to increment the stats.
this.increment(notification.importance, NotificationsService.overview.archive);
if (snapshot) {
this.increment(notification.importance, snapshot.archive);
}
}
void this.publishWarningsAndAlerts();
@@ -564,18 +565,20 @@ export class NotificationsService {
return { overview: NotificationsService.overview };
}
const overviewSnapshot = this.getOverview();
const unreads = await this.listFilesInFolder(UNREAD);
const [notifications] = await this.loadNotificationsFromPaths(unreads, { importance });
const archive = this.moveNotification({
from: NotificationType.UNREAD,
to: NotificationType.ARCHIVE,
snapshot: overviewSnapshot,
});
const stats = await batchProcess(notifications, archive);
const archiveAction = async (notification: Notification) => {
// Reuse archiveNotification which handles the "exists" check logic
await this.archiveNotification({ id: notification.id });
};
const stats = await batchProcess(notifications, archiveAction);
void this.publishWarningsAndAlerts();
return { ...stats, overview: overviewSnapshot };
// Return the *actual* current state of the service, which is properly updated
// by the individual archiveNotification calls.
return { ...stats, overview: this.getOverview() };
}
public async unarchiveAll(importance?: NotificationImportance) {

View File

@@ -4,15 +4,21 @@ import { useI18n } from 'vue-i18n';
import { storeToRefs } from 'pinia';
import { useQuery } from '@vue/apollo-composable';
import { vInfiniteScroll } from '@vueuse/components';
import { useDebounceFn } from '@vueuse/core';
import type { ApolloError } from '@apollo/client/errors';
import type { NotificationImportance as Importance, NotificationType } from '~/composables/gql/graphql';
import type {
NotificationImportance as Importance,
Notification,
NotificationType,
} from '~/composables/gql/graphql';
import type { GraphQLError } from 'graphql';
import {
getNotifications,
NOTIFICATION_FRAGMENT,
} from '~/components/Notifications/graphql/notification.query';
import { notificationAddedSubscription } from '~/components/Notifications/graphql/notification.subscription';
import NotificationsItem from '~/components/Notifications/Item.vue';
import { useHaveSeenNotifications } from '~/composables/api/use-notifications';
import { useFragment } from '~/composables/gql/fragment-masking';
@@ -28,7 +34,10 @@ const props = withDefaults(
importance?: Importance;
}>(),
{
pageSize: 15,
// Increased to 50 to minimize "pagination drift" (race conditions) where
// new items added during a fetch shift the offsets of subsequent pages,
// causing the client to fetch duplicate items it already has.
pageSize: 50,
importance: undefined,
}
);
@@ -43,14 +52,53 @@ watch(props, () => {
const unraidApiStore = useUnraidApiStore();
const { offlineError } = storeToRefs(unraidApiStore);
const { result, error, loading, fetchMore, refetch } = useQuery(getNotifications, () => ({
filter: {
offset: 0,
limit: props.pageSize,
type: props.type,
importance: props.importance,
const { result, error, loading, fetchMore, refetch, subscribeToMore } = useQuery(
getNotifications,
() => ({
filter: {
offset: 0,
limit: props.pageSize,
type: props.type,
importance: props.importance,
},
})
);
// Debounce refetch to handle mass-add scenarios efficiently.
// Increased to 500ms to ensure we capture the entire batch of events in a single refetch,
// preventing partial updates that can lead to race conditions.
const debouncedRefetch = useDebounceFn(() => {
console.log('[Notifications] Refetching due to subscription update');
canLoadMore.value = true; // Reset load state so infinite scroll works again from top
void refetch();
}, 500);
subscribeToMore({
document: notificationAddedSubscription,
updateQuery: (previousResult, { subscriptionData }) => {
if (!subscriptionData.data) return previousResult;
const newNotification = subscriptionData.data.notificationAdded;
// Check filters - only refetch if the new notification is relevant to this list
let isRelevant = newNotification.type === props.type;
if (isRelevant && props.importance) {
isRelevant = newNotification.importance === props.importance;
}
if (isRelevant) {
// Debug log to confirm event reception
console.log('[Notifications] Relevant subscription event received:', newNotification.id);
debouncedRefetch();
} else {
// console.log('[Notifications] Irrelevant subscription event ignored:', newNotification.id);
}
// Return previous result unchanged. We rely on refetch() to update the list.
// This avoids the "stale previousResult" issue where rapid updates overwrite each other.
return previousResult;
},
}));
});
function dbgApolloError(prefix: string, err: ApolloError | null | undefined) {
if (!err) return;
@@ -85,9 +133,9 @@ watch([error, offlineError], ([e, o]) => {
const notifications = computed(() => {
if (!result.value?.notifications.list) return [];
const list = useFragment(NOTIFICATION_FRAGMENT, result.value?.notifications.list);
// necessary because some items in this list may change their type (e.g. archival)
// and we don't want to display them in the wrong list client-side.
return list.filter((n) => n.type === props.type);
const filtered = list.filter((n) => n.type === props.type);
console.log('[Notifications] Computed list updated. Length:', filtered.length);
return filtered;
});
const { t } = useI18n();
@@ -100,7 +148,7 @@ watch(
const [latest] = notifications.value;
if (!latest?.timestamp) return;
if (new Date(latest.timestamp) > new Date(latestSeenTimestamp.value)) {
console.log('[notif list] setting last seen timestamp', latest.timestamp);
// console.log('[notif list] setting last seen timestamp', latest.timestamp);
latestSeenTimestamp.value = latest.timestamp;
}
},
@@ -108,25 +156,75 @@ watch(
);
async function onLoadMore() {
console.log('[getNotifications] onLoadMore');
const currentLength = notifications.value.length;
console.log('[Notifications] onLoadMore triggered. Current Offset:', currentLength);
if (loading.value) {
console.log('[Notifications] Skipping load more because loading is true');
return;
}
try {
const incoming = await fetchMore({
variables: {
filter: {
offset: notifications.value.length,
offset: currentLength,
limit: props.pageSize,
type: props.type,
importance: props.importance,
},
},
updateQuery: (previousResult, { fetchMoreResult }) => {
if (!fetchMoreResult) return previousResult;
const currentList = previousResult.notifications.list || [];
const incomingList = fetchMoreResult.notifications.list;
console.log('[Notifications] fetchMore UpdateQuery.');
console.log(' - Previous List Length:', currentList.length);
console.log(' - Incoming List Length:', incomingList.length);
const existingIds = new Set(currentList.map((n: Notification) => n.id));
const newUniqueItems = incomingList.filter((n: Notification) => !existingIds.has(n.id));
console.log(' - Unique Items to Append:', newUniqueItems.length);
// DETECT PAGINATION DRIFT (Shifted Offsets)
// If we fetched items, but they are ALL duplicates, it implies new items were added
// to the top of the list, pushing existing items down into our requested page range.
// In this case, our current list is stale/misaligned. We must force a full refetch.
if (incomingList.length > 0 && newUniqueItems.length === 0) {
console.warn(
'[Notifications] Pagination Drift Detected! Fetched items are all duplicates. Triggering Refetch.'
);
// Trigger refetch asynchronously to avoid side-effects during render cycle
setTimeout(() => {
debouncedRefetch();
}, 0);
return previousResult;
}
return {
...previousResult,
notifications: {
...previousResult.notifications,
list: [...currentList, ...newUniqueItems],
},
};
},
});
const incomingCount = incoming?.data.notifications.list.length ?? 0;
console.log('[Notifications] fetchMore Result.');
console.log(' - Incoming Count from Network:', incomingCount);
console.log(' - Page Size:', props.pageSize);
if (incomingCount === 0 || incomingCount < props.pageSize) {
console.log('[Notifications] Reached End (incoming < pageSize). Disabling Infinite Scroll.');
canLoadMore.value = false;
}
} catch (error) {
// Stop attempting while offline/error.
// UI has a Try Again button to recover
console.error('[Notifications] fetchMore Error:', error);
canLoadMore.value = false;
throw error;
}