From 27a372874597ad09ffda08044cfd6094194f79f5 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 1 Oct 2025 10:24:45 +0200 Subject: [PATCH 01/56] wip: PowerSync collections --- packages/powersync-db-collection/CHANGELOG.md | 1 + packages/powersync-db-collection/package.json | 69 ++++ .../src/PendingOperationStore.ts | 47 +++ .../src/PowerSyncTransactor.ts | 139 +++++++ .../powersync-db-collection/src/helpers.ts | 30 ++ packages/powersync-db-collection/src/index.ts | 1 + .../powersync-db-collection/src/powersync.ts | 340 ++++++++++++++++++ .../tests/powersync.test.ts | 257 +++++++++++++ .../tsconfig.docs.json | 9 + .../powersync-db-collection/tsconfig.json | 20 ++ .../powersync-db-collection/vite.config.ts | 21 ++ pnpm-lock.yaml | 139 ++++--- 12 files changed, 1031 insertions(+), 42 deletions(-) create mode 100644 packages/powersync-db-collection/CHANGELOG.md create mode 100644 packages/powersync-db-collection/package.json create mode 100644 packages/powersync-db-collection/src/PendingOperationStore.ts create mode 100644 packages/powersync-db-collection/src/PowerSyncTransactor.ts create mode 100644 packages/powersync-db-collection/src/helpers.ts create mode 100644 packages/powersync-db-collection/src/index.ts create mode 100644 packages/powersync-db-collection/src/powersync.ts create mode 100644 packages/powersync-db-collection/tests/powersync.test.ts create mode 100644 packages/powersync-db-collection/tsconfig.docs.json create mode 100644 packages/powersync-db-collection/tsconfig.json create mode 100644 packages/powersync-db-collection/vite.config.ts diff --git a/packages/powersync-db-collection/CHANGELOG.md b/packages/powersync-db-collection/CHANGELOG.md new file mode 100644 index 000000000..1c804e7e2 --- /dev/null +++ b/packages/powersync-db-collection/CHANGELOG.md @@ -0,0 +1 @@ +# @tanstack/powersync-db-collection diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json new file mode 100644 index 000000000..9fa1f15be --- /dev/null +++ b/packages/powersync-db-collection/package.json @@ -0,0 +1,69 @@ +{ + "name": "@tanstack/powersync-db-collection", + "description": "PowerSync collection for TanStack DB", + "version": "0.0.0", + "dependencies": { + "@powersync/common": "^1.39.0", + "@standard-schema/spec": "^1.0.0", + "@tanstack/db": "workspace:*", + "@tanstack/store": "^0.7.7", + "debug": "^4.4.3", + "p-defer": "^4.0.1" + }, + "devDependencies": { + "@powersync/better-sqlite3": "^0.2.0", + "@powersync/node": "^0.11.0", + "@types/debug": "^4.1.12", + "@vitest/coverage-istanbul": "^3.2.4" + }, + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.cts", + "default": "./dist/cjs/index.cjs" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist", + "src" + ], + "main": "dist/cjs/index.cjs", + "module": "dist/esm/index.js", + "packageManager": "pnpm@10.17.0", + "author": "JOURNEYAPPS", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "https://github.com/TanStack/db.git", + "directory": "packages/powersync-db-collection" + }, + "homepage": "https://tanstack.com/db", + "keywords": [ + "powersync", + "realtime", + "local-first", + "sync-engine", + "sync", + "replication", + "opfs", + "indexeddb", + "localstorage", + "optimistic", + "typescript" + ], + "scripts": { + "build": "vite build", + "dev": "vite build --watch", + "lint": "eslint . 
--fix", + "test": "npx vitest --run" + }, + "sideEffects": false, + "type": "module", + "types": "dist/esm/index.d.ts" +} diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts new file mode 100644 index 000000000..9353cd03e --- /dev/null +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -0,0 +1,47 @@ +import pDefer from "p-defer" +import type { DiffTriggerOperation } from "@powersync/common" +import type { DeferredPromise } from "p-defer" + +export type PendingOperation = { + operation: DiffTriggerOperation + id: string + timestamp: string +} + +/** + * Optimistic mutations have their optimistic state discarded once transactions have + * been applied. + * We need to ensure that an applied transaction has been observed by the sync diff trigger + * before resoling the transaction application call. + * This store allows registering a wait for a pending operation to have been observed. + */ +export class PendingOperationStore { + private pendingOperations = new Map>() + + /** + * @returns A promise which will resolve once the specified operation has been seen. + */ + waitFor(operation: PendingOperation): Promise { + const managedPromise = pDefer() + this.pendingOperations.set(operation, managedPromise) + return managedPromise.promise + } + + /** + * Marks a set of operations as seen. This will resolve any pending promises. + */ + resolvePendingFor(operations: Array) { + for (const operation of operations) { + for (const [pendingOp, deferred] of this.pendingOperations.entries()) { + if ( + pendingOp.operation == operation.operation && + pendingOp.id == operation.id && + pendingOp.timestamp == operation.timestamp + ) { + deferred.resolve() + this.pendingOperations.delete(pendingOp) + } + } + } + } +} diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts new file mode 100644 index 000000000..5d145e270 --- /dev/null +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -0,0 +1,139 @@ +import { sanitizeSQL } from "@powersync/common" +import DebugModule from "debug" +import { asPowerSyncRecord } from "./helpers" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" +import type { Transaction } from "@tanstack/db" +import type { + PendingOperation, + PendingOperationStore, +} from "./PendingOperationStore" +import type { PowerSyncRecord } from "./helpers" + +const debug = DebugModule.debug(`ts/db:powersync`) + +export type TransactorOptions = { + database: AbstractPowerSyncDatabase + tableName: string + pendingOperationStore: PendingOperationStore + trackedTableName: string +} + +/** + * Handles persisting Tanstack DB transactions to the PowerSync SQLite DB. + */ +export class PowerSyncTransactor> { + database: AbstractPowerSyncDatabase + pendingOperationStore: PendingOperationStore + tableName: string + trackedTableName: string + + constructor(options: TransactorOptions) { + this.database = options.database + this.pendingOperationStore = options.pendingOperationStore + this.tableName = sanitizeSQL`${options.tableName}` + this.trackedTableName = sanitizeSQL`${options.trackedTableName}` + } + + /** + * Persists a {@link Transaction} to PowerSync's SQLite DB. 
+ */ + async applyTransaction(transaction: Transaction) { + const { mutations } = transaction + + // Persist to PowerSync + const { whenComplete } = await this.database.writeTransaction( + async (tx) => { + for (const mutation of mutations) { + switch (mutation.type) { + case `insert`: + await this.handleInsert(asPowerSyncRecord(mutation.modified), tx) + break + case `update`: + await this.handleUpdate(asPowerSyncRecord(mutation.modified), tx) + break + case `delete`: + await this.handleDelete(asPowerSyncRecord(mutation.original), tx) + break + } + } + + /** + * Fetch the last diff operation in the queue. + * We need to wait for this operation to be seen by the + * sync handler before returning from the application call. + */ + const lastDiffOp = await tx.getOptional(` + SELECT + id, operation, timestamp + FROM + ${this.trackedTableName} + ORDER BY + timestamp DESC + LIMIT 1 + `) + + /** + * Return a promise from the writeTransaction, without awaiting it. + * This promise will resolve once the entire transaction has been + * observed via the diff triggers. + * We return without awaiting in order to free the writeLock. + */ + return { + whenComplete: lastDiffOp + ? this.pendingOperationStore.waitFor(lastDiffOp) + : Promise.resolve(), + } + } + ) + + // Wait for the change to be observed via the diff trigger + await whenComplete + } + + protected async handleInsert( + mutation: PowerSyncRecord, + context: LockContext + ) { + debug(`insert`, mutation) + const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) + await context.execute( + ` + INSERT into ${this.tableName} + (${keys.join(`, `)}) + VALUES + (${keys.map((_) => `?`).join(`, `)}) + `, + Object.values(mutation) + ) + } + + protected async handleUpdate( + mutation: PowerSyncRecord, + context: LockContext + ) { + debug(`update`, mutation) + + const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) + await context.execute( + ` + UPDATE ${this.tableName} + SET ${keys.map((key) => `${key} = ?`).join(`, `)} + WHERE id = ? + `, + [...Object.values(mutation), mutation.id] + ) + } + + protected async handleDelete( + mutation: PowerSyncRecord, + context: LockContext + ) { + debug(`delete`, mutation) + await context.execute( + ` + DELETE FROM ${this.tableName} WHERE id = ? + `, + [mutation.id] + ) + } +} diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts new file mode 100644 index 000000000..29c42b7a9 --- /dev/null +++ b/packages/powersync-db-collection/src/helpers.ts @@ -0,0 +1,30 @@ +import { DiffTriggerOperation } from "@powersync/common" + +/** + * All PowerSync table records have a uuid `id` column. 
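+ * This `id` is used as the Tanstack DB collection key; {@link asPowerSyncRecord}
+ * rejects records that do not carry a string `id`.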
+ */ +export type PowerSyncRecord = { + id: string + [key: string]: unknown +} + +export function asPowerSyncRecord(record: any): PowerSyncRecord { + if (typeof record.id !== `string`) { + throw new Error(`Record must have a string id field`) + } + return record as PowerSyncRecord +} + +/** + * Maps Tanstack DB operations to {@link DiffTriggerOperation} + */ +export function mapOperation(operation: DiffTriggerOperation) { + switch (operation) { + case DiffTriggerOperation.INSERT: + return `insert` + case DiffTriggerOperation.UPDATE: + return `update` + case DiffTriggerOperation.DELETE: + return `delete` + } +} diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts new file mode 100644 index 000000000..30e35e857 --- /dev/null +++ b/packages/powersync-db-collection/src/index.ts @@ -0,0 +1 @@ +export * from "./powersync" diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts new file mode 100644 index 000000000..b204c04bf --- /dev/null +++ b/packages/powersync-db-collection/src/powersync.ts @@ -0,0 +1,340 @@ +import { DiffTriggerOperation } from "@powersync/common" +import { PendingOperationStore } from "./PendingOperationStore" +import { PowerSyncTransactor } from "./PowerSyncTransactor" +import { mapOperation } from "./helpers" +import type { PendingOperation } from "./PendingOperationStore" +import type { + BaseCollectionConfig, + CollectionConfig, + InferSchemaOutput, + SyncConfig, + Transaction, +} from "@tanstack/db" +import type { + AbstractPowerSyncDatabase, + TriggerDiffRecord, +} from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +/** + * Configuration interface for PowerSync collection options + * @template T - The type of items in the collection + * @template TSchema - The schema type for validation + */ +/** + * Configuration options for creating a PowerSync collection. + * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export type PowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = Omit< + BaseCollectionConfig, + `onInsert` | `onUpdate` | `onDelete` | `getKey` +> & { + /** The name of the table in PowerSync database */ + tableName: string + /** The PowerSync database instance */ + database: AbstractPowerSyncDatabase +} + +export type PowerSyncCollectionUtils = { + /** + * Applies mutations to the PowerSync database. This method is called automatically by the collection's + * insert, update, and delete operations. You typically don't need to call this directly unless you + * have special transaction requirements. 
+ * + * @example + * ```typescript + * // Create a collection + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * + * const addTx = createTransaction({ + * autoCommit: false, + * mutationFn: async ({ transaction }) => { + * await collection.utils.mutateTransaction(transaction) + * }, + * }) + * + * addTx.mutate(() => { + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } + * }) + * + * await addTx.commit() + * await addTx.isPersisted.promise + * ``` + * + * @param transaction - The transaction containing mutations to apply + * @returns A promise that resolves when the mutations have been persisted to PowerSync + */ + mutateTransaction: (transaction: Transaction) => Promise +} + +/** + * Creates PowerSync collection options for use with a standard Collection + * + * @template TExplicit - The explicit type of items in the collection (highest priority) + * @template TSchema - The schema type for validation and type inference (second priority) + * @param config - Configuration options for the PowerSync collection + * @returns Collection options with utilities + */ + +// Overload for when schema is provided +/** + * Creates a PowerSync collection configuration with schema validation. + * + * @example + * ```typescript + * // With schema validation + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * schema: APP_SCHEMA, + * }) + * ) + * ``` + */ +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig, T> +): CollectionConfig, string, T> & { + schema: T + utils: PowerSyncCollectionUtils +} + +/** + * Creates a PowerSync collection configuration without schema validation. + * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig & { + schema?: never + } +): CollectionConfig & { + schema?: never + utils: PowerSyncCollectionUtils +} + +/** + * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. + */ +export function powerSyncCollectionOptions< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +>( + config: PowerSyncCollectionConfig +): CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema +} { + type Row = Record + type Key = string // we always use uuids for keys + + const { database, tableName, ...restConfig } = config + + /** + * The onInsert, onUpdate, onDelete handlers should only return + * after we have written the changes to Tanstack DB. + * We currently only write to Tanstack DB from a diff trigger. + * We wait for the diff trigger to observe the change, + * and only then return from the on[X] handlers. + * This ensures that when the transaction is reported as + * complete to the caller, the in-memory state is already + * consistent with the database. 
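+   *
+   * Rough sequence for a single mutation:
+   * 1. The on[X] handler persists the change to SQLite via the PowerSyncTransactor.
+   * 2. The diff trigger records the change in the tracking table.
+   * 3. The onChange watcher drains the tracking table and writes the change into Tanstack DB.
+   * 4. The matching pending operation is resolved and the on[X] handler returns.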
+ */ + const pendingOperationStore = new PendingOperationStore() + const trackedTableName = `__${tableName}_tracking` + + const transactor = new PowerSyncTransactor({ + database, + pendingOperationStore, + tableName, + trackedTableName, + }) + + /** + * "sync" + * Notice that this describes the Sync between the local SQLite table + * and the in-memory tanstack-db collection. + * It is not about sync between a client and a server! + */ + type SyncParams = Parameters[`sync`]>[0] + const sync: SyncConfig = { + sync: async (params: SyncParams) => { + const { begin, write, commit, markReady } = params + + // Manually create a tracking operation for optimization purposes + const abortController = new AbortController() + + database.onChangeWithCallback( + { + onChange: async () => { + await database.writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] + + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? { id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + }) + } + + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) + + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + }, + }, + { + signal: abortController.signal, + triggerImmediate: false, + tables: [trackedTableName], + } + ) + + const disposeTracking = await database.triggers.createDiffTrigger({ + source: tableName, + destination: trackedTableName, + when: { + [DiffTriggerOperation.INSERT]: `TRUE`, + [DiffTriggerOperation.UPDATE]: `TRUE`, + [DiffTriggerOperation.DELETE]: `TRUE`, + }, + hooks: { + beforeCreate: async (context) => { + begin() + for (const row of await context.getAll>( + `SELECT * FROM ${tableName}` + )) { + write({ + type: `insert`, + value: row, + }) + } + commit() + markReady() + }, + }, + }) + + return () => { + abortController.abort() + disposeTracking() + } + }, + // Expose the getSyncMetadata function + getSyncMetadata: undefined, + } + + const getKey = (record: Record) => record.id as string + + return { + ...restConfig, + getKey, + sync, + onInsert: async (params) => { + // The transaction here should only ever contain a single insert mutation + return await transactor.applyTransaction(params.transaction) + }, + onUpdate: async (params) => { + // The transaction here should only ever contain a single update mutation + return await transactor.applyTransaction(params.transaction) + }, + onDelete: async (params) => { + // The transaction here should only ever contain a single delete mutation + return await transactor.applyTransaction(params.transaction) + }, + utils: { + mutateTransaction: async (transaction: Transaction) => { + return await transactor.applyTransaction(transaction) + }, + }, + } as CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema + } +} diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts new file mode 100644 index 000000000..4e9676ef0 --- /dev/null +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -0,0 +1,257 @@ +import { randomUUID } from 
"node:crypto" +import { + CrudEntry, + PowerSyncDatabase, + Schema, + Table, + column, +} from "@powersync/node" +import { createCollection, createTransaction } from "@tanstack/db" +import { describe, expect, it, onTestFinished, vi } from "vitest" +import { powerSyncCollectionOptions } from "../src" +import type { AbstractPowerSyncDatabase } from "@powersync/node" + +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + }), +}) + +type Document = (typeof APP_SCHEMA)[`types`][`documents`] + +describe(`PowerSync Integration`, () => { + async function createDatabase() { + const db = new PowerSyncDatabase({ + database: { + dbFilename: `test.sqlite`, + }, + schema: APP_SCHEMA, + }) + onTestFinished(async () => { + await db.disconnectAndClear() + await db.close() + }) + await db.disconnectAndClear() + return db + } + + async function createTestData(db: AbstractPowerSyncDatabase) { + await db.execute(` + INSERT into documents (id, name) + VALUES + (uuid(), 'one'), + (uuid(), 'two'), + (uuid(), 'three') + `) + } + + describe(`sync`, () => { + it(`should initialize and fetch initial data`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + ]) + }) + }) + + it(`should update when data syncs`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + // Make an update, simulates a sync from another client + await db.execute(` + INSERT into documents (id, name) + VALUES + (uuid(), 'four') + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(4) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + + await db.execute(` + DELETE from documents + WHERE name = 'two' + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + + await db.execute(` + UPDATE documents + SET name = 'updated' + WHERE name = 'one' + `) + + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `updated`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + }) + + it(`should propagate collection mutations to PowerSync`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + const id = randomUUID() + const tx = collection.insert({ + id, + name: `new`, + }) + + // The insert should optimistically update the collection + const newDoc = collection.get(id) + 
expect(newDoc).toBeDefined() + expect(newDoc!.name).toBe(`new`) + + await tx.isPersisted.promise + // The item should now be present in PowerSync + // We should also have patched it back in to Tanstack DB (removing the optimistic state) + + // Now do an update + await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted + .promise + + const updatedDoc = collection.get(id) + expect(updatedDoc).toBeDefined() + expect(updatedDoc!.name).toBe(`updatedNew`) + + await collection.delete(id).isPersisted.promise + + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + expect(crudEntries.length).toBe(6) + // We can only group transactions for similar operations + expect(crudEntries.map((e) => e.op)).toEqual([ + `PUT`, + `PUT`, + `PUT`, + `PUT`, + `PATCH`, + `DELETE`, + ]) + }) + + it(`should handle transactions`, async () => { + const db = await createDatabase() + await createTestData(db) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + await collection.stateWhenReady() + + expect(collection.size).toBe(3) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await collection.utils.mutateTransaction(transaction) + }, + }) + + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + collection.insert({ id: randomUUID(), name: `tx-${i}` }) + } + }) + + await addTx.commit() + await addTx.isPersisted.promise + + expect(collection.size).toBe(8) + + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. 
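+     * (Crud entries written within one SQLite write transaction share a
+     * transactionId, which is what is asserted below.)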
+ */ + expect( + crudEntries + .reverse() + .slice(0, 5) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) +}) diff --git a/packages/powersync-db-collection/tsconfig.docs.json b/packages/powersync-db-collection/tsconfig.docs.json new file mode 100644 index 000000000..5a73feb02 --- /dev/null +++ b/packages/powersync-db-collection/tsconfig.docs.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "paths": { + "@tanstack/db": ["../db/src"] + } + }, + "include": ["src"] +} diff --git a/packages/powersync-db-collection/tsconfig.json b/packages/powersync-db-collection/tsconfig.json new file mode 100644 index 000000000..7e586bab3 --- /dev/null +++ b/packages/powersync-db-collection/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "target": "ES2020", + "module": "ESNext", + "moduleResolution": "Bundler", + "declaration": true, + "outDir": "dist", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "jsx": "react", + "paths": { + "@tanstack/store": ["../store/src"] + } + }, + "include": ["src", "tests", "vite.config.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/powersync-db-collection/vite.config.ts b/packages/powersync-db-collection/vite.config.ts new file mode 100644 index 000000000..af20d0da3 --- /dev/null +++ b/packages/powersync-db-collection/vite.config.ts @@ -0,0 +1,21 @@ +import { tanstackViteConfig } from "@tanstack/config/vite" +import { defineConfig, mergeConfig } from "vitest/config" +import packageJson from "./package.json" + +const config = defineConfig({ + test: { + name: packageJson.name, + dir: `./tests`, + environment: `node`, + coverage: { enabled: true, provider: `istanbul`, include: [`src/**/*`] }, + typecheck: { enabled: true }, + }, +}) + +export default mergeConfig( + config, + tanstackViteConfig({ + entry: `./src/index.ts`, + srcDir: `./src`, + }) +) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4ce29fc7f..1c453d7a0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -475,7 +475,7 @@ importers: version: 0.44.5(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.5) + version: 0.8.3(drizzle-orm@0.44.5(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) express: specifier: ^4.21.2 version: 4.21.2 @@ -647,6 +647,40 @@ importers: specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + packages/powersync-db-collection: + dependencies: + '@powersync/common': + specifier: ^1.39.0 + version: 1.39.0 + '@standard-schema/spec': + specifier: ^1.0.0 + version: 1.0.0 + '@tanstack/db': + specifier: workspace:* + version: link:../db + '@tanstack/store': + specifier: ^0.7.7 + version: 0.7.7 + debug: + specifier: ^4.4.3 + version: 4.4.3 + p-defer: + specifier: ^4.0.1 + version: 4.0.1 + devDependencies: + '@powersync/better-sqlite3': + specifier: ^0.2.0 + version: 0.2.0 + '@powersync/node': + specifier: ^0.11.0 + version: 0.11.0(@powersync/common@1.39.0) + '@types/debug': + specifier: ^4.1.12 + version: 4.1.12 + '@vitest/coverage-istanbul': + specifier: ^3.2.4 + version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + packages/query-db-collection: dependencies: '@standard-schema/spec': @@ -754,7 +788,7 @@ importers: version: 1.9.9 vite-plugin-solid: specifier: ^2.11.8 - version: 2.11.8(@testing-library/jest-dom@6.8.0)(solid-js@1.9.9)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + version: 2.11.8(@testing-library/jest-dom@6.8.0)(solid-js@1.9.9)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) vitest: specifier: ^3.2.4 version: 3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) @@ -770,7 +804,7 @@ importers: version: 2.5.3(svelte@5.39.4)(typescript@5.9.2) '@sveltejs/vite-plugin-svelte': specifier: ^6.2.0 - version: 6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + version: 6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) '@vitest/coverage-istanbul': specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) @@ -823,7 +857,7 @@ importers: version: 1.0.10 '@vitejs/plugin-vue': specifier: ^5.2.4 - version: 5.2.4(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2)) + version: 5.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2)) '@vitest/coverage-istanbul': specifier: ^3.2.4 version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) @@ -2695,6 +2729,17 @@ packages: '@poppinss/exception@1.2.2': resolution: {integrity: sha512-m7bpKCD4QMlFCjA/nKTs23fuvoVFoA83brRKmObCUNmi/9tVu8Ve3w4YQAnJu4q3Tjf5fr685HYIC/IA2zHRSg==} + '@powersync/better-sqlite3@0.2.0': + resolution: {integrity: sha512-8otwueqHJqwilUz/vLENlpMp2c4k/TV6hGX016XrZxSkizDAil99yRm7lAVwpbYYGuSgyzidyDh6vy6PY+m4kw==} + + '@powersync/common@1.39.0': + resolution: {integrity: sha512-qGPl/LPRoopNWjduGXfN+P3PsdTMfFR9YI2TbsLA++VRMK+10To9ey3Z6yprKoVbdLmisPde9mAaTvb1ugkeyg==} + + '@powersync/node@0.11.0': + resolution: {integrity: sha512-33J3/TnZ+s9mu0pHFfJCZhSQp7C+Ai4/1sBxC7aNdiRCyvg8DBYY8P7gMYXlyZfyMQHc0hfs3GDJzxYOBMNAyQ==} + peerDependencies: + '@powersync/common': ^1.39.0 + '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -4192,6 +4237,9 @@ packages: resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} engines: {node: '>= 0.4'} + async-lock@1.4.1: + resolution: {integrity: sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==} + async-sema@3.1.1: resolution: {integrity: sha512-tLRNUXati5MFePdAk8dw7Qt7DpxPB60ofAgn8WRhW6a2rcimZnYBP9oxHiv0OHy+Wz7kPMG+t4LGdt31+4EmGg==} @@ -4492,6 +4540,9 @@ packages: 
colorette@2.0.20: resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + comlink@4.4.2: + resolution: {integrity: sha512-OxGdvBmJuNKSCMO4NTl1L47VRp6xn2wG4F/2hYzB6tiCb709otOxtEYCSvK80PtjODfXXZu8ds+Nw5kVCjqd2g==} + commander@11.1.0: resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} engines: {node: '>=16'} @@ -6165,6 +6216,9 @@ packages: js-base64@3.7.8: resolution: {integrity: sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==} + js-logger@1.6.1: + resolution: {integrity: sha512-yTgMCPXVjhmg28CuUH8CKjU+cIKL/G+zTu4Fn4lQxs8mRFH/03QTNvEFngcxfg/gRDiQAOoyCKmMTOm9ayOzXA==} + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} @@ -7035,6 +7089,10 @@ packages: oxc-resolver@11.8.2: resolution: {integrity: sha512-SM31gnF1l4T8YA7dkAcBhA+jc336bc8scy0Tetz6ndzGmV6c0R99SRnx6In0V5ffwvn1Isjo9I9EGSLF4xi3TA==} + p-defer@4.0.1: + resolution: {integrity: sha512-Mr5KC5efvAK5VUptYEIopP1bakB85k2IWXaRC0rsh1uwn1L6M0LVml8OIQ4Gudg4oyZakf7FmeRLkMMtZW1i5A==} + engines: {node: '>=12'} + p-filter@2.1.0: resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} engines: {node: '>=8'} @@ -11086,6 +11144,23 @@ snapshots: '@poppinss/exception@1.2.2': {} + '@powersync/better-sqlite3@0.2.0': + dependencies: + bindings: 1.5.0 + + '@powersync/common@1.39.0': + dependencies: + js-logger: 1.6.1 + + '@powersync/node@0.11.0(@powersync/common@1.39.0)': + dependencies: + '@powersync/better-sqlite3': 0.2.0 + '@powersync/common': 1.39.0 + async-lock: 1.4.1 + bson: 6.10.4 + comlink: 4.4.2 + undici: 7.16.0 + '@protobufjs/aspromise@1.1.2': {} '@protobufjs/base64@1.1.2': {} @@ -11548,24 +11623,24 @@ snapshots: transitivePeerDependencies: - typescript - '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte': 6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte': 6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) debug: 4.4.3 svelte: 5.39.4 - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) + vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': + 
'@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.0(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)))(svelte@5.39.4)(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) debug: 4.4.3 deepmerge: 4.3.1 magic-string: 0.30.19 svelte: 5.39.4 - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitefu: 1.1.1(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) + vitefu: 1.1.1(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) transitivePeerDependencies: - supports-color @@ -12825,9 +12900,9 @@ snapshots: transitivePeerDependencies: - supports-color - '@vitejs/plugin-vue@5.2.4(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2))': + '@vitejs/plugin-vue@5.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))(vue@3.5.21(typescript@5.9.2))': dependencies: - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) + vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) vue: 3.5.21(typescript@5.9.2) '@vitest/coverage-istanbul@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.3.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': @@ -12862,14 +12937,6 @@ snapshots: optionalDependencies: vite: 6.3.6(@types/node@22.18.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - '@vitest/mocker@3.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.19 - optionalDependencies: - vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - '@vitest/pretty-format@3.2.4': dependencies: tinyrainbow: 2.0.0 @@ -13258,6 +13325,8 @@ snapshots: async-function@1.0.0: {} + async-lock@1.4.1: {} + async-sema@3.1.1: {} async@3.2.6: {} @@ -13621,6 +13690,8 @@ snapshots: colorette@2.0.20: {} + comlink@4.4.2: {} + commander@11.1.0: optional: true @@ -15424,6 +15495,8 @@ snapshots: js-base64@3.7.8: {} + js-logger@1.6.1: {} + js-tokens@4.0.0: {} js-tokens@9.0.1: {} @@ -16471,6 +16544,8 @@ snapshots: '@oxc-resolver/binding-win32-ia32-msvc': 11.8.2 '@oxc-resolver/binding-win32-x64-msvc': 11.8.2 + p-defer@4.0.1: {} + p-filter@2.1.0: 
dependencies: p-map: 2.1.0 @@ -18336,22 +18411,6 @@ snapshots: '@testing-library/jest-dom': 6.8.0 transitivePeerDependencies: - supports-color - optional: true - - vite-plugin-solid@2.11.8(@testing-library/jest-dom@6.8.0)(solid-js@1.9.9)(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)): - dependencies: - '@babel/core': 7.28.4 - '@types/babel__core': 7.20.5 - babel-preset-solid: 1.9.9(@babel/core@7.28.4)(solid-js@1.9.9) - merge-anything: 5.1.7 - solid-js: 1.9.9 - solid-refresh: 0.6.3(solid-js@1.9.9) - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitefu: 1.1.1(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) - optionalDependencies: - '@testing-library/jest-dom': 6.8.0 - transitivePeerDependencies: - - supports-color vite-tsconfig-paths@5.1.4(typescript@5.9.2)(vite@6.3.6(@types/node@22.18.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)): dependencies: @@ -18437,10 +18496,6 @@ snapshots: optionalDependencies: vite: 6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitefu@1.1.1(vite@7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)): - optionalDependencies: - vite: 7.1.5(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1) - vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.18.1)(jiti@2.5.1)(jsdom@27.0.0(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1): dependencies: '@types/chai': 5.2.2 @@ -18488,7 +18543,7 @@ snapshots: dependencies: '@types/chai': 5.2.2 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@6.3.6(@types/node@24.3.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) + '@vitest/mocker': 3.2.4(vite@6.3.6(@types/node@22.18.1)(jiti@2.5.1)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.5)(yaml@2.8.1)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 From e88623c5f2a0dfcafe74e3f97f1b07edfbfaa9e5 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 1 Oct 2025 11:53:28 +0200 Subject: [PATCH 02/56] Add support for transactions with multiple collection types --- .../src/PendingOperationStore.ts | 7 + .../src/PowerSyncTransactor.ts | 233 +++++++++++++----- .../src/definitions.ts | 73 ++++++ .../powersync-db-collection/src/helpers.ts | 18 +- packages/powersync-db-collection/src/index.ts | 2 + .../powersync-db-collection/src/powersync.ts | 137 ++-------- .../tests/powersync.test.ts | 74 +++++- 7 files changed, 363 insertions(+), 181 deletions(-) create mode 100644 packages/powersync-db-collection/src/definitions.ts diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts index 9353cd03e..c804067b3 100644 --- a/packages/powersync-db-collection/src/PendingOperationStore.ts +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -3,6 +3,7 @@ import type { DiffTriggerOperation } from "@powersync/common" import type { DeferredPromise } from "p-defer" export type PendingOperation = { + tableName: string operation: DiffTriggerOperation id: string timestamp: string @@ -18,6 +19,11 @@ export type PendingOperation = { export class PendingOperationStore { private 
pendingOperations = new Map>() + /** + * Globally accessible PendingOperationStore + */ + static GLOBAL = new PendingOperationStore() + /** * @returns A promise which will resolve once the specified operation has been seen. */ @@ -34,6 +40,7 @@ export class PendingOperationStore { for (const operation of operations) { for (const [pendingOp, deferred] of this.pendingOperations.entries()) { if ( + pendingOp.tableName == operation.tableName && pendingOp.operation == operation.operation && pendingOp.id == operation.id && pendingOp.timestamp == operation.timestamp diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 5d145e270..9ce47e908 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -1,37 +1,60 @@ import { sanitizeSQL } from "@powersync/common" import DebugModule from "debug" -import { asPowerSyncRecord } from "./helpers" +import { PendingOperationStore } from "./PendingOperationStore" +import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" -import type { Transaction } from "@tanstack/db" -import type { - PendingOperation, - PendingOperationStore, -} from "./PendingOperationStore" -import type { PowerSyncRecord } from "./helpers" +import type { PendingMutation, Transaction } from "@tanstack/db" +import type { PendingOperation } from "./PendingOperationStore" +import type { EnhancedPowerSyncCollectionConfig } from "./definitions" const debug = DebugModule.debug(`ts/db:powersync`) export type TransactorOptions = { database: AbstractPowerSyncDatabase - tableName: string - pendingOperationStore: PendingOperationStore - trackedTableName: string } /** - * Handles persisting Tanstack DB transactions to the PowerSync SQLite DB. + * Applies mutations to the PowerSync database. This method is called automatically by the collection's + * insert, update, and delete operations. You typically don't need to call this directly unless you + * have special transaction requirements. 
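+ * A typical reason to call it directly is a custom `createTransaction` mutationFn that
+ * groups mutations from multiple PowerSync-backed collections into a single SQLite write
+ * transaction, as shown in the example below.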
+ * + * @example + * ```typescript + * // Create a collection + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * + * const addTx = createTransaction({ + * autoCommit: false, + * mutationFn: async ({ transaction }) => { + * await new PowerSyncTransactor({database: db}).applyTransaction(transaction) + * }, + * }) + * + * addTx.mutate(() => { + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } + * }) + * + * await addTx.commit() + * await addTx.isPersisted.promise + * ``` + * + * @param transaction - The transaction containing mutations to apply + * @returns A promise that resolves when the mutations have been persisted to PowerSync */ export class PowerSyncTransactor> { database: AbstractPowerSyncDatabase pendingOperationStore: PendingOperationStore - tableName: string - trackedTableName: string constructor(options: TransactorOptions) { this.database = options.database - this.pendingOperationStore = options.pendingOperationStore - this.tableName = sanitizeSQL`${options.tableName}` - this.trackedTableName = sanitizeSQL`${options.trackedTableName}` + this.pendingOperationStore = PendingOperationStore.GLOBAL } /** @@ -40,38 +63,49 @@ export class PowerSyncTransactor> { async applyTransaction(transaction: Transaction) { const { mutations } = transaction + if (mutations.length == 0) { + return + } + /** + * The transaction might contain ops for different collections. + * We can do some optimizations for single collection transactions. + */ + const isMixedTransaction = mutations.some( + (mutation) => mutation.collection.id !== mutations[0]?.collection.id + ) // Persist to PowerSync const { whenComplete } = await this.database.writeTransaction( async (tx) => { - for (const mutation of mutations) { + const pendingOperations: Array = [] + + for (const [index, mutation] of mutations.entries()) { + /** + * For mixed transactions we need to check every operation has been seen. + * This is since the individual tables are watched independently. + * + * For a single collection transaction, we only need to check the last operation + * has been seen. + */ + const shouldWait = isMixedTransaction || index == mutations.length - 1 switch (mutation.type) { case `insert`: - await this.handleInsert(asPowerSyncRecord(mutation.modified), tx) + pendingOperations.push( + await this.handleInsert(mutation, tx, shouldWait) + ) break case `update`: - await this.handleUpdate(asPowerSyncRecord(mutation.modified), tx) + pendingOperations.push( + await this.handleUpdate(mutation, tx, shouldWait) + ) break case `delete`: - await this.handleDelete(asPowerSyncRecord(mutation.original), tx) + pendingOperations.push( + await this.handleDelete(mutation, tx, shouldWait) + ) break } } - /** - * Fetch the last diff operation in the queue. - * We need to wait for this operation to be seen by the - * sync handler before returning from the application call. - */ - const lastDiffOp = await tx.getOptional(` - SELECT - id, operation, timestamp - FROM - ${this.trackedTableName} - ORDER BY - timestamp DESC - LIMIT 1 - `) - /** * Return a promise from the writeTransaction, without awaiting it. * This promise will resolve once the entire transaction has been @@ -79,9 +113,11 @@ export class PowerSyncTransactor> { * We return without awaiting in order to free the writeLock. */ return { - whenComplete: lastDiffOp - ? 
this.pendingOperationStore.waitFor(lastDiffOp) - : Promise.resolve(), + whenComplete: Promise.all( + pendingOperations + .filter((op) => !!op) + .map((op) => this.pendingOperationStore.waitFor(op)) + ), } } ) @@ -91,49 +127,122 @@ export class PowerSyncTransactor> { } protected async handleInsert( - mutation: PowerSyncRecord, - context: LockContext - ) { + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { debug(`insert`, mutation) - const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) - await context.execute( - ` - INSERT into ${this.tableName} + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + const keys = Object.keys(mutation.modified).map( + (key) => sanitizeSQL`${key}` + ) + + await context.execute( + ` + INSERT into ${tableName} (${keys.join(`, `)}) VALUES (${keys.map((_) => `?`).join(`, `)}) `, - Object.values(mutation) + Object.values(mutation.modified) + ) + } ) } protected async handleUpdate( - mutation: PowerSyncRecord, - context: LockContext - ) { + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { debug(`update`, mutation) - const keys = Object.keys(mutation).map((key) => sanitizeSQL`${key}`) - await context.execute( - ` - UPDATE ${this.tableName} + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + const keys = Object.keys(mutation.modified).map( + (key) => sanitizeSQL`${key}` + ) + await context.execute( + ` + UPDATE ${tableName} SET ${keys.map((key) => `${key} = ?`).join(`, `)} WHERE id = ? `, - [...Object.values(mutation), mutation.id] + [ + ...Object.values(mutation.modified), + asPowerSyncRecord(mutation.modified).id, + ] + ) + } ) } protected async handleDelete( - mutation: PowerSyncRecord, - context: LockContext - ) { - debug(`delete`, mutation) - await context.execute( - ` - DELETE FROM ${this.tableName} WHERE id = ? + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean = false + ): Promise { + debug(`update`, mutation) + + return this.handleOperationWithCompletion( + mutation, + context, + waitForCompletion, + async (tableName, mutation) => { + await context.execute( + ` + DELETE FROM ${tableName} WHERE id = ? `, - [mutation.id] + [asPowerSyncRecord(mutation.original).id] + ) + } + ) + } + + /** + * Helper function which wraps a persistence operation by: + * - Fetching the mutation's collection's SQLite table details + * - Executing the mutation + * - Returning the last pending diff op if required + */ + protected async handleOperationWithCompletion( + mutation: PendingMutation, + context: LockContext, + waitForCompletion: boolean, + handler: (tableName: string, mutation: PendingMutation) => Promise + ): Promise { + const { tableName, trackedTableName } = ( + mutation.collection.config as EnhancedPowerSyncCollectionConfig + ).utils.getMeta() + + if (!tableName) { + throw new Error(`Could not get tableName from mutation's collection config. 
+ The provided mutation might not have originated from PowerSync.`) + } + + await handler(sanitizeSQL`${tableName}`, mutation) + + if (!waitForCompletion) { + return null + } + + // Need to get the operation in order to wait for it + const diffOperation = await context.get<{ id: string; timestamp: string }>( + sanitizeSQL`SELECT id, timestamp FROM ${trackedTableName} ORDER BY timestamp DESC LIMIT 1` ) + return { + tableName, + id: diffOperation.id, + operation: mapOperationToPowerSync(mutation.type), + timestamp: diffOperation.timestamp, + } } } diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts new file mode 100644 index 000000000..7af17a065 --- /dev/null +++ b/packages/powersync-db-collection/src/definitions.ts @@ -0,0 +1,73 @@ +import type { AbstractPowerSyncDatabase } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" + +/** + * Configuration interface for PowerSync collection options + * @template T - The type of items in the collection + * @template TSchema - The schema type for validation + */ +/** + * Configuration options for creating a PowerSync collection. + * + * @example + * ```typescript + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * }), + * }) + * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "documents", + * }) + * ) + * ``` + */ +export type PowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = Omit< + BaseCollectionConfig, + `onInsert` | `onUpdate` | `onDelete` | `getKey` +> & { + /** The name of the table in PowerSync database */ + tableName: string + /** The PowerSync database instance */ + database: AbstractPowerSyncDatabase +} + +export type PowerSyncCollectionMeta = { + /** + * The SQLite table representing the collection. + */ + tableName: string + /** + * The internal table used to track diff for the collection. 
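+   * Named `__<tableName>_tracking` by powerSyncCollectionOptions.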
+ */ + trackedTableName: string +} + +export type EnhancedPowerSyncCollectionConfig< + T extends object = Record, + TSchema extends StandardSchemaV1 = never, +> = CollectionConfig & { + id?: string + utils: PowerSyncCollectionUtils + schema?: TSchema +} + +export type PowerSyncCollectionUtils = { + getMeta: () => PowerSyncCollectionMeta +} diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 29c42b7a9..13ace1fb6 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -16,7 +16,7 @@ export function asPowerSyncRecord(record: any): PowerSyncRecord { } /** - * Maps Tanstack DB operations to {@link DiffTriggerOperation} + * Maps {@link DiffTriggerOperation} to TanstackDB operations */ export function mapOperation(operation: DiffTriggerOperation) { switch (operation) { @@ -28,3 +28,19 @@ export function mapOperation(operation: DiffTriggerOperation) { return `delete` } } + +/** + * Maps TanstackDB operations to {@link DiffTriggerOperation} + */ +export function mapOperationToPowerSync(operation: string) { + switch (operation) { + case `insert`: + return DiffTriggerOperation.INSERT + case `update`: + return DiffTriggerOperation.UPDATE + case `delete`: + return DiffTriggerOperation.DELETE + default: + throw new Error(`Unknown operation ${operation} received`) + } +} diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 30e35e857..6c8111f4c 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1 +1,3 @@ +export * from "./definitions" export * from "./powersync" +export * from "./PowerSyncTransactor" diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index b204c04bf..b67c672ee 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -1,105 +1,20 @@ import { DiffTriggerOperation } from "@powersync/common" +import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" -import { mapOperation } from "./helpers" +import type { + EnhancedPowerSyncCollectionConfig, + PowerSyncCollectionConfig, + PowerSyncCollectionUtils, +} from "./definitions" import type { PendingOperation } from "./PendingOperationStore" import type { - BaseCollectionConfig, CollectionConfig, InferSchemaOutput, SyncConfig, - Transaction, } from "@tanstack/db" -import type { - AbstractPowerSyncDatabase, - TriggerDiffRecord, -} from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" - -/** - * Configuration interface for PowerSync collection options - * @template T - The type of items in the collection - * @template TSchema - The schema type for validation - */ -/** - * Configuration options for creating a PowerSync collection. 
- * - * @example - * ```typescript - * const APP_SCHEMA = new Schema({ - * documents: new Table({ - * name: column.text, - * }), - * }) - * - * type Document = (typeof APP_SCHEMA)["types"]["documents"] - * - * const db = new PowerSyncDatabase({ - * database: { - * dbFilename: "test.sqlite", - * }, - * schema: APP_SCHEMA, - * }) - * - * const collection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "documents", - * }) - * ) - * ``` - */ -export type PowerSyncCollectionConfig< - T extends object = Record, - TSchema extends StandardSchemaV1 = never, -> = Omit< - BaseCollectionConfig, - `onInsert` | `onUpdate` | `onDelete` | `getKey` -> & { - /** The name of the table in PowerSync database */ - tableName: string - /** The PowerSync database instance */ - database: AbstractPowerSyncDatabase -} - -export type PowerSyncCollectionUtils = { - /** - * Applies mutations to the PowerSync database. This method is called automatically by the collection's - * insert, update, and delete operations. You typically don't need to call this directly unless you - * have special transaction requirements. - * - * @example - * ```typescript - * // Create a collection - * const collection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "documents", - * }) - * ) - * - * const addTx = createTransaction({ - * autoCommit: false, - * mutationFn: async ({ transaction }) => { - * await collection.utils.mutateTransaction(transaction) - * }, - * }) - * - * addTx.mutate(() => { - * for (let i = 0; i < 5; i++) { - * collection.insert({ id: randomUUID(), name: `tx-${i}` }) - * } - * }) - * - * await addTx.commit() - * await addTx.isPersisted.promise - * ``` - * - * @param transaction - The transaction containing mutations to apply - * @returns A promise that resolves when the mutations have been persisted to PowerSync - */ - mutateTransaction: (transaction: Transaction) => Promise -} +import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -184,14 +99,7 @@ export function powerSyncCollectionOptions< TSchema extends StandardSchemaV1 = never, >( config: PowerSyncCollectionConfig -): CollectionConfig & { - id?: string - utils: PowerSyncCollectionUtils - schema?: TSchema -} { - type Row = Record - type Key = string // we always use uuids for keys - +): EnhancedPowerSyncCollectionConfig { const { database, tableName, ...restConfig } = config /** @@ -204,14 +112,11 @@ export function powerSyncCollectionOptions< * complete to the caller, the in-memory state is already * consistent with the database. */ - const pendingOperationStore = new PendingOperationStore() + const pendingOperationStore = PendingOperationStore.GLOBAL const trackedTableName = `__${tableName}_tracking` const transactor = new PowerSyncTransactor({ database, - pendingOperationStore, - tableName, - trackedTableName, }) /** @@ -220,9 +125,8 @@ export function powerSyncCollectionOptions< * and the in-memory tanstack-db collection. * It is not about sync between a client and a server! 
*/ - type SyncParams = Parameters[`sync`]>[0] - const sync: SyncConfig = { - sync: async (params: SyncParams) => { + const sync: SyncConfig = { + sync: async (params) => { const { begin, write, commit, markReady } = params // Manually create a tracking operation for optimization purposes @@ -257,6 +161,7 @@ export function powerSyncCollectionOptions< id, operation, timestamp, + tableName, }) } @@ -286,7 +191,7 @@ export function powerSyncCollectionOptions< hooks: { beforeCreate: async (context) => { begin() - for (const row of await context.getAll>( + for (const row of await context.getAll( `SELECT * FROM ${tableName}` )) { write({ @@ -309,9 +214,9 @@ export function powerSyncCollectionOptions< getSyncMetadata: undefined, } - const getKey = (record: Record) => record.id as string + const getKey = (record: T) => asPowerSyncRecord(record).id - return { + const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, getKey, sync, @@ -328,13 +233,11 @@ export function powerSyncCollectionOptions< return await transactor.applyTransaction(params.transaction) }, utils: { - mutateTransaction: async (transaction: Transaction) => { - return await transactor.applyTransaction(transaction) - }, + getMeta: () => ({ + tableName, + trackedTableName, + }), }, - } as CollectionConfig & { - id?: string - utils: PowerSyncCollectionUtils - schema?: TSchema } + return outputConfig } diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 4e9676ef0..86e4bcf42 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -9,15 +9,20 @@ import { import { createCollection, createTransaction } from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" +import { PowerSyncTransactor } from "../src/PowerSyncTransactor" import type { AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ + users: new Table({ + name: column.text, + }), documents: new Table({ name: column.text, }), }) type Document = (typeof APP_SCHEMA)[`types`][`documents`] +type User = (typeof APP_SCHEMA)[`types`][`users`] describe(`PowerSync Integration`, () => { async function createDatabase() { @@ -221,7 +226,9 @@ describe(`PowerSync Integration`, () => { const addTx = createTransaction({ autoCommit: false, mutationFn: async ({ transaction }) => { - await collection.utils.mutateTransaction(transaction) + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) }, }) @@ -254,4 +261,69 @@ describe(`PowerSync Integration`, () => { .every((crudEntry) => crudEntry.transactionId == lastTransactionId) ).true }) + + it(`should handle transactions with multiple collections`, async () => { + const db = await createDatabase() + await createTestData(db) + + const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + + const usersCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `users`, + }) + ) + + await documentsCollection.stateWhenReady() + await usersCollection.stateWhenReady() + + expect(documentsCollection.size).toBe(3) + expect(usersCollection.size).toBe(0) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + 
addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) + usersCollection.insert({ id: randomUUID(), name: `user` }) + } + }) + + await addTx.commit() + await addTx.isPersisted.promise + + expect(documentsCollection.size).toBe(8) + expect(usersCollection.size).toBe(5) + + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. + */ + expect( + crudEntries + .reverse() + .slice(0, 10) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) }) From 352829ea0ea740989db7d8731a65a43b3b18f234 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 11:02:27 +0200 Subject: [PATCH 03/56] Optimize transaction waiting --- .../src/PowerSyncTransactor.ts | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 9ce47e908..fade901c1 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -70,9 +70,18 @@ export class PowerSyncTransactor> { * The transaction might contain ops for different collections. * We can do some optimizations for single collection transactions. */ - const isMixedTransaction = mutations.some( - (mutation) => mutation.collection.id !== mutations[0]?.collection.id + const mutationsCollections = mutations.map( + (mutation) => mutation.collection.id ) + const collectionIds = Array.from(new Set(mutationsCollections)) + const lastCollectionMutationIndexes = new Map() + for (const collectionId of collectionIds) { + lastCollectionMutationIndexes.set( + collectionId, + mutationsCollections.lastIndexOf(collectionId) + ) + } + // Persist to PowerSync const { whenComplete } = await this.database.writeTransaction( async (tx) => { @@ -80,13 +89,11 @@ export class PowerSyncTransactor> { for (const [index, mutation] of mutations.entries()) { /** - * For mixed transactions we need to check every operation has been seen. - * This is since the individual tables are watched independently. - * - * For a single collection transaction, we only need to check the last operation - * has been seen. + * Each collection processes events independently. We need to make sure the + * last operation for each collection has been seen. 
*/ - const shouldWait = isMixedTransaction || index == mutations.length - 1 + const shouldWait = + index == lastCollectionMutationIndexes.get(mutation.collection.id) switch (mutation.type) { case `insert`: pendingOperations.push( From 1c75d3de3aab3356856b51f6a5758854411eb2d1 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 11:02:42 +0200 Subject: [PATCH 04/56] Improve test stability --- packages/powersync-db-collection/src/powersync.ts | 15 ++++++++------- .../tests/powersync.test.ts | 13 +++++++++---- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index b67c672ee..f98985eef 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -2,19 +2,19 @@ import { DiffTriggerOperation } from "@powersync/common" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" +import type { TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -207,6 +207,7 @@ export function powerSyncCollectionOptions< return () => { abortController.abort() + // We unfortunately cannot await this disposeTracking() } }, diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 86e4bcf42..3e66a0411 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -1,4 +1,5 @@ import { randomUUID } from "node:crypto" +import { tmpdir } from "node:os" import { CrudEntry, PowerSyncDatabase, @@ -28,15 +29,20 @@ describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ database: { - dbFilename: `test.sqlite`, + dbFilename: `test-${randomUUID()}.sqlite`, + dbLocation: tmpdir(), }, schema: APP_SCHEMA, }) onTestFinished(async () => { - await db.disconnectAndClear() + /** + * We don't clear the DB here since that would cause deletes + * which would trigger collection updates while the DB is closing. + * We currently can't await the async cleanup of TanStack collections (since that method is not async). + * So we use unique temporary databases for each test. 
+ */ await db.close() }) - await db.disconnectAndClear() return db } @@ -84,7 +90,6 @@ describe(`PowerSync Integration`, () => { tableName: `documents`, }) ) - await collection.stateWhenReady() // Verify the collection state contains our items From a892accc24edfa732a5913e57ced64a7f715e4c5 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 11:50:06 +0200 Subject: [PATCH 05/56] Improve cleanup behaviour --- .../powersync-db-collection/src/powersync.ts | 155 +++--- .../tests/powersync.test.ts | 483 ++++++++++-------- 2 files changed, 368 insertions(+), 270 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index f98985eef..694424077 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -113,7 +113,12 @@ export function powerSyncCollectionOptions< * consistent with the database. */ const pendingOperationStore = PendingOperationStore.GLOBAL - const trackedTableName = `__${tableName}_tracking` + // Keep the tracked table unique in case of multiple tabs. + const trackedTableName = `__${tableName}_tracking_${Math.floor( + Math.random() * 0xffffffff + ) + .toString(16) + .padStart(8, `0`)}` const transactor = new PowerSyncTransactor({ database, @@ -126,89 +131,101 @@ export function powerSyncCollectionOptions< * It is not about sync between a client and a server! */ const sync: SyncConfig = { - sync: async (params) => { + sync: (params) => { const { begin, write, commit, markReady } = params // Manually create a tracking operation for optimization purposes const abortController = new AbortController() - database.onChangeWithCallback( - { - onChange: async () => { - await database.writeTransaction(async (context) => { - begin() - const operations = await context.getAll( - `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` - ) - const pendingOperations: Array = [] - - for (const op of operations) { - const { id, operation, timestamp, value } = op - const parsedValue = { - id, - ...JSON.parse(value), + // The sync function needs to be synchronous + async function start() { + database.onChangeWithCallback( + { + onChange: async () => { + await database.writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] + + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? { id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + tableName, + }) } - const parsedPreviousValue = - op.operation == DiffTriggerOperation.UPDATE - ? 
{ id, ...JSON.parse(op.previous_value) } - : null + + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) + + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + }, + }, + { + signal: abortController.signal, + triggerImmediate: false, + tables: [trackedTableName], + } + ) + + const disposeTracking = await database.triggers.createDiffTrigger({ + source: tableName, + destination: trackedTableName, + when: { + [DiffTriggerOperation.INSERT]: `TRUE`, + [DiffTriggerOperation.UPDATE]: `TRUE`, + [DiffTriggerOperation.DELETE]: `TRUE`, + }, + hooks: { + beforeCreate: async (context) => { + begin() + for (const row of await context.getAll( + `SELECT * FROM ${tableName}` + )) { write({ - type: mapOperation(operation), - value: parsedValue, - previousValue: parsedPreviousValue, - }) - pendingOperations.push({ - id, - operation, - timestamp, - tableName, + type: `insert`, + value: row, }) } - - // clear the current operations - await context.execute(`DELETE FROM ${trackedTableName}`) - commit() - pendingOperationStore.resolvePendingFor(pendingOperations) - }) + markReady() + }, }, - }, - { - signal: abortController.signal, - triggerImmediate: false, - tables: [trackedTableName], + }) + + // If the abort controller was aborted while processing the request above + if (abortController.signal.aborted) { + await disposeTracking() + } else { + abortController.signal.addEventListener(`abort`, () => { + disposeTracking() + }) } - ) - - const disposeTracking = await database.triggers.createDiffTrigger({ - source: tableName, - destination: trackedTableName, - when: { - [DiffTriggerOperation.INSERT]: `TRUE`, - [DiffTriggerOperation.UPDATE]: `TRUE`, - [DiffTriggerOperation.DELETE]: `TRUE`, - }, - hooks: { - beforeCreate: async (context) => { - begin() - for (const row of await context.getAll( - `SELECT * FROM ${tableName}` - )) { - write({ - type: `insert`, - value: row, - }) - } - commit() - markReady() - }, - }, - }) + } + + start() return () => { abortController.abort() - // We unfortunately cannot await this - disposeTracking() } }, // Expose the getSyncMetadata function diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 3e66a0411..fdaecb148 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -29,18 +29,13 @@ describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ database: { - dbFilename: `test-${randomUUID()}.sqlite`, + dbFilename: `test.sqlite`, dbLocation: tmpdir(), }, schema: APP_SCHEMA, }) onTestFinished(async () => { - /** - * We don't clear the DB here since that would cause deletes - * which would trigger collection updates while the DB is closing. - * We currently can't await the async cleanup of TanStack collections (since that method is not async). - * So we use unique temporary databases for each test. 
- */ + await db.disconnectAndClear() await db.close() }) return db @@ -67,6 +62,7 @@ describe(`PowerSync Integration`, () => { tableName: `documents`, }) ) + onTestFinished(() => collection.cleanup()) await collection.stateWhenReady() @@ -78,257 +74,342 @@ describe(`PowerSync Integration`, () => { `three`, ]) }) - }) - it(`should update when data syncs`, async () => { - const db = await createDatabase() - await createTestData(db) + it(`should update when data syncs`, async () => { + const db = await createDatabase() + await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - await collection.stateWhenReady() + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) - // Verify the collection state contains our items - expect(collection.size).toBe(3) + await collection.stateWhenReady() - // Make an update, simulates a sync from another client - await db.execute(` + // Verify the collection state contains our items + expect(collection.size).toBe(3) + + // Make an update, simulates a sync from another client + await db.execute(` INSERT into documents (id, name) VALUES (uuid(), 'four') `) - // The collection should update - await vi.waitFor( - () => { - expect(collection.size).toBe(4) - expect(collection.toArray.map((entry) => entry.name)).deep.equals([ - `one`, - `two`, - `three`, - `four`, - ]) - }, - { timeout: 1000 } - ) + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(4) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `two`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) - await db.execute(` + await db.execute(` DELETE from documents WHERE name = 'two' `) - // The collection should update - await vi.waitFor( - () => { - expect(collection.size).toBe(3) - expect(collection.toArray.map((entry) => entry.name)).deep.equals([ - `one`, - `three`, - `four`, - ]) - }, - { timeout: 1000 } - ) + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `one`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) - await db.execute(` + await db.execute(` UPDATE documents SET name = 'updated' WHERE name = 'one' `) - // The collection should update - await vi.waitFor( - () => { - expect(collection.size).toBe(3) - expect(collection.toArray.map((entry) => entry.name)).deep.equals([ - `updated`, - `three`, - `four`, - ]) - }, - { timeout: 1000 } - ) - }) + // The collection should update + await vi.waitFor( + () => { + expect(collection.size).toBe(3) + expect(collection.toArray.map((entry) => entry.name)).deep.equals([ + `updated`, + `three`, + `four`, + ]) + }, + { timeout: 1000 } + ) + }) - it(`should propagate collection mutations to PowerSync`, async () => { - const db = await createDatabase() - await createTestData(db) + it(`should propagate collection mutations to PowerSync`, async () => { + const db = await createDatabase() + await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) - await collection.stateWhenReady() + await collection.stateWhenReady() - // 
Verify the collection state contains our items - expect(collection.size).toBe(3) + // Verify the collection state contains our items + expect(collection.size).toBe(3) - const id = randomUUID() - const tx = collection.insert({ - id, - name: `new`, - }) + const id = randomUUID() + const tx = collection.insert({ + id, + name: `new`, + }) - // The insert should optimistically update the collection - const newDoc = collection.get(id) - expect(newDoc).toBeDefined() - expect(newDoc!.name).toBe(`new`) + // The insert should optimistically update the collection + const newDoc = collection.get(id) + expect(newDoc).toBeDefined() + expect(newDoc!.name).toBe(`new`) - await tx.isPersisted.promise - // The item should now be present in PowerSync - // We should also have patched it back in to Tanstack DB (removing the optimistic state) + await tx.isPersisted.promise + // The item should now be present in PowerSync + // We should also have patched it back in to Tanstack DB (removing the optimistic state) - // Now do an update - await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted - .promise + // Now do an update + await collection.update(id, (d) => (d.name = `updatedNew`)).isPersisted + .promise - const updatedDoc = collection.get(id) - expect(updatedDoc).toBeDefined() - expect(updatedDoc!.name).toBe(`updatedNew`) + const updatedDoc = collection.get(id) + expect(updatedDoc).toBeDefined() + expect(updatedDoc!.name).toBe(`updatedNew`) - await collection.delete(id).isPersisted.promise + await collection.delete(id).isPersisted.promise - // There should be a crud entries for this - const _crudEntries = await db.getAll(` + // There should be a crud entries for this + const _crudEntries = await db.getAll(` SELECT * FROM ps_crud ORDER BY id`) - const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) - - expect(crudEntries.length).toBe(6) - // We can only group transactions for similar operations - expect(crudEntries.map((e) => e.op)).toEqual([ - `PUT`, - `PUT`, - `PUT`, - `PUT`, - `PATCH`, - `DELETE`, - ]) - }) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + expect(crudEntries.length).toBe(6) + // We can only group transactions for similar operations + expect(crudEntries.map((e) => e.op)).toEqual([ + `PUT`, + `PUT`, + `PUT`, + `PUT`, + `PATCH`, + `DELETE`, + ]) + }) - it(`should handle transactions`, async () => { - const db = await createDatabase() - await createTestData(db) + it(`should handle transactions`, async () => { + const db = await createDatabase() + await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + expect(collection.size).toBe(3) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, }) - ) - await collection.stateWhenReady() + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + collection.insert({ id: randomUUID(), name: `tx-${i}` }) + } + }) - expect(collection.size).toBe(3) + await addTx.commit() + await addTx.isPersisted.promise - const addTx = createTransaction({ - autoCommit: false, - mutationFn: async ({ transaction }) => { - await new PowerSyncTransactor({ database: db }).applyTransaction( - 
transaction - ) - }, - }) + expect(collection.size).toBe(8) - addTx.mutate(() => { - for (let i = 0; i < 5; i++) { - collection.insert({ id: randomUUID(), name: `tx-${i}` }) - } + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const lastTransactionId = + crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. + */ + expect( + crudEntries + .reverse() + .slice(0, 5) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true }) - await addTx.commit() - await addTx.isPersisted.promise + it(`should handle transactions with multiple collections`, async () => { + const db = await createDatabase() + await createTestData(db) - expect(collection.size).toBe(8) + const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => documentsCollection.cleanup()) - // fetch the ps_crud items - // There should be a crud entries for this - const _crudEntries = await db.getAll(` - SELECT * FROM ps_crud ORDER BY id`) - const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) - - const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId - /** - * The last items, created in the same transaction, should be in the same - * PowerSync transaction. - */ - expect( - crudEntries - .reverse() - .slice(0, 5) - .every((crudEntry) => crudEntry.transactionId == lastTransactionId) - ).true - }) + const usersCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `users`, + }) + ) + onTestFinished(() => usersCollection.cleanup()) - it(`should handle transactions with multiple collections`, async () => { - const db = await createDatabase() - await createTestData(db) + await documentsCollection.stateWhenReady() + await usersCollection.stateWhenReady() - const documentsCollection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, + expect(documentsCollection.size).toBe(3) + expect(usersCollection.size).toBe(0) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, }) - ) - const usersCollection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `users`, + addTx.mutate(() => { + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) + usersCollection.insert({ id: randomUUID(), name: `user` }) + } }) - ) - await documentsCollection.stateWhenReady() - await usersCollection.stateWhenReady() + await addTx.commit() + await addTx.isPersisted.promise - expect(documentsCollection.size).toBe(3) - expect(usersCollection.size).toBe(0) + expect(documentsCollection.size).toBe(8) + expect(usersCollection.size).toBe(5) - const addTx = createTransaction({ - autoCommit: false, - mutationFn: async ({ transaction }) => { - await new PowerSyncTransactor({ database: db }).applyTransaction( - transaction - ) - }, + // fetch the ps_crud items + // There should be a crud entries for this + const _crudEntries = await db.getAll(` + SELECT * FROM ps_crud ORDER BY id`) + const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) + + const 
lastTransactionId = + crudEntries[crudEntries.length - 1]?.transactionId + /** + * The last items, created in the same transaction, should be in the same + * PowerSync transaction. + */ + expect( + crudEntries + .reverse() + .slice(0, 10) + .every((crudEntry) => crudEntry.transactionId == lastTransactionId) + ).true + }) + }) + + describe(`Multiple Clients`, async () => { + it(`should sync updates between multiple clients`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collectionA = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collectionA.cleanup()) + await collectionA.stateWhenReady() + + const collectionB = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collectionB.cleanup()) + await collectionB.stateWhenReady() + + await createTestData(db) + + // Both collections should have the data present after insertion + await vi.waitFor( + () => { + expect(collectionA.size).eq(3) + expect(collectionB.size).eq(3) + }, + { timeout: 1000 } + ) }) + }) - addTx.mutate(() => { - for (let i = 0; i < 5; i++) { - documentsCollection.insert({ id: randomUUID(), name: `tx-${i}` }) - usersCollection.insert({ id: randomUUID(), name: `user` }) + describe(`Lifecycle`, async () => { + it(`should cleanup resources`, async () => { + const db = await createDatabase() + const collectionOptions = powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + + const meta = collectionOptions.utils.getMeta() + + const tableExists = async (): Promise => { + const result = await db.writeLock(async (tx) => { + return tx.get<{ count: number }>( + ` + SELECT COUNT(*) as count + FROM sqlite_temp_master + WHERE type='table' AND name = ? + `, + [meta.trackedTableName] + ) + }) + return result.count > 0 } - }) - await addTx.commit() - await addTx.isPersisted.promise + const collection = createCollection(collectionOptions) + await collection.stateWhenReady() + expect(await tableExists()).true - expect(documentsCollection.size).toBe(8) - expect(usersCollection.size).toBe(5) + await collection.cleanup() - // fetch the ps_crud items - // There should be a crud entries for this - const _crudEntries = await db.getAll(` - SELECT * FROM ps_crud ORDER BY id`) - const crudEntries = _crudEntries.map((r) => CrudEntry.fromRow(r as any)) - - const lastTransactionId = crudEntries[crudEntries.length - 1]?.transactionId - /** - * The last items, created in the same transaction, should be in the same - * PowerSync transaction. 
- */ - expect( - crudEntries - .reverse() - .slice(0, 10) - .every((crudEntry) => crudEntry.transactionId == lastTransactionId) - ).true + // It seems that even though `cleanup` is async, the sync disposer cannot be async + // We wait for the table to be deleted + await vi.waitFor( + async () => { + expect(await tableExists()).false + }, + { timeout: 1000 } + ) + }) }) }) From 7d9ff7341693dc229e9e2c4d031e2fce650239be Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:00:52 +0200 Subject: [PATCH 06/56] Add rollback test --- .../tests/powersync.test.ts | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index fdaecb148..66e3d39ea 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -336,6 +336,55 @@ describe(`PowerSync Integration`, () => { }) }) + describe(`General use`, async () => { + it(`should rollback transactions on error`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + const addTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, + }) + + expect(collection.size).eq(0) + const id = randomUUID() + // Attempt to insert invalid data + // We can only do this since we aren't using schema validation here + addTx.mutate(() => { + collection.insert({ + id, + name: new Error() as unknown as string, // This will cause a SQL error eventually + }) + }) + + // This should be present in the optimisic state, but should be reverted when attempting to persist + expect(collection.size).eq(1) + expect((collection.get(id)?.name as any) instanceof Error).true + + try { + await addTx.commit() + await addTx.isPersisted.promise + expect.fail(`Should have thrown an error`) + } catch (error) { + expect(error).toBeDefined() + // The collection should be in a clean state + expect(collection.size).toBe(0) + } + }) + }) + describe(`Multiple Clients`, async () => { it(`should sync updates between multiple clients`, async () => { const db = await createDatabase() From d5b3d9995e2e00a67e2658b73911432d94ffd4ee Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:02:27 +0200 Subject: [PATCH 07/56] update dependencies --- packages/powersync-db-collection/package.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 9fa1f15be..5588491e4 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -3,14 +3,17 @@ "description": "PowerSync collection for TanStack DB", "version": "0.0.0", "dependencies": { - "@powersync/common": "^1.39.0", "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", "@tanstack/store": "^0.7.7", "debug": "^4.4.3", "p-defer": "^4.0.1" }, + "peerDependencies": { + "@powersync/common": "^1.39.0" + }, "devDependencies": { + "@powersync/common": "^1.39.0", "@powersync/better-sqlite3": "^0.2.0", "@powersync/node": "^0.11.0", "@types/debug": "^4.1.12", From cc42e946d4403aa275f371960d78a68d9319b064 Mon Sep 17 00:00:00 2001 From: 
stevensJourney Date: Thu, 2 Oct 2025 12:37:13 +0200 Subject: [PATCH 08/56] Add live query test --- .../tests/powersync.test.ts | 62 ++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 66e3d39ea..e3c82cf26 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -7,7 +7,12 @@ import { Table, column, } from "@powersync/node" -import { createCollection, createTransaction } from "@tanstack/db" +import { + createCollection, + createTransaction, + eq, + liveQueryCollectionOptions, +} from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" @@ -383,6 +388,61 @@ describe(`PowerSync Integration`, () => { expect(collection.size).toBe(0) } }) + + it(`should work with live queries`, async () => { + const db = await createDatabase() + + // Create two collections for the same table + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + const liveDocuments = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ document: collection }) + .where(({ document }) => eq(document.name, `book`)) + .select(({ document }) => ({ + id: document.id, + name: document.name, + })), + }) + ) + + expect(liveDocuments.size).eq(0) + + const bookNames = new Set() + + liveDocuments.subscribeChanges((changes) => { + changes + .map((change) => change.value.name) + .forEach((change) => bookNames.add(change!)) + }) + + await collection.insert({ + id: randomUUID(), + name: `notabook`, + }).isPersisted.promise + await collection.insert({ + id: randomUUID(), + name: `book`, + }).isPersisted.promise + + expect(collection.size).eq(2) + await vi.waitFor( + () => { + expect(Array.from(bookNames)).deep.equals([`book`]) + }, + { timeout: 1000 } + ) + }) }) describe(`Multiple Clients`, async () => { From d1de549884315d666b6a68911a5c6ce0e683d8fc Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:45:52 +0200 Subject: [PATCH 09/56] Add docs for PowerSync collection --- docs/collections/powersync-collection.md | 175 +++++++++++++++++++++++ 1 file changed, 175 insertions(+) create mode 100644 docs/collections/powersync-collection.md diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md new file mode 100644 index 000000000..21aef4f30 --- /dev/null +++ b/docs/collections/powersync-collection.md @@ -0,0 +1,175 @@ +--- +title: PowerSync Collection +--- + +# PowerSync Collection + +PowerSync collections provide seamless integration between TanStack DB and [PowerSync](https://powersync.com), enabling automatic synchronization between your in-memory TanStack DB collections and PowerSync's SQLite database. This gives you offline-ready persistence, real-time sync capabilities, and powerful conflict resolution. 
+ +## Overview + +The `@tanstack/powersync-db-collection` package allows you to create collections that: + +- Automatically mirror the state of an underlying PowerSync SQLite database +- Reactively update when PowerSync records change +- Support optimistic mutations with rollback on error +- Provide persistence handlers to keep PowerSync in sync with TanStack DB transactions +- Use PowerSync's efficient SQLite-based storage engine +- Work with PowerSync's real-time sync features for offline-first scenarios +- Leverage PowerSync's built-in conflict resolution and data consistency guarantees +- Enable real-time synchronization with PostgreSQL, MongoDB and MySQL backends + +## 1. Installation + +Install the PowerSync collection package along with your preferred framework integration. +PowerSync currently works with Web, React Native and Node.js. The examples below use the Web SDK. +See the PowerSync quickstart [docs](https://docs.powersync.com/installation/quickstart-guide) for more details. + +```bash +npm install @tanstack/powersync-db-collection @powersync/web @journeyapps/wa-sqlite +``` + +### 2. Create a PowerSync Database and Schema + +```ts +import { Schema, Table, column } from "@powersync/web" + +// Define your schema +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + content: column.text, + created_at: column.text, + updated_at: column.text, + }), +}) + +type Document = (typeof APP_SCHEMA)["types"]["documents"] + +// Initialize PowerSync database +const db = new PowerSyncDatabase({ + database: { + dbFilename: "app.sqlite", + }, + schema: APP_SCHEMA, +}) +``` + +### 3. (optional) Configure Sync with a Backend + +```ts +import { + AbstractPowerSyncDatabase, + PowerSyncBackendConnector, + PowerSyncCredentials, +} from "@powersync/web" + +// TODO implement your logic here +class Connector implements PowerSyncBackendConnector { + fetchCredentials: () => Promise + + /** Upload local changes to the app backend. + * + * Use {@link AbstractPowerSyncDatabase.getCrudBatch} to get a batch of changes to upload. + * + * Any thrown errors will result in a retry after the configured wait period (default: 5 seconds). + */ + uploadData: (database: AbstractPowerSyncDatabase) => Promise +} + +// Configure the client to connect to a PowerSync service and your backend +db.connect(new Connector()) +``` + +### 4. Create a TanStack DB Collection + +```ts +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: "documents", + }) +) +``` + +## Features + +### Offline-First + +PowerSync collections are offline-first by default. All data is stored locally in a SQLite database, allowing your app to work without an internet connection. Changes are automatically synced when connectivity is restored. + +### Real-Time Sync + +When connected to a PowerSync backend, changes are automatically synchronized in real-time across all connected clients. The sync process handles: + +- Bi-directional sync with the server +- Conflict resolution +- Queue management for offline changes +- Automatic retries on connection loss + +### Optimistic Updates + +Updates to the collection are applied optimistically to the local state first, then synchronized with PowerSync and the backend. If an error occurs during sync, the changes are automatically rolled back. 
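+
+As a minimal sketch of this flow (reusing the `documentsCollection` defined above), the inserted document becomes visible in the collection immediately, and awaiting `isPersisted` tells you whether the write was persisted to PowerSync or rolled back:
+
+```ts
+const tx = documentsCollection.insert({
+  id: crypto.randomUUID(),
+  name: "Draft",
+  content: "Visible in the collection right away",
+  created_at: new Date().toISOString(),
+  updated_at: new Date().toISOString(),
+})
+
+try {
+  // Resolves once the change has been persisted to PowerSync.
+  await tx.isPersisted.promise
+} catch (error) {
+  // The optimistic change has already been rolled back at this point.
+  console.error("Insert failed and was rolled back", error)
+}
+```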
+ +## Configuration Options + +The `powerSyncCollectionOptions` function accepts the following options: + +```ts +interface PowerSyncCollectionConfig { + database: PowerSyncDatabase // PowerSync database instance + tableName: string // Name of the table in PowerSync + schema?: Schema // Optional schema for validation +} +``` + +## Advanced Transactions + +When you need more control over transaction handling, such as batching multiple operations or handling complex transaction scenarios, you can use PowerSync's transaction system directly with TanStack DB transactions. + +```ts +import { createTransaction } from "@tanstack/react-db" +import { PowerSyncTransactor } from "@tanstack/powersync-db-collection" + +// Create a transaction that won't auto-commit +const batchTx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + // Use PowerSyncTransactor to apply the transaction to PowerSync + await new PowerSyncTransactor({ database: db }).applyTransaction( + transaction + ) + }, +}) + +// Perform multiple operations in the transaction +batchTx.mutate(() => { + // Add multiple documents in a single transaction + for (let i = 0; i < 5; i++) { + documentsCollection.insert({ + id: crypto.randomUUID(), + name: `Document ${i}`, + content: `Content ${i}`, + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }) + } +}) + +// Commit the transaction +await batchTx.commit() + +// Wait for the changes to be persisted +await batchTx.isPersisted.promise +``` + +This approach allows you to: + +- Batch multiple operations into a single transaction +- Control when the transaction is committed +- Ensure all operations are atomic +- Wait for persistence confirmation +- Handle complex transaction scenarios From ccba6ef234ee73b00631176cf543d4f102e7430c Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 12:57:23 +0200 Subject: [PATCH 10/56] Add Changeset --- .changeset/dark-items-dig.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/dark-items-dig.md diff --git a/.changeset/dark-items-dig.md b/.changeset/dark-items-dig.md new file mode 100644 index 000000000..d11921c5e --- /dev/null +++ b/.changeset/dark-items-dig.md @@ -0,0 +1,5 @@ +--- +"@tanstack/powersync-db-collection": minor +--- + +Initial Release From c887d90ed5b08e60520b42cbcd06f36f26ca1205 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 15:04:03 +0200 Subject: [PATCH 11/56] Added schema conversion and validation --- docs/collections/powersync-collection.md | 27 +++ packages/powersync-db-collection/src/index.ts | 1 + .../powersync-db-collection/src/schema.ts | 197 +++++++++++++++++ .../tests/powersync.test.ts | 44 +++- .../tests/schema.test.ts | 198 ++++++++++++++++++ pnpm-lock.yaml | 6 +- 6 files changed, 469 insertions(+), 4 deletions(-) create mode 100644 packages/powersync-db-collection/src/schema.ts create mode 100644 packages/powersync-db-collection/tests/schema.test.ts diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 21aef4f30..0274a838c 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -83,6 +83,10 @@ db.connect(new Connector()) ### 4. Create a TanStack DB Collection +There are two ways to create a collection: using type inference or using schema validation. 
+ +#### Option 1: Using Type Inference + ```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" @@ -95,6 +99,29 @@ const documentsCollection = createCollection( ) ``` +#### Option 2: Using Schema Validation + +```ts +import { createCollection } from "@tanstack/react-db" +import { + powerSyncCollectionOptions, + convertPowerSyncSchemaToSpecs, +} from "@tanstack/powersync-db-collection" + +// Convert PowerSync schema to TanStack DB schema +const schemas = convertPowerSyncSchemaToSpecs(APP_SCHEMA) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: "documents", + schema: schemas.documents, // Use schema for runtime type validation + }) +) +``` + +With schema validation, the collection will validate all inputs at runtime to ensure they match the PowerSync schema types. This provides an extra layer of type safety beyond TypeScript's compile-time checks. + ## Features ### Offline-First diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 6c8111f4c..152f09076 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1,3 +1,4 @@ export * from "./definitions" export * from "./powersync" export * from "./PowerSyncTransactor" +export * from "./schema" diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts new file mode 100644 index 000000000..dc894bf58 --- /dev/null +++ b/packages/powersync-db-collection/src/schema.ts @@ -0,0 +1,197 @@ +import { ColumnType } from "@powersync/common" +import type { + ColumnsType, + ExtractColumnValueType, + Schema, + Table, +} from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +/** + * Utility type that extracts the typed structure of a table based on its column definitions. + * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. + * + * @template Columns - The ColumnsType definition containing column configurations + * @example + * ```typescript + * const table = new Table({ + * name: column.text, + * age: column.integer + * }) + * type TableType = ExtractedTable + * // Results in: { name: string | null, age: number | null } + * ``` + */ +type ExtractedTable = { + [K in keyof Columns]: ExtractColumnValueType +} & { + id: string +} + +/** + * Converts a PowerSync Table instance to a StandardSchemaV1 schema. + * Creates a schema that validates the structure and types of table records + * according to the PowerSync table definition. 
+ * + * @template Columns - The ColumnsType definition containing column configurations + * @param table - The PowerSync Table instance to convert + * @returns A StandardSchemaV1 compatible schema with proper type validation + * + * @example + * ```typescript + * const usersTable = new Table({ + * name: column.text, + * age: column.integer + * }) + * + * const schema = convertTableToSchema(usersTable) + * // Now you can use this schema with powerSyncCollectionOptions + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "users", + * schema: schema + * }) + * ) + * ``` + */ +export function convertTableToSchema( + table: Table +): StandardSchemaV1> { + // Create validate function that checks types according to column definitions + const validate = ( + value: unknown + ): + | StandardSchemaV1.SuccessResult> + | StandardSchemaV1.FailureResult => { + if (typeof value != `object` || value == null) { + return { + issues: [ + { + message: `Value must be an object`, + }, + ], + } + } + + const issues: Array = [] + + // Check id field + if (!(`id` in value) || typeof (value as any).id != `string`) { + issues.push({ + message: `id field must be a string`, + path: [`id`], + }) + } + + // Check each column + for (const column of table.columns) { + const val = (value as ExtractedTable)[column.name] + + if (val == null) { + continue + } + + switch (column.type) { + case ColumnType.TEXT: + if (typeof val != `string`) { + issues.push({ + message: `${column.name} must be a string or null`, + path: [column.name], + }) + } + break + case ColumnType.INTEGER: + case ColumnType.REAL: + if (typeof val != `number`) { + issues.push({ + message: `${column.name} must be a number or null`, + path: [column.name], + }) + } + break + } + } + + if (issues.length > 0) { + return { issues } + } + + return { value: { ...value } as ExtractedTable } + } + + return { + "~standard": { + version: 1, + vendor: `powersync`, + validate, + types: { + input: {} as ExtractedTable, + output: {} as ExtractedTable, + }, + }, + } +} + +/** + * Converts an entire PowerSync Schema (containing multiple tables) into a collection of StandardSchemaV1 schemas. + * Each table in the schema is converted to its own StandardSchemaV1 schema while preserving all type information. 
+ * + * @template Tables - A record type mapping table names to their Table definitions + * @param schema - The PowerSync Schema containing multiple table definitions + * @returns An object where each key is a table name and each value is that table's StandardSchemaV1 schema + * + * @example + * ```typescript + * const mySchema = new Schema({ + * users: new Table({ + * name: column.text, + * age: column.integer + * }), + * posts: new Table({ + * title: column.text, + * views: column.integer + * }) + * }) + * + * const standardizedSchemas = convertSchemaToSpecs(mySchema) + * // Result has type: + * // { + * // users: StandardSchemaV1<{ name: string | null, age: number | null }>, + * // posts: StandardSchemaV1<{ title: string | null, views: number | null }> + * // } + * + * // Can be used with collections: + * const usersCollection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * tableName: "users", + * schema: standardizedSchemas.users + * }) + * ) + * ``` + */ +export function convertPowerSyncSchemaToSpecs< + Tables extends Record>, +>( + schema: Schema +): { + [TableName in keyof Tables]: StandardSchemaV1< + ExtractedTable + > +} { + // Create a map to store the standardized schemas + const standardizedSchemas = {} as { + [TableName in keyof Tables]: StandardSchemaV1< + ExtractedTable + > + } + + // Iterate through each table in the schema + schema.tables.forEach((table) => { + // Convert each table to a StandardSchemaV1 and store it in the result map + ;(standardizedSchemas as any)[table.name] = convertTableToSchema(table) + }) + + return standardizedSchemas +} diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index e3c82cf26..9c9b95ae0 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -8,6 +8,7 @@ import { column, } from "@powersync/node" import { + SchemaValidationError, createCollection, createTransaction, eq, @@ -16,6 +17,7 @@ import { import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" +import { convertPowerSyncSchemaToSpecs } from "../src/schema" import type { AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ @@ -43,6 +45,8 @@ describe(`PowerSync Integration`, () => { await db.disconnectAndClear() await db.close() }) + // Initial clear in case a test might have failed + await db.disconnectAndClear() return db } @@ -56,6 +60,44 @@ describe(`PowerSync Integration`, () => { `) } + describe(`schema`, () => { + it(`should accept a schema`, async () => { + const db = await createDatabase() + + // the collection should infer types and validate with the schema + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + tableName: `documents`, + schema: convertPowerSyncSchemaToSpecs(APP_SCHEMA).documents, + }) + ) + + collection.insert({ + id: randomUUID(), + name: `aname`, + }) + + collection.insert({ + id: randomUUID(), + name: null, + }) + + expect(collection.size).eq(2) + + // should validate inputs + try { + collection.insert({} as any) + console.log(`failed`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + expect(ex.message).contains(`id field must be a string`) + } + } + }) + }) + describe(`sync`, () => { it(`should initialize and fetch 
initial data`, async () => { const db = await createDatabase() @@ -341,7 +383,7 @@ describe(`PowerSync Integration`, () => { }) }) - describe(`General use`, async () => { + describe(`General use`, () => { it(`should rollback transactions on error`, async () => { const db = await createDatabase() diff --git a/packages/powersync-db-collection/tests/schema.test.ts b/packages/powersync-db-collection/tests/schema.test.ts new file mode 100644 index 000000000..62c562b76 --- /dev/null +++ b/packages/powersync-db-collection/tests/schema.test.ts @@ -0,0 +1,198 @@ +import { Schema, Table, column } from "@powersync/common" +import { describe, expect, it } from "vitest" +import { + convertPowerSyncSchemaToSpecs, + convertTableToSchema, +} from "../src/schema" +import type { StandardSchemaV1 } from "@standard-schema/spec" + +describe(`Schema Conversion`, () => { + describe(`convertTableToSchema`, () => { + it(`should convert a simple table with text and integer columns`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test schema structure + expect(schema).toHaveProperty(`~standard`) + expect(schema[`~standard`].version).toBe(1) + expect(schema[`~standard`].vendor).toBe(`powersync`) + expect(schema[`~standard`].validate).toBeTypeOf(`function`) + + // Test validation with valid data + const validResult = schema[`~standard`].validate({ + id: `123`, + name: `John`, + age: 25, + }) as StandardSchemaV1.SuccessResult + + expect(validResult.issues).toBeUndefined() + expect(validResult.value).toEqual({ + id: `123`, + name: `John`, + age: 25, + }) + + // Test validation with invalid data + const invalidResult = schema[`~standard`].validate({ + id: `123`, + name: 123, // wrong type + age: `25`, // wrong type + }) as StandardSchemaV1.FailureResult + + expect(invalidResult.issues).toHaveLength(2) + expect(invalidResult.issues[0]?.message).toContain(`must be a string`) + expect(invalidResult.issues[1]?.message).toContain(`must be a number`) + }) + + it(`should handle null values correctly`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test validation with null values + const result = schema[`~standard`].validate({ + id: `123`, + name: null, + age: null, + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + name: null, + age: null, + }) + }) + + it(`should require id field`, () => { + const table = new Table({ + name: column.text, + }) + + const schema = convertTableToSchema(table) + + // Test validation without id + const result = schema[`~standard`].validate({ + name: `John`, + }) as StandardSchemaV1.FailureResult + + expect(result.issues).toHaveLength(1) + expect(result.issues[0]?.message).toContain(`id field must be a string`) + }) + + it(`should handle all column types`, () => { + const table = new Table({ + text_col: column.text, + int_col: column.integer, + real_col: column.real, + }) + + const schema = convertTableToSchema(table) + + // Test validation with all types + const result = schema[`~standard`].validate({ + id: `123`, + text_col: `text`, + int_col: 42, + real_col: 3.14, + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + text_col: `text`, + int_col: 42, + real_col: 3.14, + }) + }) + }) + + describe(`convertPowerSyncSchemaToSpecs`, () => { + it(`should convert multiple 
tables in a schema`, () => { + const schema = new Schema({ + users: new Table({ + name: column.text, + age: column.integer, + }), + posts: new Table({ + title: column.text, + views: column.integer, + }), + }) + + const result = convertPowerSyncSchemaToSpecs(schema) + + // Test structure + expect(result).toHaveProperty(`users`) + expect(result).toHaveProperty(`posts`) + + // Test users table schema + const userValidResult = result.users[`~standard`].validate({ + id: `123`, + name: `John`, + age: 25, + }) as StandardSchemaV1.SuccessResult + + expect(userValidResult.issues).toBeUndefined() + expect(userValidResult.value).toEqual({ + id: `123`, + name: `John`, + age: 25, + }) + + // Test posts table schema + const postValidResult = result.posts[`~standard`].validate({ + id: `456`, + title: `Hello`, + views: 100, + }) as StandardSchemaV1.SuccessResult + + expect(postValidResult.issues).toBeUndefined() + expect(postValidResult.value).toEqual({ + id: `456`, + title: `Hello`, + views: 100, + }) + }) + + it(`should handle empty schema`, () => { + const schema = new Schema({}) + const result = convertPowerSyncSchemaToSpecs(schema) + expect(result).toEqual({}) + }) + + it(`should validate each table independently`, () => { + const schema = new Schema({ + users: new Table({ + name: column.text, + }), + posts: new Table({ + views: column.integer, + }), + }) + + const result = convertPowerSyncSchemaToSpecs(schema) + + // Test that invalid data in one table doesn't affect the other + const userInvalidResult = result.users[`~standard`].validate({ + id: `123`, + name: 42, // wrong type + }) as StandardSchemaV1.FailureResult + + const postValidResult = result.posts[`~standard`].validate({ + id: `456`, + views: 100, + }) as StandardSchemaV1.SuccessResult + + expect(userInvalidResult.issues).toHaveLength(1) + expect(postValidResult.issues).toBeUndefined() + }) + }) +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index eade117ab..3661de7ff 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -652,9 +652,6 @@ importers: packages/powersync-db-collection: dependencies: - '@powersync/common': - specifier: ^1.39.0 - version: 1.39.0 '@standard-schema/spec': specifier: ^1.0.0 version: 1.0.0 @@ -674,6 +671,9 @@ importers: '@powersync/better-sqlite3': specifier: ^0.2.0 version: 0.2.0 + '@powersync/common': + specifier: ^1.39.0 + version: 1.39.0 '@powersync/node': specifier: ^0.11.0 version: 0.11.0(@powersync/common@1.39.0) From 860fa2679d3d49b8a2677bdfbfe4af2938ca65d9 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Thu, 2 Oct 2025 18:00:20 +0200 Subject: [PATCH 12/56] ensure observers are ready before proceeding with mutations --- .../src/PowerSyncTransactor.ts | 19 ++++++++++++++++--- .../powersync-db-collection/src/powersync.ts | 3 ++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index fade901c1..1c8b7445a 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -70,18 +70,31 @@ export class PowerSyncTransactor> { * The transaction might contain ops for different collections. * We can do some optimizations for single collection transactions. 
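   * The index of the last mutation for each collection is recorded below so
   * that, when waiting for the diff trigger to observe the persisted changes,
   * only the final operation of each collection needs to be awaited.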
*/ - const mutationsCollections = mutations.map( + const mutationsCollectionIds = mutations.map( (mutation) => mutation.collection.id ) - const collectionIds = Array.from(new Set(mutationsCollections)) + const collectionIds = Array.from(new Set(mutationsCollectionIds)) const lastCollectionMutationIndexes = new Map() + const allCollections = collectionIds + .map((id) => mutations.find((mutation) => mutation.collection.id == id)!) + .map((mutation) => mutation.collection) for (const collectionId of collectionIds) { lastCollectionMutationIndexes.set( collectionId, - mutationsCollections.lastIndexOf(collectionId) + mutationsCollectionIds.lastIndexOf(collectionId) ) } + // Check all the observers are ready before taking a lock + await Promise.all( + allCollections.map(async (collection) => { + if (collection.isReady()) { + return + } + await new Promise((resolve) => collection.onFirstReady(resolve)) + }) + ) + // Persist to PowerSync const { whenComplete } = await this.database.writeTransaction( async (tx) => { diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 694424077..a50115abf 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -133,7 +133,6 @@ export function powerSyncCollectionOptions< const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params - // Manually create a tracking operation for optimization purposes const abortController = new AbortController() @@ -237,6 +236,8 @@ export function powerSyncCollectionOptions< const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, getKey, + // Syncing should start immediately since we need to monitor the changes for mutations + startSync: true, sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation From ffa68d1d66b6bb4428ac3f9f869f22fd4f7bc946 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Fri, 3 Oct 2025 09:24:28 +0200 Subject: [PATCH 13/56] Add logging --- .../powersync-db-collection/src/powersync.ts | 90 +++++++++++-------- 1 file changed, 52 insertions(+), 38 deletions(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index a50115abf..fcd146669 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -128,55 +128,61 @@ export function powerSyncCollectionOptions< * "sync" * Notice that this describes the Sync between the local SQLite table * and the in-memory tanstack-db collection. - * It is not about sync between a client and a server! 
*/ const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params - // Manually create a tracking operation for optimization purposes const abortController = new AbortController() // The sync function needs to be synchronous async function start() { + database.logger.info(`Sync is starting`) database.onChangeWithCallback( { onChange: async () => { - await database.writeTransaction(async (context) => { - begin() - const operations = await context.getAll( - `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` - ) - const pendingOperations: Array = [] + await database + .writeTransaction(async (context) => { + begin() + const operations = await context.getAll( + `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC` + ) + const pendingOperations: Array = [] - for (const op of operations) { - const { id, operation, timestamp, value } = op - const parsedValue = { - id, - ...JSON.parse(value), + for (const op of operations) { + const { id, operation, timestamp, value } = op + const parsedValue = { + id, + ...JSON.parse(value), + } + const parsedPreviousValue = + op.operation == DiffTriggerOperation.UPDATE + ? { id, ...JSON.parse(op.previous_value) } + : null + write({ + type: mapOperation(operation), + value: parsedValue, + previousValue: parsedPreviousValue, + }) + pendingOperations.push({ + id, + operation, + timestamp, + tableName, + }) } - const parsedPreviousValue = - op.operation == DiffTriggerOperation.UPDATE - ? { id, ...JSON.parse(op.previous_value) } - : null - write({ - type: mapOperation(operation), - value: parsedValue, - previousValue: parsedPreviousValue, - }) - pendingOperations.push({ - id, - operation, - timestamp, - tableName, - }) - } - // clear the current operations - await context.execute(`DELETE FROM ${trackedTableName}`) + // clear the current operations + await context.execute(`DELETE FROM ${trackedTableName}`) - commit() - pendingOperationStore.resolvePendingFor(pendingOperations) - }) + commit() + pendingOperationStore.resolvePendingFor(pendingOperations) + }) + .catch((error) => { + database.logger.error( + `An error has been detected in the sync handler`, + error + ) + }) }, }, { @@ -207,6 +213,7 @@ export function powerSyncCollectionOptions< } commit() markReady() + database.logger.info(`Sync is ready`) }, }, }) @@ -215,15 +222,22 @@ export function powerSyncCollectionOptions< if (abortController.signal.aborted) { await disposeTracking() } else { - abortController.signal.addEventListener(`abort`, () => { - disposeTracking() - }) + abortController.signal.addEventListener( + `abort`, + () => { + disposeTracking() + }, + { once: true } + ) } } - start() + start().catch((error) => + database.logger.error(`Could not start syncing process`, error) + ) return () => { + database.logger.info(`Sync has been stopped`) abortController.abort() } }, From 79abf05564129ab1c3743c6d47030621a7689ac6 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Fri, 3 Oct 2025 09:37:29 +0200 Subject: [PATCH 14/56] Implement batching during initial sync --- .../src/definitions.ts | 18 +++++++ .../powersync-db-collection/src/powersync.ts | 50 ++++++++++++------- 2 files changed, 50 insertions(+), 18 deletions(-) diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 7af17a065..69f3b243f 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -46,6 +46,19 @@ export type PowerSyncCollectionConfig< tableName: string 
/** The PowerSync database instance */ database: AbstractPowerSyncDatabase + /** + * The maximum number of documents to read from the SQLite table + * in a single batch during the initial sync between PowerSync and the + * in-memory TanStack DB collection. + * + * @remarks + * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified. + * - Larger values reduce the number of round trips to the storage + * engine but increase memory usage per batch. + * - Smaller values may lower memory usage and allow earlier + * streaming of initial results, at the cost of more query calls. + */ + syncBatchSize?: number } export type PowerSyncCollectionMeta = { @@ -71,3 +84,8 @@ export type EnhancedPowerSyncCollectionConfig< export type PowerSyncCollectionUtils = { getMeta: () => PowerSyncCollectionMeta } + +/** + * Default value for {@link PowerSyncCollectionConfig#syncBatchSize} + */ +export const DEFAULT_BATCH_SIZE = 1000 diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index fcd146669..86a69d07c 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -1,20 +1,21 @@ -import { DiffTriggerOperation } from "@powersync/common" +import { DiffTriggerOperation, sanitizeSQL } from "@powersync/common" +import { DEFAULT_BATCH_SIZE } from "./definitions" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" -import type { TriggerDiffRecord } from "@powersync/common" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -100,7 +101,12 @@ export function powerSyncCollectionOptions< >( config: PowerSyncCollectionConfig ): EnhancedPowerSyncCollectionConfig { - const { database, tableName, ...restConfig } = config + const { + database, + tableName, + syncBatchSize = DEFAULT_BATCH_SIZE, + ...restConfig + } = config /** * The onInsert, onUpdate, onDelete handlers should only return @@ -202,16 +208,24 @@ export function powerSyncCollectionOptions< }, hooks: { beforeCreate: async (context) => { - begin() - for (const row of await context.getAll( - `SELECT * FROM ${tableName}` - )) { - write({ - type: `insert`, - value: row, - }) + let currentBatchCount = syncBatchSize + let cursor = 0 + while (currentBatchCount == syncBatchSize) { + begin() + const batchItems = await context.getAll( + sanitizeSQL`SELECT * FROM ${tableName} LIMIT ? OFFSET ?`, + [syncBatchSize, cursor] + ) + currentBatchCount = batchItems.length + cursor += currentBatchCount + for (const row of batchItems) { + write({ + type: `insert`, + value: row, + }) + } + commit() } - commit() markReady() database.logger.info(`Sync is ready`) }, From 237ed35cd277adde349e3737859e8f1272fd92d6 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Fri, 3 Oct 2025 10:52:52 +0200 Subject: [PATCH 15/56] Update log messages. 
Avoid requirement for NPM install scripts. --- packages/powersync-db-collection/package.json | 5 +- .../powersync-db-collection/src/powersync.ts | 31 +++++++---- .../tests/powersync.test.ts | 1 + pnpm-lock.yaml | 51 ++++++------------- 4 files changed, 39 insertions(+), 49 deletions(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 5588491e4..9e1e0213e 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -13,9 +13,8 @@ "@powersync/common": "^1.39.0" }, "devDependencies": { - "@powersync/common": "^1.39.0", - "@powersync/better-sqlite3": "^0.2.0", - "@powersync/node": "^0.11.0", + "@powersync/common": "0.0.0-dev-20251003085035", + "@powersync/node": "0.0.0-dev-20251003085035", "@types/debug": "^4.1.12", "@vitest/coverage-istanbul": "^3.2.4" }, diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 86a69d07c..ed24ba369 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -3,19 +3,19 @@ import { DEFAULT_BATCH_SIZE } from "./definitions" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" +import type { TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection @@ -142,7 +142,9 @@ export function powerSyncCollectionOptions< // The sync function needs to be synchronous async function start() { - database.logger.info(`Sync is starting`) + database.logger.info( + `Sync is starting for ${tableName} into ${trackedTableName}` + ) database.onChangeWithCallback( { onChange: async () => { @@ -227,7 +229,9 @@ export function powerSyncCollectionOptions< commit() } markReady() - database.logger.info(`Sync is ready`) + database.logger.info( + `Sync is ready for ${tableName} into ${trackedTableName}` + ) }, }, }) @@ -247,11 +251,16 @@ export function powerSyncCollectionOptions< } start().catch((error) => - database.logger.error(`Could not start syncing process`, error) + database.logger.error( + `Could not start syncing process for ${tableName} into ${trackedTableName}`, + error + ) ) return () => { - database.logger.info(`Sync has been stopped`) + database.logger.info( + `Sync has been stopped for ${tableName} into ${trackedTableName}` + ) abortController.abort() } }, diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 9c9b95ae0..78dcca83b 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -38,6 +38,7 @@ describe(`PowerSync Integration`, () => { database: { dbFilename: `test.sqlite`, dbLocation: tmpdir(), + implementation: { type: 
`node:sqlite` }, }, schema: APP_SCHEMA, }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3661de7ff..c0815337d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -668,15 +668,12 @@ importers: specifier: ^4.0.1 version: 4.0.1 devDependencies: - '@powersync/better-sqlite3': - specifier: ^0.2.0 - version: 0.2.0 '@powersync/common': - specifier: ^1.39.0 - version: 1.39.0 + specifier: 0.0.0-dev-20251003085035 + version: 0.0.0-dev-20251003085035 '@powersync/node': - specifier: ^0.11.0 - version: 0.11.0(@powersync/common@1.39.0) + specifier: 0.0.0-dev-20251003085035 + version: 0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035) '@types/debug': specifier: ^4.1.12 version: 4.1.12 @@ -2860,16 +2857,17 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@powersync/better-sqlite3@0.2.0': - resolution: {integrity: sha512-8otwueqHJqwilUz/vLENlpMp2c4k/TV6hGX016XrZxSkizDAil99yRm7lAVwpbYYGuSgyzidyDh6vy6PY+m4kw==} + '@powersync/common@0.0.0-dev-20251003085035': + resolution: {integrity: sha512-k69aY8onIM4eXvj/obFkCadGmKgqMKSgk90Sih8lKF9BrGPGpQU/MtB6673LmhavURQnaS340FpBsL/4p/gk0g==} - '@powersync/common@1.39.0': - resolution: {integrity: sha512-qGPl/LPRoopNWjduGXfN+P3PsdTMfFR9YI2TbsLA++VRMK+10To9ey3Z6yprKoVbdLmisPde9mAaTvb1ugkeyg==} - - '@powersync/node@0.11.0': - resolution: {integrity: sha512-33J3/TnZ+s9mu0pHFfJCZhSQp7C+Ai4/1sBxC7aNdiRCyvg8DBYY8P7gMYXlyZfyMQHc0hfs3GDJzxYOBMNAyQ==} + '@powersync/node@0.0.0-dev-20251003085035': + resolution: {integrity: sha512-PbmpmiaHxWNDLQw4bNBakezQwPXCxqeef8E6uzxUH+baYmsIfXx5OY+lI6XtdQ+PbLjY4hFtxHnDb2qpSzJZPg==} peerDependencies: - '@powersync/common': ^1.39.0 + '@powersync/common': 0.0.0-dev-20251003085035 + better-sqlite3: 12.x + peerDependenciesMeta: + better-sqlite3: + optional: true '@protobufjs/aspromise@1.1.2': resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} @@ -4335,9 +4333,6 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - bindings@1.5.0: - resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} - body-parser@1.20.3: resolution: {integrity: sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} @@ -5322,9 +5317,6 @@ packages: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} - file-uri-to-path@1.0.0: - resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} - fill-range@7.1.1: resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} @@ -10650,18 +10642,13 @@ snapshots: '@pkgr/core@0.2.9': {} - '@powersync/better-sqlite3@0.2.0': - dependencies: - bindings: 1.5.0 - - '@powersync/common@1.39.0': + '@powersync/common@0.0.0-dev-20251003085035': dependencies: js-logger: 1.6.1 - '@powersync/node@0.11.0(@powersync/common@1.39.0)': + '@powersync/node@0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035)': dependencies: - '@powersync/better-sqlite3': 0.2.0 - '@powersync/common': 1.39.0 + '@powersync/common': 
0.0.0-dev-20251003085035 async-lock: 1.4.1 bson: 6.10.4 comlink: 4.4.2 @@ -12514,10 +12501,6 @@ snapshots: binary-extensions@2.3.0: {} - bindings@1.5.0: - dependencies: - file-uri-to-path: 1.0.0 - body-parser@1.20.3: dependencies: bytes: 3.1.2 @@ -13682,8 +13665,6 @@ snapshots: dependencies: flat-cache: 4.0.1 - file-uri-to-path@1.0.0: {} - fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 From 8d489e9556182120c97f2742001c066fd0c54210 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 21 Oct 2025 13:59:39 +0200 Subject: [PATCH 16/56] Schemas Step 1: Infer types from PowerSync schema table. --- docs/collections/powersync-collection.md | 10 +- packages/powersync-db-collection/package.json | 6 +- .../src/definitions.ts | 23 ++- .../powersync-db-collection/src/helpers.ts | 22 +++ .../powersync-db-collection/src/powersync.ts | 87 ++++++----- .../powersync-db-collection/src/schema.ts | 29 +--- .../tests/powersync.test.ts | 138 ++++++------------ pnpm-lock.yaml | 24 +-- 8 files changed, 158 insertions(+), 181 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 0274a838c..4b070eb8a 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -85,22 +85,26 @@ db.connect(new Connector()) There are two ways to create a collection: using type inference or using schema validation. -#### Option 1: Using Type Inference +#### Option 1: Using Table Type Inference + +The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection data and operations. ```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" const documentsCollection = createCollection( - powerSyncCollectionOptions({ + powerSyncCollectionOptions({ database: db, - tableName: "documents", + table: APP_SCHEMA.props.documents, }) ) ``` #### Option 2: Using Schema Validation +TODO + ```ts import { createCollection } from "@tanstack/react-db" import { diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index 9e1e0213e..f5af86b27 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -10,11 +10,11 @@ "p-defer": "^4.0.1" }, "peerDependencies": { - "@powersync/common": "^1.39.0" + "@powersync/common": "0.0.0-dev-20251021113138" }, "devDependencies": { - "@powersync/common": "0.0.0-dev-20251003085035", - "@powersync/node": "0.0.0-dev-20251003085035", + "@powersync/common": "0.0.0-dev-20251021113138", + "@powersync/node": "0.0.0-dev-20251021113138", "@types/debug": "^4.1.12", "@vitest/coverage-istanbul": "^3.2.4" }, diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 69f3b243f..dec66a38d 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -1,4 +1,9 @@ -import type { AbstractPowerSyncDatabase } from "@powersync/common" +import type { ExtractedTable } from "./helpers" +import type { + AbstractPowerSyncDatabase, + ColumnsType, + Table, +} from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" @@ -36,14 +41,14 @@ import type { BaseCollectionConfig, 
CollectionConfig } from "@tanstack/db" * ``` */ export type PowerSyncCollectionConfig< - T extends object = Record, + TableType extends Table = Table, TSchema extends StandardSchemaV1 = never, > = Omit< - BaseCollectionConfig, + BaseCollectionConfig, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` > & { - /** The name of the table in PowerSync database */ - tableName: string + /** The PowerSync Schema Table definition */ + table: TableType /** The PowerSync database instance */ database: AbstractPowerSyncDatabase /** @@ -73,9 +78,13 @@ export type PowerSyncCollectionMeta = { } export type EnhancedPowerSyncCollectionConfig< - T extends object = Record, + TableType extends Table = Table, TSchema extends StandardSchemaV1 = never, -> = CollectionConfig & { +> = CollectionConfig< + ExtractedTable, + string, + TSchema +> & { id?: string utils: PowerSyncCollectionUtils schema?: TSchema diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 13ace1fb6..6cd309990 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -1,4 +1,5 @@ import { DiffTriggerOperation } from "@powersync/common" +import type { ColumnsType, ExtractColumnValueType } from "@powersync/common" /** * All PowerSync table records have a uuid `id` column. @@ -8,6 +9,27 @@ export type PowerSyncRecord = { [key: string]: unknown } +/** + * Utility type that extracts the typed structure of a table based on its column definitions. + * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. + * + * @template Columns - The ColumnsType definition containing column configurations + * @example + * ```typescript + * const table = new Table({ + * name: column.text, + * age: column.integer + * }) + * type TableType = ExtractedTable + * // Results in: { name: string | null, age: number | null } + * ``` + */ +export type ExtractedTable = { + [K in keyof Columns]: ExtractColumnValueType +} & { + id: string +} + export function asPowerSyncRecord(record: any): PowerSyncRecord { if (typeof record.id !== `string`) { throw new Error(`Record must have a string id field`) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index ed24ba369..049647d5a 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -3,19 +3,17 @@ import { DEFAULT_BATCH_SIZE } from "./definitions" import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" -import type { TriggerDiffRecord } from "@powersync/common" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" +import { convertTableToSchema } from "./schema" +import type { ExtractedTable } from "./helpers" +import type { PendingOperation } from "./PendingOperationStore" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" -import type { PendingOperation } from "./PendingOperationStore" +import type { CollectionConfig, SyncConfig } from "@tanstack/db" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { ColumnsType, Table, TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use 
with a standard Collection @@ -42,18 +40,19 @@ import type { PendingOperation } from "./PendingOperationStore" * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", - * schema: APP_SCHEMA, + * table: APP_SCHEMA.props.documents, + * schema: TODO * }) * ) * ``` */ -export function powerSyncCollectionOptions( - config: PowerSyncCollectionConfig, T> -): CollectionConfig, string, T> & { - schema: T - utils: PowerSyncCollectionUtils -} +// TODO!!! +// export function powerSyncCollectionOptions( +// config: PowerSyncCollectionConfig, T> +// ): CollectionConfig, string, T> & { +// schema: T +// utils: PowerSyncCollectionUtils +// } /** * Creates a PowerSync collection configuration without schema validation. @@ -76,18 +75,20 @@ export function powerSyncCollectionOptions( * }) * * const collection = createCollection( - * powerSyncCollectionOptions({ + * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", + * table: APP_SCHEMA.props.documents * }) * ) * ``` */ -export function powerSyncCollectionOptions( - config: PowerSyncCollectionConfig & { +export function powerSyncCollectionOptions< + TableType extends Table = Table, +>( + config: PowerSyncCollectionConfig & { schema?: never } -): CollectionConfig & { +): CollectionConfig, string> & { schema?: never utils: PowerSyncCollectionUtils } @@ -96,18 +97,24 @@ export function powerSyncCollectionOptions( * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. */ export function powerSyncCollectionOptions< - T extends object = Record, + TableType extends Table = Table, TSchema extends StandardSchemaV1 = never, >( - config: PowerSyncCollectionConfig -): EnhancedPowerSyncCollectionConfig { + config: PowerSyncCollectionConfig +): EnhancedPowerSyncCollectionConfig { const { database, - tableName, + table, + schema: inputSchema, syncBatchSize = DEFAULT_BATCH_SIZE, ...restConfig } = config + type RecordType = ExtractedTable + const { viewName } = table + + // We can do basic runtime validations for columns if not explicit schema has been provided + const schema = inputSchema ?? (convertTableToSchema(table) as TSchema) /** * The onInsert, onUpdate, onDelete handlers should only return * after we have written the changes to Tanstack DB. @@ -120,13 +127,13 @@ export function powerSyncCollectionOptions< */ const pendingOperationStore = PendingOperationStore.GLOBAL // Keep the tracked table unique in case of multiple tabs. - const trackedTableName = `__${tableName}_tracking_${Math.floor( + const trackedTableName = `__${viewName}_tracking_${Math.floor( Math.random() * 0xffffffff ) .toString(16) .padStart(8, `0`)}` - const transactor = new PowerSyncTransactor({ + const transactor = new PowerSyncTransactor({ database, }) @@ -135,7 +142,7 @@ export function powerSyncCollectionOptions< * Notice that this describes the Sync between the local SQLite table * and the in-memory tanstack-db collection. 
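   * A diff trigger mirrors row changes from the SQLite view into a temporary
   * tracking table; the onChange handler below drains that tracking table in
   * timestamp order, applies the operations to the collection, and resolves
   * any transactor calls waiting on those operations.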
*/ - const sync: SyncConfig = { + const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params const abortController = new AbortController() @@ -143,7 +150,7 @@ export function powerSyncCollectionOptions< // The sync function needs to be synchronous async function start() { database.logger.info( - `Sync is starting for ${tableName} into ${trackedTableName}` + `Sync is starting for ${viewName} into ${trackedTableName}` ) database.onChangeWithCallback( { @@ -175,7 +182,7 @@ export function powerSyncCollectionOptions< id, operation, timestamp, - tableName, + tableName: viewName, }) } @@ -201,7 +208,7 @@ export function powerSyncCollectionOptions< ) const disposeTracking = await database.triggers.createDiffTrigger({ - source: tableName, + source: viewName, destination: trackedTableName, when: { [DiffTriggerOperation.INSERT]: `TRUE`, @@ -214,8 +221,8 @@ export function powerSyncCollectionOptions< let cursor = 0 while (currentBatchCount == syncBatchSize) { begin() - const batchItems = await context.getAll( - sanitizeSQL`SELECT * FROM ${tableName} LIMIT ? OFFSET ?`, + const batchItems = await context.getAll( + sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? OFFSET ?`, [syncBatchSize, cursor] ) currentBatchCount = batchItems.length @@ -230,7 +237,7 @@ export function powerSyncCollectionOptions< } markReady() database.logger.info( - `Sync is ready for ${tableName} into ${trackedTableName}` + `Sync is ready for ${viewName} into ${trackedTableName}` ) }, }, @@ -252,14 +259,14 @@ export function powerSyncCollectionOptions< start().catch((error) => database.logger.error( - `Could not start syncing process for ${tableName} into ${trackedTableName}`, + `Could not start syncing process for ${viewName} into ${trackedTableName}`, error ) ) return () => { database.logger.info( - `Sync has been stopped for ${tableName} into ${trackedTableName}` + `Sync has been stopped for ${viewName} into ${trackedTableName}` ) abortController.abort() } @@ -268,16 +275,18 @@ export function powerSyncCollectionOptions< getSyncMetadata: undefined, } - const getKey = (record: T) => asPowerSyncRecord(record).id + const getKey = (record: RecordType) => asPowerSyncRecord(record).id - const outputConfig: EnhancedPowerSyncCollectionConfig = { + const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, + schema, getKey, // Syncing should start immediately since we need to monitor the changes for mutations startSync: true, sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation + params.transaction return await transactor.applyTransaction(params.transaction) }, onUpdate: async (params) => { @@ -290,7 +299,7 @@ export function powerSyncCollectionOptions< }, utils: { getMeta: () => ({ - tableName, + tableName: viewName, trackedTableName, }), }, diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts index dc894bf58..008a88424 100644 --- a/packages/powersync-db-collection/src/schema.ts +++ b/packages/powersync-db-collection/src/schema.ts @@ -1,32 +1,7 @@ import { ColumnType } from "@powersync/common" -import type { - ColumnsType, - ExtractColumnValueType, - Schema, - Table, -} from "@powersync/common" +import type { ColumnsType, Schema, Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" - -/** - * Utility type that extracts the typed structure of a table based on its column definitions. 
- * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. - * - * @template Columns - The ColumnsType definition containing column configurations - * @example - * ```typescript - * const table = new Table({ - * name: column.text, - * age: column.integer - * }) - * type TableType = ExtractedTable - * // Results in: { name: string | null, age: number | null } - * ``` - */ -type ExtractedTable = { - [K in keyof Columns]: ExtractColumnValueType -} & { - id: string -} +import type { ExtractedTable } from "./helpers" /** * Converts a PowerSync Table instance to a StandardSchemaV1 schema. diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 78dcca83b..7e182a92c 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -17,21 +17,20 @@ import { import { describe, expect, it, onTestFinished, vi } from "vitest" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" -import { convertPowerSyncSchemaToSpecs } from "../src/schema" import type { AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ users: new Table({ name: column.text, }), - documents: new Table({ - name: column.text, - }), + documents: new Table( + { + name: column.text, + }, + { viewName: `documents` } + ), }) -type Document = (typeof APP_SCHEMA)[`types`][`documents`] -type User = (typeof APP_SCHEMA)[`types`][`users`] - describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ @@ -51,6 +50,18 @@ describe(`PowerSync Integration`, () => { return db } + function createDocumentsCollection(db: PowerSyncDatabase) { + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + // We get typing and a default validator from this + table: APP_SCHEMA.props.documents, + }) + ) + onTestFinished(() => collection.cleanup()) + return collection + } + async function createTestData(db: AbstractPowerSyncDatabase) { await db.execute(` INSERT into documents (id, name) @@ -62,17 +73,11 @@ describe(`PowerSync Integration`, () => { } describe(`schema`, () => { - it(`should accept a schema`, async () => { + it(`should use basic runtime validations from automatic schema`, async () => { const db = await createDatabase() // the collection should infer types and validate with the schema - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - schema: convertPowerSyncSchemaToSpecs(APP_SCHEMA).documents, - }) - ) + const collection = createDocumentsCollection(db) collection.insert({ id: randomUUID(), @@ -103,14 +108,7 @@ describe(`PowerSync Integration`, () => { it(`should initialize and fetch initial data`, async () => { const db = await createDatabase() await createTestData(db) - - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = createDocumentsCollection(db) await collection.stateWhenReady() @@ -127,13 +125,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = 
createDocumentsCollection(db) await collection.stateWhenReady() @@ -203,14 +195,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) - + const collection = createDocumentsCollection(db) await collection.stateWhenReady() // Verify the collection state contains our items @@ -263,14 +248,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) - + const collection = createDocumentsCollection(db) await collection.stateWhenReady() expect(collection.size).toBe(3) @@ -319,18 +297,12 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() await createTestData(db) - const documentsCollection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => documentsCollection.cleanup()) + const documentsCollection = createDocumentsCollection(db) const usersCollection = createCollection( - powerSyncCollectionOptions({ + powerSyncCollectionOptions({ database: db, - tableName: `users`, + table: APP_SCHEMA.props.users, }) ) onTestFinished(() => usersCollection.cleanup()) @@ -388,15 +360,20 @@ describe(`PowerSync Integration`, () => { it(`should rollback transactions on error`, async () => { const db = await createDatabase() + const options = powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }) + + // This will cause the transactor to fail when writing to SQLite + vi.spyOn(options.utils, `getMeta`).mockImplementation(() => ({ + tableName: `fakeTable`, + trackedTableName: `error`, + })) // Create two collections for the same table - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = createCollection(options) + onTestFinished(() => collection.cleanup()) const addTx = createTransaction({ autoCommit: false, mutationFn: async ({ transaction }) => { @@ -407,19 +384,18 @@ describe(`PowerSync Integration`, () => { }) expect(collection.size).eq(0) + await collection.stateWhenReady() + const id = randomUUID() - // Attempt to insert invalid data - // We can only do this since we aren't using schema validation here addTx.mutate(() => { collection.insert({ id, - name: new Error() as unknown as string, // This will cause a SQL error eventually + name: `aname`, }) }) - // This should be present in the optimisic state, but should be reverted when attempting to persist + // This should be present in the optimistic state, but should be reverted when attempting to persist expect(collection.size).eq(1) - expect((collection.get(id)?.name as any) instanceof Error).true try { await addTx.commit() @@ -436,13 +412,7 @@ describe(`PowerSync Integration`, () => { const db = await createDatabase() // Create two collections for the same table - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collection.cleanup()) + const collection = createDocumentsCollection(db) await collection.stateWhenReady() @@ -493,22 +463,10 @@ describe(`PowerSync Integration`, () => { const db = 
await createDatabase() // Create two collections for the same table - const collectionA = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collectionA.cleanup()) + const collectionA = createDocumentsCollection(db) await collectionA.stateWhenReady() - const collectionB = createCollection( - powerSyncCollectionOptions({ - database: db, - tableName: `documents`, - }) - ) - onTestFinished(() => collectionB.cleanup()) + const collectionB = createDocumentsCollection(db) await collectionB.stateWhenReady() await createTestData(db) @@ -527,9 +485,9 @@ describe(`PowerSync Integration`, () => { describe(`Lifecycle`, async () => { it(`should cleanup resources`, async () => { const db = await createDatabase() - const collectionOptions = powerSyncCollectionOptions({ + const collectionOptions = powerSyncCollectionOptions({ database: db, - tableName: `documents`, + table: APP_SCHEMA.props.documents, }) const meta = collectionOptions.utils.getMeta() diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c0815337d..ae36530b8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -669,11 +669,11 @@ importers: version: 4.0.1 devDependencies: '@powersync/common': - specifier: 0.0.0-dev-20251003085035 - version: 0.0.0-dev-20251003085035 + specifier: 0.0.0-dev-20251021113138 + version: 0.0.0-dev-20251021113138 '@powersync/node': - specifier: 0.0.0-dev-20251003085035 - version: 0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035) + specifier: 0.0.0-dev-20251021113138 + version: 0.0.0-dev-20251021113138(@powersync/common@0.0.0-dev-20251021113138) '@types/debug': specifier: ^4.1.12 version: 4.1.12 @@ -2857,13 +2857,13 @@ packages: resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@powersync/common@0.0.0-dev-20251003085035': - resolution: {integrity: sha512-k69aY8onIM4eXvj/obFkCadGmKgqMKSgk90Sih8lKF9BrGPGpQU/MtB6673LmhavURQnaS340FpBsL/4p/gk0g==} + '@powersync/common@0.0.0-dev-20251021113138': + resolution: {integrity: sha512-+E9+4p5xgzdIPr38EayS83+J04G9E89cVUMTCihgJ+P1VqZljuTBj/fnqshuALfcPofOpTryobsO+7CeWNuyEw==} - '@powersync/node@0.0.0-dev-20251003085035': - resolution: {integrity: sha512-PbmpmiaHxWNDLQw4bNBakezQwPXCxqeef8E6uzxUH+baYmsIfXx5OY+lI6XtdQ+PbLjY4hFtxHnDb2qpSzJZPg==} + '@powersync/node@0.0.0-dev-20251021113138': + resolution: {integrity: sha512-1fvtRWQdxgWMjmnOLXE1PbM6JqmxDn5xy4eI46VzSEOMjgkkah0Vn6C9mjPrs7gtlkuDMKRqPQm024Fl3CmPZQ==} peerDependencies: - '@powersync/common': 0.0.0-dev-20251003085035 + '@powersync/common': 0.0.0-dev-20251021113138 better-sqlite3: 12.x peerDependenciesMeta: better-sqlite3: @@ -10642,13 +10642,13 @@ snapshots: '@pkgr/core@0.2.9': {} - '@powersync/common@0.0.0-dev-20251003085035': + '@powersync/common@0.0.0-dev-20251021113138': dependencies: js-logger: 1.6.1 - '@powersync/node@0.0.0-dev-20251003085035(@powersync/common@0.0.0-dev-20251003085035)': + '@powersync/node@0.0.0-dev-20251021113138(@powersync/common@0.0.0-dev-20251021113138)': dependencies: - '@powersync/common': 0.0.0-dev-20251003085035 + '@powersync/common': 0.0.0-dev-20251021113138 async-lock: 1.4.1 bson: 6.10.4 comlink: 4.4.2 From 4692c8b2d0ed4f9199337c9e16a5cd150ac74fbb Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 21 Oct 2025 17:57:19 +0200 Subject: [PATCH 17/56] Support input schema validations with Zod --- docs/collections/powersync-collection.md | 16 ++-- .../src/definitions.ts | 22 ++--- 
.../powersync-db-collection/src/helpers.ts | 10 ++- .../powersync-db-collection/src/powersync.ts | 85 ++++++++++--------- .../tests/powersync.test.ts | 53 ++++++++++++ 5 files changed, 121 insertions(+), 65 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 4b070eb8a..e7893a040 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -101,9 +101,9 @@ const documentsCollection = createCollection( ) ``` -#### Option 2: Using Schema Validation +#### Option 2: Using Advanced Schema Validation -TODO +Additional validations can be performed by supplying a Standard Schema. ```ts import { createCollection } from "@tanstack/react-db" @@ -111,15 +111,19 @@ import { powerSyncCollectionOptions, convertPowerSyncSchemaToSpecs, } from "@tanstack/powersync-db-collection" +import { z } from "zod" -// Convert PowerSync schema to TanStack DB schema -const schemas = convertPowerSyncSchemaToSpecs(APP_SCHEMA) +// The output of this schema must correspond to the SQLite schema +const schema = z.object({ + id: z.string(), + name: z.string().min(3, { message: errorMessage }).nullable(), +}) const documentsCollection = createCollection( powerSyncCollectionOptions({ database: db, - tableName: "documents", - schema: schemas.documents, // Use schema for runtime type validation + table: APP_SCHEMA.props.documents, + schema, }) ) ``` diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index dec66a38d..9f8e66c14 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -1,11 +1,7 @@ -import type { ExtractedTable } from "./helpers" -import type { - AbstractPowerSyncDatabase, - ColumnsType, - Table, -} from "@powersync/common" +import type { AbstractPowerSyncDatabase, Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" +import type { ExtractedTable } from "./helpers" /** * Configuration interface for PowerSync collection options @@ -41,14 +37,14 @@ import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" * ``` */ export type PowerSyncCollectionConfig< - TableType extends Table = Table, + TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, > = Omit< - BaseCollectionConfig, string, TSchema>, + BaseCollectionConfig, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` > & { /** The PowerSync Schema Table definition */ - table: TableType + table: TTable /** The PowerSync database instance */ database: AbstractPowerSyncDatabase /** @@ -78,13 +74,9 @@ export type PowerSyncCollectionMeta = { } export type EnhancedPowerSyncCollectionConfig< - TableType extends Table = Table, + TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, -> = CollectionConfig< - ExtractedTable, - string, - TSchema -> & { +> = CollectionConfig, string, TSchema> & { id?: string utils: PowerSyncCollectionUtils schema?: TSchema diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 6cd309990..76313caed 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -1,5 +1,5 @@ import { DiffTriggerOperation } from "@powersync/common" -import type { ColumnsType, ExtractColumnValueType } from "@powersync/common" +import 
type { ExtractColumnValueType, Table } from "@powersync/common" /** * All PowerSync table records have a uuid `id` column. @@ -20,12 +20,14 @@ export type PowerSyncRecord = { * name: column.text, * age: column.integer * }) - * type TableType = ExtractedTable + * type TableType = ExtractedTable * // Results in: { name: string | null, age: number | null } * ``` */ -export type ExtractedTable = { - [K in keyof Columns]: ExtractColumnValueType +export type ExtractedTable = { + [K in keyof TTable[`columnMap`]]: ExtractColumnValueType< + TTable[`columnMap`][K] + > } & { id: string } diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 049647d5a..cba14eda6 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -4,104 +4,110 @@ import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" import { convertTableToSchema } from "./schema" -import type { ExtractedTable } from "./helpers" -import type { PendingOperation } from "./PendingOperationStore" +import type { Table, TriggerDiffRecord } from "@powersync/common" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { CollectionConfig, SyncConfig } from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" -import type { CollectionConfig, SyncConfig } from "@tanstack/db" -import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { ColumnsType, Table, TriggerDiffRecord } from "@powersync/common" +import type { ExtractedTable } from "./helpers" +import type { PendingOperation } from "./PendingOperationStore" /** * Creates PowerSync collection options for use with a standard Collection * - * @template TExplicit - The explicit type of items in the collection (highest priority) - * @template TSchema - The schema type for validation and type inference (second priority) + * @template TTable - The SQLite based typing + * @template TSchema - The schema type for validation - optionally supports a custom input type * @param config - Configuration options for the PowerSync collection * @returns Collection options with utilities */ -// Overload for when schema is provided /** - * Creates a PowerSync collection configuration with schema validation. + * Creates a PowerSync collection configuration with basic default validation. * * @example * ```typescript - * // With schema validation * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, * }), * }) * + * type Document = (typeof APP_SCHEMA)["types"]["documents"] + * + * const db = new PowerSyncDatabase({ + * database: { + * dbFilename: "test.sqlite", + * }, + * schema: APP_SCHEMA, + * }) + * * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * table: APP_SCHEMA.props.documents, - * schema: TODO + * table: APP_SCHEMA.props.documents * }) * ) * ``` */ -// TODO!!! 
-// export function powerSyncCollectionOptions( -// config: PowerSyncCollectionConfig, T> -// ): CollectionConfig, string, T> & { -// schema: T -// utils: PowerSyncCollectionUtils -// } +export function powerSyncCollectionOptions( + config: PowerSyncCollectionConfig +): CollectionConfig, string, never> & { + utils: PowerSyncCollectionUtils +} +// Overload for when schema is provided /** - * Creates a PowerSync collection configuration without schema validation. + * Creates a PowerSync collection configuration with schema validation. * * @example * ```typescript + * import { z } from "zod" + * + * // The PowerSync SQLite Schema * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, * }), * }) * - * type Document = (typeof APP_SCHEMA)["types"]["documents"] - * - * const db = new PowerSyncDatabase({ - * database: { - * dbFilename: "test.sqlite", - * }, - * schema: APP_SCHEMA, + * // Advanced Zod validations. The output type of this schema + * // is constrained to the SQLite schema of APP_SCHEMA + * const schema = z.object({ + * id: z.string(), + * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), * }) * * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * table: APP_SCHEMA.props.documents + * table: APP_SCHEMA.props.documents, + * schema * }) * ) * ``` */ export function powerSyncCollectionOptions< - TableType extends Table = Table, + TTable extends Table, + TSchema extends StandardSchemaV1, any>, >( - config: PowerSyncCollectionConfig & { - schema?: never - } -): CollectionConfig, string> & { - schema?: never + config: PowerSyncCollectionConfig +): CollectionConfig, string, TSchema> & { utils: PowerSyncCollectionUtils + schema: TSchema } /** * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. 
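 * When no explicit schema is supplied, a basic StandardSchema validator is
 * derived from the PowerSync table definition via convertTableToSchema; when
 * one is supplied, its output type is still constrained to the shape of the
 * SQLite columns.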
*/ export function powerSyncCollectionOptions< - TableType extends Table = Table, + TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, >( - config: PowerSyncCollectionConfig -): EnhancedPowerSyncCollectionConfig { + config: PowerSyncCollectionConfig +): EnhancedPowerSyncCollectionConfig { const { database, table, @@ -110,7 +116,7 @@ export function powerSyncCollectionOptions< ...restConfig } = config - type RecordType = ExtractedTable + type RecordType = ExtractedTable const { viewName } = table // We can do basic runtime validations for columns if not explicit schema has been provided @@ -277,7 +283,7 @@ export function powerSyncCollectionOptions< const getKey = (record: RecordType) => asPowerSyncRecord(record).id - const outputConfig: EnhancedPowerSyncCollectionConfig = { + const outputConfig: EnhancedPowerSyncCollectionConfig = { ...restConfig, schema, getKey, @@ -286,7 +292,6 @@ export function powerSyncCollectionOptions< sync, onInsert: async (params) => { // The transaction here should only ever contain a single insert mutation - params.transaction return await transactor.applyTransaction(params.transaction) }, onUpdate: async (params) => { diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 7e182a92c..81f973255 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -15,6 +15,7 @@ import { liveQueryCollectionOptions, } from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" +import { z } from "zod" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" import type { AbstractPowerSyncDatabase } from "@powersync/node" @@ -102,6 +103,58 @@ describe(`PowerSync Integration`, () => { } } }) + + it(`should allow for advanced validations`, async () => { + const db = await createDatabase() + + const errorMessage = `Name must be at least 3 characters` + const schema = z.object({ + id: z.string(), + name: z.string().min(3, { message: errorMessage }).nullable(), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) + ) + onTestFinished(() => collection.cleanup()) + + try { + collection.insert({ + id: randomUUID(), + name: `2`, + }) + expect.fail(`Should throw a validation error`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + console.log(ex) + expect(ex.message).contains(errorMessage) + } + } + + collection.insert({ + id: randomUUID(), + name: null, + }) + + expect(collection.size).eq(1) + + // should validate inputs + try { + collection.insert({} as any) + console.log(`failed`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + console.log(ex) + expect(ex.message).contains(`Required - path: id`) + } + } + }) }) describe(`sync`, () => { From fb45f0271b966a8a3fa77707d552f162e845ef8b Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 21 Oct 2025 18:15:28 +0200 Subject: [PATCH 18/56] update readme --- docs/collections/powersync-collection.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index e7893a040..cf72445b6 100644 --- a/docs/collections/powersync-collection.md +++ 
b/docs/collections/powersync-collection.md @@ -156,8 +156,8 @@ The `powerSyncCollectionOptions` function accepts the following options: ```ts interface PowerSyncCollectionConfig { database: PowerSyncDatabase // PowerSync database instance - tableName: string // Name of the table in PowerSync - schema?: Schema // Optional schema for validation + table: Table // PowerSync schema table definition + schema?: Schema // Optional schema for additional validation } ``` From 7030117b654b191e08397a810d9c53ec5201fd8b Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:17:46 +0200 Subject: [PATCH 19/56] Update doc comments. Code cleanup. --- docs/collections/powersync-collection.md | 8 +- .../src/PowerSyncTransactor.ts | 20 ++-- .../src/definitions.ts | 17 +++- .../powersync-db-collection/src/helpers.ts | 22 ++++- packages/powersync-db-collection/src/index.ts | 1 - .../powersync-db-collection/src/schema.ts | 94 +++---------------- .../tests/powersync.test.ts | 15 ++- .../tests/schema.test.ts | 90 ++++++------------ 8 files changed, 93 insertions(+), 174 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index cf72445b6..30655d14d 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -44,8 +44,6 @@ const APP_SCHEMA = new Schema({ }), }) -type Document = (typeof APP_SCHEMA)["types"]["documents"] - // Initialize PowerSync database const db = new PowerSyncDatabase({ database: { @@ -87,7 +85,7 @@ There are two ways to create a collection: using type inference or using schema #### Option 1: Using Table Type Inference -The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection data and operations. +The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection operations. ```ts import { createCollection } from "@tanstack/react-db" @@ -103,7 +101,7 @@ const documentsCollection = createCollection( #### Option 2: Using Advanced Schema Validation -Additional validations can be performed by supplying a Standard Schema. +Additional validations can be performed by supplying a Standard Schema. The typing of the validator is constrained to match the typing of the SQLite table. ```ts import { createCollection } from "@tanstack/react-db" @@ -128,8 +126,6 @@ const documentsCollection = createCollection( ) ``` -With schema validation, the collection will validate all inputs at runtime to ensure they match the PowerSync schema types. This provides an extra layer of type safety beyond TypeScript's compile-time checks. 
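+Writes that fail validation (against either the derived table schema or a custom Standard Schema) throw a `SchemaValidationError` from `@tanstack/db`, with the validator's issue text included in the message. A minimal sketch, assuming the `documentsCollection` and Zod schema from the example above:
+
+```ts
+import { SchemaValidationError } from "@tanstack/db"
+
+try {
+  // Violates the minimum-length rule from the Zod schema above
+  documentsCollection.insert({ id: crypto.randomUUID(), name: "a" })
+} catch (error) {
+  if (error instanceof SchemaValidationError) {
+    console.error(error.message) // includes the schema's validation message
+  }
+}
+```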
- ## Features ### Offline-First diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 1c8b7445a..668ce48ff 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -2,10 +2,10 @@ import { sanitizeSQL } from "@powersync/common" import DebugModule from "debug" import { PendingOperationStore } from "./PendingOperationStore" import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" -import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" -import type { PendingMutation, Transaction } from "@tanstack/db" -import type { PendingOperation } from "./PendingOperationStore" import type { EnhancedPowerSyncCollectionConfig } from "./definitions" +import type { PendingOperation } from "./PendingOperationStore" +import type { PendingMutation, Transaction } from "@tanstack/db" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" const debug = DebugModule.debug(`ts/db:powersync`) @@ -24,7 +24,7 @@ export type TransactorOptions = { * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", + * table: APP_SCHEMA.props.documents, * }) * ) * @@ -239,15 +239,17 @@ export class PowerSyncTransactor> { waitForCompletion: boolean, handler: (tableName: string, mutation: PendingMutation) => Promise ): Promise { - const { tableName, trackedTableName } = ( - mutation.collection.config as EnhancedPowerSyncCollectionConfig - ).utils.getMeta() - - if (!tableName) { + if ( + typeof (mutation.collection.config as any).utils?.getMeta != `function` + ) { throw new Error(`Could not get tableName from mutation's collection config. The provided mutation might not have originated from PowerSync.`) } + const { tableName, trackedTableName } = ( + mutation.collection.config as unknown as EnhancedPowerSyncCollectionConfig + ).utils.getMeta() + await handler(sanitizeSQL`${tableName}`, mutation) if (!waitForCompletion) { diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 9f8e66c14..826444190 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -5,7 +5,7 @@ import type { ExtractedTable } from "./helpers" /** * Configuration interface for PowerSync collection options - * @template T - The type of items in the collection + * @template TTable - The PowerSync table schema definition * @template TSchema - The schema type for validation */ /** @@ -19,8 +19,6 @@ import type { ExtractedTable } from "./helpers" * }), * }) * - * type Document = (typeof APP_SCHEMA)["types"]["documents"] - * * const db = new PowerSyncDatabase({ * database: { * dbFilename: "test.sqlite", @@ -29,9 +27,9 @@ import type { ExtractedTable } from "./helpers" * }) * * const collection = createCollection( - * powerSyncCollectionOptions({ + * powerSyncCollectionOptions({ * database: db, - * tableName: "documents", + * table: APP_SCHEMA.props.documents * }) * ) * ``` @@ -62,6 +60,9 @@ export type PowerSyncCollectionConfig< syncBatchSize?: number } +/** + * Meta data for the PowerSync Collection + */ export type PowerSyncCollectionMeta = { /** * The SQLite table representing the collection. 
@@ -73,6 +74,9 @@ export type PowerSyncCollectionMeta = { trackedTableName: string } +/** + * A CollectionConfig which includes utilities for PowerSync + */ export type EnhancedPowerSyncCollectionConfig< TTable extends Table = Table, TSchema extends StandardSchemaV1 = never, @@ -82,6 +86,9 @@ export type EnhancedPowerSyncCollectionConfig< schema?: TSchema } +/** + * Collection level utilities for PowerSync + */ export type PowerSyncCollectionUtils = { getMeta: () => PowerSyncCollectionMeta } diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 76313caed..bdc3d90a7 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -9,11 +9,23 @@ export type PowerSyncRecord = { [key: string]: unknown } +/** + * Utility type: If T includes null, add undefined. + * PowerSync records typically are typed as `string | null`, where insert + * and update operations also allow not specifying a value at all (optional) + * */ +type WithUndefinedIfNull = null extends T ? T | undefined : T +type OptionalIfUndefined = { + [K in keyof T as undefined extends T[K] ? K : never]?: T[K] +} & { + [K in keyof T as undefined extends T[K] ? never : K]: T[K] +} + /** * Utility type that extracts the typed structure of a table based on its column definitions. * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. * - * @template Columns - The ColumnsType definition containing column configurations + * @template TTable - The PowerSync table definition * @example * ```typescript * const table = new Table({ @@ -24,11 +36,11 @@ export type PowerSyncRecord = { * // Results in: { name: string | null, age: number | null } * ``` */ -export type ExtractedTable = { - [K in keyof TTable[`columnMap`]]: ExtractColumnValueType< - TTable[`columnMap`][K] +export type ExtractedTable = OptionalIfUndefined<{ + [K in keyof TTable[`columnMap`]]: WithUndefinedIfNull< + ExtractColumnValueType > -} & { +}> & { id: string } diff --git a/packages/powersync-db-collection/src/index.ts b/packages/powersync-db-collection/src/index.ts index 152f09076..6c8111f4c 100644 --- a/packages/powersync-db-collection/src/index.ts +++ b/packages/powersync-db-collection/src/index.ts @@ -1,4 +1,3 @@ export * from "./definitions" export * from "./powersync" export * from "./PowerSyncTransactor" -export * from "./schema" diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts index 008a88424..c5c5750c2 100644 --- a/packages/powersync-db-collection/src/schema.ts +++ b/packages/powersync-db-collection/src/schema.ts @@ -1,5 +1,5 @@ import { ColumnType } from "@powersync/common" -import type { ColumnsType, Schema, Table } from "@powersync/common" +import type { Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" import type { ExtractedTable } from "./helpers" @@ -8,7 +8,7 @@ import type { ExtractedTable } from "./helpers" * Creates a schema that validates the structure and types of table records * according to the PowerSync table definition. 
* - * @template Columns - The ColumnsType definition containing column configurations + * @template TTable - The PowerSync schema typed Table definition * @param table - The PowerSync Table instance to convert * @returns A StandardSchemaV1 compatible schema with proper type validation * @@ -18,26 +18,17 @@ import type { ExtractedTable } from "./helpers" * name: column.text, * age: column.integer * }) - * - * const schema = convertTableToSchema(usersTable) - * // Now you can use this schema with powerSyncCollectionOptions - * const collection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "users", - * schema: schema - * }) - * ) * ``` */ -export function convertTableToSchema( - table: Table -): StandardSchemaV1> { +export function convertTableToSchema( + table: TTable +): StandardSchemaV1> { + type TExtracted = ExtractedTable // Create validate function that checks types according to column definitions const validate = ( value: unknown ): - | StandardSchemaV1.SuccessResult> + | StandardSchemaV1.SuccessResult | StandardSchemaV1.FailureResult => { if (typeof value != `object` || value == null) { return { @@ -61,7 +52,7 @@ export function convertTableToSchema( // Check each column for (const column of table.columns) { - const val = (value as ExtractedTable)[column.name] + const val = (value as TExtracted)[column.name as keyof TExtracted] if (val == null) { continue @@ -92,7 +83,7 @@ export function convertTableToSchema( return { issues } } - return { value: { ...value } as ExtractedTable } + return { value: { ...value } as TExtracted } } return { @@ -101,72 +92,9 @@ export function convertTableToSchema( vendor: `powersync`, validate, types: { - input: {} as ExtractedTable, - output: {} as ExtractedTable, + input: {} as TExtracted, + output: {} as TExtracted, }, }, } } - -/** - * Converts an entire PowerSync Schema (containing multiple tables) into a collection of StandardSchemaV1 schemas. - * Each table in the schema is converted to its own StandardSchemaV1 schema while preserving all type information. 
- * - * @template Tables - A record type mapping table names to their Table definitions - * @param schema - The PowerSync Schema containing multiple table definitions - * @returns An object where each key is a table name and each value is that table's StandardSchemaV1 schema - * - * @example - * ```typescript - * const mySchema = new Schema({ - * users: new Table({ - * name: column.text, - * age: column.integer - * }), - * posts: new Table({ - * title: column.text, - * views: column.integer - * }) - * }) - * - * const standardizedSchemas = convertSchemaToSpecs(mySchema) - * // Result has type: - * // { - * // users: StandardSchemaV1<{ name: string | null, age: number | null }>, - * // posts: StandardSchemaV1<{ title: string | null, views: number | null }> - * // } - * - * // Can be used with collections: - * const usersCollection = createCollection( - * powerSyncCollectionOptions({ - * database: db, - * tableName: "users", - * schema: standardizedSchemas.users - * }) - * ) - * ``` - */ -export function convertPowerSyncSchemaToSpecs< - Tables extends Record>, ->( - schema: Schema -): { - [TableName in keyof Tables]: StandardSchemaV1< - ExtractedTable - > -} { - // Create a map to store the standardized schemas - const standardizedSchemas = {} as { - [TableName in keyof Tables]: StandardSchemaV1< - ExtractedTable - > - } - - // Iterate through each table in the schema - schema.tables.forEach((table) => { - // Convert each table to a StandardSchemaV1 and store it in the result map - ;(standardizedSchemas as any)[table.name] = convertTableToSchema(table) - }) - - return standardizedSchemas -} diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 81f973255..c12f48c3c 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -27,6 +27,7 @@ const APP_SCHEMA = new Schema({ documents: new Table( { name: column.text, + author: column.text, }, { viewName: `documents` } ), @@ -110,7 +111,11 @@ describe(`PowerSync Integration`, () => { const errorMessage = `Name must be at least 3 characters` const schema = z.object({ id: z.string(), - name: z.string().min(3, { message: errorMessage }).nullable(), + name: z + .string() + .min(3, { message: errorMessage }) + .nullable() + .optional(), }) const collection = createCollection( @@ -258,12 +263,14 @@ describe(`PowerSync Integration`, () => { const tx = collection.insert({ id, name: `new`, + author: `somebody`, }) // The insert should optimistically update the collection const newDoc = collection.get(id) expect(newDoc).toBeDefined() expect(newDoc!.name).toBe(`new`) + expect(newDoc!.author).toBe(`somebody`) await tx.isPersisted.promise // The item should now be present in PowerSync @@ -276,6 +283,8 @@ describe(`PowerSync Integration`, () => { const updatedDoc = collection.get(id) expect(updatedDoc).toBeDefined() expect(updatedDoc!.name).toBe(`updatedNew`) + // Only the updated field should be updated + expect(updatedDoc!.author).toBe(`somebody`) await collection.delete(id).isPersisted.promise @@ -511,7 +520,7 @@ describe(`PowerSync Integration`, () => { }) }) - describe(`Multiple Clients`, async () => { + describe(`Multiple Clients`, () => { it(`should sync updates between multiple clients`, async () => { const db = await createDatabase() @@ -535,7 +544,7 @@ describe(`PowerSync Integration`, () => { }) }) - describe(`Lifecycle`, async () => { + describe(`Lifecycle`, () => { it(`should cleanup resources`, 
async () => { const db = await createDatabase() const collectionOptions = powerSyncCollectionOptions({ diff --git a/packages/powersync-db-collection/tests/schema.test.ts b/packages/powersync-db-collection/tests/schema.test.ts index 62c562b76..2dc6fb25d 100644 --- a/packages/powersync-db-collection/tests/schema.test.ts +++ b/packages/powersync-db-collection/tests/schema.test.ts @@ -1,9 +1,6 @@ import { Schema, Table, column } from "@powersync/common" import { describe, expect, it } from "vitest" -import { - convertPowerSyncSchemaToSpecs, - convertTableToSchema, -} from "../src/schema" +import { convertTableToSchema } from "../src/schema" import type { StandardSchemaV1 } from "@standard-schema/spec" describe(`Schema Conversion`, () => { @@ -71,6 +68,29 @@ describe(`Schema Conversion`, () => { }) }) + it(`should handle optional values correctly`, () => { + const table = new Table({ + name: column.text, + age: column.integer, + }) + + const schema = convertTableToSchema(table) + + // Test validation with null values + const result = schema[`~standard`].validate({ + id: `123`, + name: null, + // Don't specify age + }) as StandardSchemaV1.SuccessResult + + expect(result.issues).toBeUndefined() + expect(result.value).toEqual({ + id: `123`, + name: null, + }) + expect(result.value.age).undefined + }) + it(`should require id field`, () => { const table = new Table({ name: column.text, @@ -112,61 +132,6 @@ describe(`Schema Conversion`, () => { real_col: 3.14, }) }) - }) - - describe(`convertPowerSyncSchemaToSpecs`, () => { - it(`should convert multiple tables in a schema`, () => { - const schema = new Schema({ - users: new Table({ - name: column.text, - age: column.integer, - }), - posts: new Table({ - title: column.text, - views: column.integer, - }), - }) - - const result = convertPowerSyncSchemaToSpecs(schema) - - // Test structure - expect(result).toHaveProperty(`users`) - expect(result).toHaveProperty(`posts`) - - // Test users table schema - const userValidResult = result.users[`~standard`].validate({ - id: `123`, - name: `John`, - age: 25, - }) as StandardSchemaV1.SuccessResult - - expect(userValidResult.issues).toBeUndefined() - expect(userValidResult.value).toEqual({ - id: `123`, - name: `John`, - age: 25, - }) - - // Test posts table schema - const postValidResult = result.posts[`~standard`].validate({ - id: `456`, - title: `Hello`, - views: 100, - }) as StandardSchemaV1.SuccessResult - - expect(postValidResult.issues).toBeUndefined() - expect(postValidResult.value).toEqual({ - id: `456`, - title: `Hello`, - views: 100, - }) - }) - - it(`should handle empty schema`, () => { - const schema = new Schema({}) - const result = convertPowerSyncSchemaToSpecs(schema) - expect(result).toEqual({}) - }) it(`should validate each table independently`, () => { const schema = new Schema({ @@ -178,15 +143,16 @@ describe(`Schema Conversion`, () => { }), }) - const result = convertPowerSyncSchemaToSpecs(schema) + const usersSchema = convertTableToSchema(schema.props.users) + const postsSchema = convertTableToSchema(schema.props.posts) // Test that invalid data in one table doesn't affect the other - const userInvalidResult = result.users[`~standard`].validate({ + const userInvalidResult = usersSchema[`~standard`].validate({ id: `123`, name: 42, // wrong type }) as StandardSchemaV1.FailureResult - const postValidResult = result.posts[`~standard`].validate({ + const postValidResult = postsSchema[`~standard`].validate({ id: `456`, views: 100, }) as StandardSchemaV1.SuccessResult From 
829ce64011cb416879ab067958609879592fc595 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:21:15 +0200 Subject: [PATCH 20/56] More doc cleanup --- .../src/PendingOperationStore.ts | 2 +- .../src/PowerSyncTransactor.ts | 32 +++++++++---------- .../src/definitions.ts | 16 +++++----- .../powersync-db-collection/src/helpers.ts | 10 +++--- .../powersync-db-collection/src/powersync.ts | 18 +++++------ .../powersync-db-collection/src/schema.ts | 4 +-- 6 files changed, 41 insertions(+), 41 deletions(-) diff --git a/packages/powersync-db-collection/src/PendingOperationStore.ts b/packages/powersync-db-collection/src/PendingOperationStore.ts index c804067b3..80edf3c14 100644 --- a/packages/powersync-db-collection/src/PendingOperationStore.ts +++ b/packages/powersync-db-collection/src/PendingOperationStore.ts @@ -13,7 +13,7 @@ export type PendingOperation = { * Optimistic mutations have their optimistic state discarded once transactions have * been applied. * We need to ensure that an applied transaction has been observed by the sync diff trigger - * before resoling the transaction application call. + * before resolving the transaction application call. * This store allows registering a wait for a pending operation to have been observed. */ export class PendingOperationStore { diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 668ce48ff..262ad6e4d 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -1,11 +1,11 @@ import { sanitizeSQL } from "@powersync/common" import DebugModule from "debug" -import { PendingOperationStore } from "./PendingOperationStore" import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers" +import { PendingOperationStore } from "./PendingOperationStore" +import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" +import type { PendingMutation, Transaction } from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig } from "./definitions" import type { PendingOperation } from "./PendingOperationStore" -import type { PendingMutation, Transaction } from "@tanstack/db" -import type { AbstractPowerSyncDatabase, LockContext } from "@powersync/common" const debug = DebugModule.debug(`ts/db:powersync`) @@ -29,16 +29,16 @@ export type TransactorOptions = { * ) * * const addTx = createTransaction({ - * autoCommit: false, - * mutationFn: async ({ transaction }) => { - * await new PowerSyncTransactor({database: db}).applyTransaction(transaction) - * }, + * autoCommit: false, + * mutationFn: async ({ transaction }) => { + * await new PowerSyncTransactor({ database: db }).applyTransaction(transaction) + * }, * }) * * addTx.mutate(() => { - * for (let i = 0; i < 5; i++) { - * collection.insert({ id: randomUUID(), name: `tx-${i}` }) - * } + * for (let i = 0; i < 5; i++) { + * collection.insert({ id: randomUUID(), name: `tx-${i}` }) + * } * }) * * await addTx.commit() @@ -58,7 +58,7 @@ export class PowerSyncTransactor> { } /** - * Persists a {@link Transaction} to PowerSync's SQLite DB. + * Persists a {@link Transaction} to the PowerSync SQLite database. */ async applyTransaction(transaction: Transaction) { const { mutations } = transaction @@ -67,8 +67,8 @@ export class PowerSyncTransactor> { return } /** - * The transaction might contain ops for different collections. - * We can do some optimizations for single collection transactions. 
+ * The transaction might contain operations for different collections. + * We can do some optimizations for single-collection transactions. */ const mutationsCollectionIds = mutations.map( (mutation) => mutation.collection.id @@ -103,7 +103,7 @@ export class PowerSyncTransactor> { for (const [index, mutation] of mutations.entries()) { /** * Each collection processes events independently. We need to make sure the - * last operation for each collection has been seen. + * last operation for each collection has been observed. */ const shouldWait = index == lastCollectionMutationIndexes.get(mutation.collection.id) @@ -130,7 +130,7 @@ export class PowerSyncTransactor> { * Return a promise from the writeTransaction, without awaiting it. * This promise will resolve once the entire transaction has been * observed via the diff triggers. - * We return without awaiting in order to free the writeLock. + * We return without awaiting in order to free the write lock. */ return { whenComplete: Promise.all( @@ -231,7 +231,7 @@ export class PowerSyncTransactor> { * Helper function which wraps a persistence operation by: * - Fetching the mutation's collection's SQLite table details * - Executing the mutation - * - Returning the last pending diff op if required + * - Returning the last pending diff operation if required */ protected async handleOperationWithCompletion( mutation: PendingMutation, diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 826444190..5c3b63d33 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -4,9 +4,9 @@ import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" import type { ExtractedTable } from "./helpers" /** - * Configuration interface for PowerSync collection options + * Configuration interface for PowerSync collection options. * @template TTable - The PowerSync table schema definition - * @template TSchema - The schema type for validation + * @template TSchema - The validation schema type */ /** * Configuration options for creating a PowerSync collection. @@ -41,7 +41,7 @@ export type PowerSyncCollectionConfig< BaseCollectionConfig, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` > & { - /** The PowerSync Schema Table definition */ + /** The PowerSync schema Table definition */ table: TTable /** The PowerSync database instance */ database: AbstractPowerSyncDatabase @@ -61,7 +61,7 @@ export type PowerSyncCollectionConfig< } /** - * Meta data for the PowerSync Collection + * Metadata for the PowerSync Collection. */ export type PowerSyncCollectionMeta = { /** @@ -69,13 +69,13 @@ export type PowerSyncCollectionMeta = { */ tableName: string /** - * The internal table used to track diff for the collection. + * The internal table used to track diffs for the collection. */ trackedTableName: string } /** - * A CollectionConfig which includes utilities for PowerSync + * A CollectionConfig which includes utilities for PowerSync. */ export type EnhancedPowerSyncCollectionConfig< TTable extends Table = Table, @@ -87,13 +87,13 @@ export type EnhancedPowerSyncCollectionConfig< } /** - * Collection level utilities for PowerSync + * Collection-level utilities for PowerSync. */ export type PowerSyncCollectionUtils = { getMeta: () => PowerSyncCollectionMeta } /** - * Default value for {@link PowerSyncCollectionConfig#syncBatchSize} + * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}. 
*/ export const DEFAULT_BATCH_SIZE = 1000 diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index bdc3d90a7..7d23dc171 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -2,7 +2,7 @@ import { DiffTriggerOperation } from "@powersync/common" import type { ExtractColumnValueType, Table } from "@powersync/common" /** - * All PowerSync table records have a uuid `id` column. + * All PowerSync table records include a UUID `id` column. */ export type PowerSyncRecord = { id: string @@ -10,10 +10,10 @@ export type PowerSyncRecord = { } /** - * Utility type: If T includes null, add undefined. - * PowerSync records typically are typed as `string | null`, where insert - * and update operations also allow not specifying a value at all (optional) - * */ + * Utility type: If T includes null, also allow undefined (to support optional fields in insert/update operations). + * PowerSync records are typically typed as `string | null`, where insert + * and update operations may also allow not specifying a value at all (optional). + */ type WithUndefinedIfNull = null extends T ? T | undefined : T type OptionalIfUndefined = { [K in keyof T as undefined extends T[K] ? K : never]?: T[K] diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index cba14eda6..52340cbb9 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -16,10 +16,10 @@ import type { ExtractedTable } from "./helpers" import type { PendingOperation } from "./PendingOperationStore" /** - * Creates PowerSync collection options for use with a standard Collection + * Creates PowerSync collection options for use with a standard Collection. * - * @template TTable - The SQLite based typing - * @template TSchema - The schema type for validation - optionally supports a custom input type + * @template TTable - The SQLite-based typing + * @template TSchema - The validation schema type (optionally supports a custom input type) * @param config - Configuration options for the PowerSync collection * @returns Collection options with utilities */ @@ -66,7 +66,7 @@ export function powerSyncCollectionOptions( * ```typescript * import { z } from "zod" * - * // The PowerSync SQLite Schema + * // The PowerSync SQLite schema * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, @@ -76,8 +76,8 @@ export function powerSyncCollectionOptions( * // Advanced Zod validations. The output type of this schema * // is constrained to the SQLite schema of APP_SCHEMA * const schema = z.object({ - * id: z.string(), - * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), + * id: z.string(), + * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), * }) * * const collection = createCollection( @@ -122,9 +122,9 @@ export function powerSyncCollectionOptions< // We can do basic runtime validations for columns if not explicit schema has been provided const schema = inputSchema ?? (convertTableToSchema(table) as TSchema) /** - * The onInsert, onUpdate, onDelete handlers should only return - * after we have written the changes to Tanstack DB. - * We currently only write to Tanstack DB from a diff trigger. + * The onInsert, onUpdate, and onDelete handlers should only return + * after we have written the changes to TanStack DB. 
+ * We currently only write to TanStack DB from a diff trigger. * We wait for the diff trigger to observe the change, * and only then return from the on[X] handlers. * This ensures that when the transaction is reported as diff --git a/packages/powersync-db-collection/src/schema.ts b/packages/powersync-db-collection/src/schema.ts index c5c5750c2..ca56c71b4 100644 --- a/packages/powersync-db-collection/src/schema.ts +++ b/packages/powersync-db-collection/src/schema.ts @@ -8,9 +8,9 @@ import type { ExtractedTable } from "./helpers" * Creates a schema that validates the structure and types of table records * according to the PowerSync table definition. * - * @template TTable - The PowerSync schema typed Table definition + * @template TTable - The PowerSync schema-typed Table definition * @param table - The PowerSync Table instance to convert - * @returns A StandardSchemaV1 compatible schema with proper type validation + * @returns A StandardSchemaV1-compatible schema with proper type validation * * @example * ```typescript From dd0cbc8aa120c7c2c1c4b34cd1b0aad7d7d7c95a Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:25:09 +0200 Subject: [PATCH 21/56] README cleanup --- docs/collections/powersync-collection.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 30655d14d..492d4bde7 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -85,7 +85,7 @@ There are two ways to create a collection: using type inference or using schema #### Option 1: Using Table Type Inference -The collection types are automatically inferred from the PowerSync Schema Table definition. The table is used to construct a default StandardSchema validator which is used internally to validate collection operations. +The collection types are automatically inferred from the PowerSync schema table definition. The table is used to construct a default standard schema validator which is used internally to validate collection operations. ```ts import { createCollection } from "@tanstack/react-db" @@ -101,20 +101,20 @@ const documentsCollection = createCollection( #### Option 2: Using Advanced Schema Validation -Additional validations can be performed by supplying a Standard Schema. The typing of the validator is constrained to match the typing of the SQLite table. +Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The typing of the validator is constrained to match the typing of the SQLite table. 
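
Because nullable SQLite columns are treated as optional for inserts and updates (the `WithUndefinedIfNull` / `OptionalIfUndefined` helpers introduced above), both the derived record type and the table-derived validator accept records that omit those columns. A minimal sketch, assuming the `documentsCollection` created with the table-derived validator in Option 1:

```ts
// Both inserts pass the table-derived validator: `name` is a nullable text column.
documentsCollection.insert({ id: crypto.randomUUID(), name: null }) // explicit null
documentsCollection.insert({ id: crypto.randomUUID() }) // `name` omitted entirely

// The `id` column is always required, so this would be rejected at runtime:
// documentsCollection.insert({ name: "missing id" } as any)
```
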
```ts import { createCollection } from "@tanstack/react-db" -import { - powerSyncCollectionOptions, - convertPowerSyncSchemaToSpecs, -} from "@tanstack/powersync-db-collection" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" import { z } from "zod" -// The output of this schema must correspond to the SQLite schema +// The output of this schema must match the SQLite schema const schema = z.object({ id: z.string(), - name: z.string().min(3, { message: errorMessage }).nullable(), + name: z + .string() + .min(3, { message: "Should be at least 3 characters" }) + .nullable(), }) const documentsCollection = createCollection( @@ -153,7 +153,7 @@ The `powerSyncCollectionOptions` function accepts the following options: interface PowerSyncCollectionConfig { database: PowerSyncDatabase // PowerSync database instance table: Table // PowerSync schema table definition - schema?: Schema // Optional schema for additional validation + schema?: StandardSchemaV1 // Optional schema for additional validation (e.g., Zod schema) } ``` From e26bf27421a3e9e6d2ab6038c298010d01361fe3 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:35:08 +0200 Subject: [PATCH 22/56] Cleanup tests --- packages/powersync-db-collection/package.json | 2 +- packages/powersync-db-collection/tests/powersync.test.ts | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/powersync-db-collection/package.json b/packages/powersync-db-collection/package.json index f5af86b27..10e20d84e 100644 --- a/packages/powersync-db-collection/package.json +++ b/packages/powersync-db-collection/package.json @@ -5,7 +5,7 @@ "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", - "@tanstack/store": "^0.7.7", + "@tanstack/store": "^0.8.0", "debug": "^4.4.3", "p-defer": "^4.0.1" }, diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index c12f48c3c..47c0dad64 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -96,7 +96,6 @@ describe(`PowerSync Integration`, () => { // should validate inputs try { collection.insert({} as any) - console.log(`failed`) } catch (ex) { expect(ex instanceof SchemaValidationError).true if (ex instanceof SchemaValidationError) { @@ -136,7 +135,6 @@ describe(`PowerSync Integration`, () => { } catch (ex) { expect(ex instanceof SchemaValidationError).true if (ex instanceof SchemaValidationError) { - console.log(ex) expect(ex.message).contains(errorMessage) } } @@ -151,11 +149,9 @@ describe(`PowerSync Integration`, () => { // should validate inputs try { collection.insert({} as any) - console.log(`failed`) } catch (ex) { expect(ex instanceof SchemaValidationError).true if (ex instanceof SchemaValidationError) { - console.log(ex) expect(ex.message).contains(`Required - path: id`) } } From e207268c598d868b0fc9cfa64e6790ed90ddc82a Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 10:35:44 +0200 Subject: [PATCH 23/56] Update PowerSync dependencies --- pnpm-lock.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7d36fdb81..0e74bbc01 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -659,8 +659,8 @@ importers: specifier: workspace:* version: link:../db '@tanstack/store': - specifier: ^0.7.7 - version: 0.7.7 + specifier: ^0.8.0 + version: 0.8.0 debug: specifier: ^4.4.3 version: 4.4.3 From 
e94cadfc066cb7db2f0b8d770f4dcc4816c6dc33 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 12:04:55 +0200 Subject: [PATCH 24/56] Properly constrain types --- packages/powersync-db-collection/src/powersync.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 52340cbb9..b5d440d74 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -91,7 +91,10 @@ export function powerSyncCollectionOptions( */ export function powerSyncCollectionOptions< TTable extends Table, - TSchema extends StandardSchemaV1, any>, + TSchema extends StandardSchemaV1< + ExtractedTable, + ExtractedTable + >, >( config: PowerSyncCollectionConfig ): CollectionConfig, string, TSchema> & { From b7fc0ffa87ab4231f7505d0255c2a672ff7eca86 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Wed, 22 Oct 2025 12:44:46 +0200 Subject: [PATCH 25/56] Allow custom input schema types --- docs/collections/powersync-collection.md | 7 ++--- .../powersync-db-collection/src/powersync.ts | 13 +++++---- .../tests/powersync.test.ts | 29 +++++++++++++++++++ 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 492d4bde7..5cea78c2f 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -101,7 +101,7 @@ const documentsCollection = createCollection( #### Option 2: Using Advanced Schema Validation -Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The typing of the validator is constrained to match the typing of the SQLite table. +Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The output typing of the validator is constrained to match the typing of the SQLite table. The input typing can be arbitrary. 
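
For example, the input side can use a completely different shape that the schema transforms into the SQLite shape. A minimal sketch mirroring the package's tests, assuming the quick-start `db` and `APP_SCHEMA`; the schema and collection names here are illustrative only:

```ts
import { createCollection } from "@tanstack/react-db"
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"
import { z } from "zod"

// Input: `name` is a number; output: the SQLite-compatible string.
const numericNameSchema = z.object({
  id: z.string(),
  name: z.number().transform((val) => `Number: ${val}`),
})

const numericDocuments = createCollection(
  powerSyncCollectionOptions({
    database: db,
    table: APP_SCHEMA.props.documents,
    schema: numericNameSchema,
  })
)

numericDocuments.insert({ id: crypto.randomUUID(), name: 42 })
// Read back from the collection as the transformed output, e.g. name === "Number: 42"
```
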
```ts import { createCollection } from "@tanstack/react-db" @@ -111,10 +111,7 @@ import { z } from "zod" // The output of this schema must match the SQLite schema const schema = z.object({ id: z.string(), - name: z - .string() - .min(3, { message: "Should be at least 3 characters" }) - .nullable(), + name: z.string().min(3, { message: "Should be at least 3 characters" }), }) const documentsCollection = createCollection( diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index b5d440d74..882deb1b1 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -6,7 +6,11 @@ import { PowerSyncTransactor } from "./PowerSyncTransactor" import { convertTableToSchema } from "./schema" import type { Table, TriggerDiffRecord } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { CollectionConfig, SyncConfig } from "@tanstack/db" +import type { + CollectionConfig, + InferSchemaOutput, + SyncConfig, +} from "@tanstack/db" import type { EnhancedPowerSyncCollectionConfig, PowerSyncCollectionConfig, @@ -91,13 +95,10 @@ export function powerSyncCollectionOptions( */ export function powerSyncCollectionOptions< TTable extends Table, - TSchema extends StandardSchemaV1< - ExtractedTable, - ExtractedTable - >, + TSchema extends StandardSchemaV1>, >( config: PowerSyncCollectionConfig -): CollectionConfig, string, TSchema> & { +): CollectionConfig, string, TSchema> & { utils: PowerSyncCollectionUtils schema: TSchema } diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 47c0dad64..8a7a06c6b 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -156,6 +156,35 @@ describe(`PowerSync Integration`, () => { } } }) + + it(`should allow custom input types`, async () => { + const db = await createDatabase() + + // The input can be arbitrarily typed, as long as it converts to SQLite + const schema = z.object({ + id: z.string(), + name: z.number().transform((val) => `Number: ${val}`), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) + ) + onTestFinished(() => collection.cleanup()) + + const id = randomUUID() + collection.insert({ + id, + name: 42, + }) + + const item = collection.get(id) + + expect(item?.name).eq(`Number: 42`) + }) }) describe(`sync`, () => { From 8187c6d69c4b498e306ac2eb5fc7115e4f8193a5 Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 22 Oct 2025 13:08:56 +0200 Subject: [PATCH 26/56] Support `orderBy` and `limit` in `currentStateAsChanges` (#701) * Add unit tests that reproduce the problems with moveWindow on infinite limits * Handle Infinity limit in move * Changeset * Add support for orderBy and limit options in currentStateAsChanges * Unit tests for currentStateAsChanges * changeset * fix for eslint * Move helper functions to the end of the file --- .changeset/legal-cooks-sink.md | 5 + .changeset/open-cups-lose.md | 5 + .../src/operators/topKWithFractionalIndex.ts | 21 +- .../operators/topKWithFractionalIndex.test.ts | 580 +++++++++++++++++- packages/db/src/collection/change-events.ts | 188 +++++- .../db/tests/collection-change-events.test.ts | 439 +++++++++++++ 6 files changed, 1225 insertions(+), 13 deletions(-) create mode 100644 .changeset/legal-cooks-sink.md create mode 
100644 .changeset/open-cups-lose.md create mode 100644 packages/db/tests/collection-change-events.test.ts diff --git a/.changeset/legal-cooks-sink.md b/.changeset/legal-cooks-sink.md new file mode 100644 index 000000000..ddddba41d --- /dev/null +++ b/.changeset/legal-cooks-sink.md @@ -0,0 +1,5 @@ +--- +"@tanstack/db-ivm": patch +--- + +Fix bug with setWindow on ordered queries that have no limit. diff --git a/.changeset/open-cups-lose.md b/.changeset/open-cups-lose.md new file mode 100644 index 000000000..44eee1a35 --- /dev/null +++ b/.changeset/open-cups-lose.md @@ -0,0 +1,5 @@ +--- +"@tanstack/db": patch +--- + +Add support for orderBy and limit in currentStateAsChanges function diff --git a/packages/db-ivm/src/operators/topKWithFractionalIndex.ts b/packages/db-ivm/src/operators/topKWithFractionalIndex.ts index 3b75521e3..858503f6f 100644 --- a/packages/db-ivm/src/operators/topKWithFractionalIndex.ts +++ b/packages/db-ivm/src/operators/topKWithFractionalIndex.ts @@ -87,12 +87,25 @@ class TopKArray implements TopK { }): TopKMoveChanges { const oldOffset = this.#topKStart const oldLimit = this.#topKEnd - this.#topKStart - const oldRange: HRange = [this.#topKStart, this.#topKEnd] - this.#topKStart = offset ?? oldOffset - this.#topKEnd = this.#topKStart + (limit ?? oldLimit) + // `this.#topKEnd` can be `Infinity` if it has no limit + // but `diffHalfOpen` expects a finite range + // so we restrict it to the size of the topK if topKEnd is infinite + const oldRange: HRange = [ + this.#topKStart, + this.#topKEnd === Infinity ? this.#topKStart + this.size : this.#topKEnd, + ] - const newRange: HRange = [this.#topKStart, this.#topKEnd] + this.#topKStart = offset ?? oldOffset + this.#topKEnd = this.#topKStart + (limit ?? oldLimit) // can be `Infinity` if limit is `Infinity` + + // Also handle `Infinity` in the newRange + const newRange: HRange = [ + this.#topKStart, + this.#topKEnd === Infinity + ? 
Math.max(this.#topKStart + this.size, oldRange[1]) // since the new limit is Infinity we need to take everything (so we need to take the biggest (finite) topKEnd) + : this.#topKEnd, + ] const { onlyInA, onlyInB } = diffHalfOpen(oldRange, newRange) const moveIns: Array> = [] diff --git a/packages/db-ivm/tests/operators/topKWithFractionalIndex.test.ts b/packages/db-ivm/tests/operators/topKWithFractionalIndex.test.ts index 5739cfb36..149fffb93 100644 --- a/packages/db-ivm/tests/operators/topKWithFractionalIndex.test.ts +++ b/packages/db-ivm/tests/operators/topKWithFractionalIndex.test.ts @@ -28,8 +28,8 @@ function checkLexicographicOrder(results: Array) { // Check that indices are in the same order as the sorted values for (let i = 0; i < sortedByValue.length - 1; i++) { - const currentIndex = sortedByValue[i].index - const nextIndex = sortedByValue[i + 1].index + const currentIndex = sortedByValue[i]!.index + const nextIndex = sortedByValue[i + 1]!.index // Indices should be in lexicographic order if (!(currentIndex < nextIndex)) { @@ -1120,5 +1120,581 @@ describe(`Operators`, () => { ) expect(moveSortedValues4).toEqual([`a`, `b`]) }) + + it(`should handle moving window from infinite limit to finite limit with same offset`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with no limit (infinite limit) + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have all 6 elements (no limit) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(6) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`a`, `b`, `c`, `d`, `e`, `f`]) + + // Verify windowFn was set + expect(windowFn).toBeDefined() + + // Move to finite limit of 3 (should show a, b, c) + windowFn!({ offset: 0, limit: 3 }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show only first 3 elements + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`a`, `b`, `c`]) + + // Check that we have changes (elements d, e, f should be removed) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle moving window from infinite limit to finite limit while moving offset forward`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with no limit (infinite limit) + input.pipe( + topKWithFractionalIndex((a, b) => 
a.value.localeCompare(b.value), { + offset: 0, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have all 6 elements (no limit) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(6) + + // Move to offset 2, limit 3 (should show c, d, e) + windowFn!({ offset: 2, limit: 3 }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements c, d, e + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`c`, `d`, `e`]) + + // Check that we have changes + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle moving window from infinite limit to finite limit while moving offset backward`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with no limit (infinite limit) and offset 3 + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + offset: 3, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have elements d, e, f (no limit, offset 3) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(3) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`d`, `e`, `f`]) + + // Move to finite limit of 2, moving offset backward to 1 (should show b, c) + windowFn!({ offset: 1, limit: 2 }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements b, c (offset 1, limit 2) + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`b`, `c`]) + + // Check that we have changes (elements d, e, f should be removed, b, c should be added) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle moving window from infinite limit to infinite limit with same offset (no-op)`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with no limit (infinite limit) and offset 2 + input.pipe( + topKWithFractionalIndex((a, b) => 
a.value.localeCompare(b.value), { + offset: 2, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have elements c, d, e, f (no limit, offset 2) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(4) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`c`, `d`, `e`, `f`]) + + // Move to same offset, still no limit (should show same elements c, d, e, f) + windowFn!({ offset: 2 }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should show same elements c, d, e, f (offset 2, no limit) + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`c`, `d`, `e`, `f`]) + + // Check that we have no more changes (this should be a no-op) + expect(moveResult.messageCount).toBe(initialResult.messageCount) + }) + + it(`should handle moving window from infinite limit to infinite limit while moving offset forward`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with no limit (infinite limit) and offset 0 + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + offset: 0, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have all 6 elements (no limit, offset 0) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(6) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`a`, `b`, `c`, `d`, `e`, `f`]) + + // Move to offset 2, still no limit (should show c, d, e, f) + windowFn!({ offset: 2 }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements c, d, e, f (offset 2, no limit) + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`c`, `d`, `e`, `f`]) + + // Check that we have changes (elements a, b should be removed) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle moving window from infinite limit to infinite limit while moving offset backward`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + 
[number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with no limit (infinite limit) and offset 3 + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + offset: 3, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have elements d, e, f (no limit, offset 3) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(3) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`d`, `e`, `f`]) + + // Move to offset 1, still no limit (should show b, c, d, e, f) + windowFn!({ offset: 1 }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements b, c, d, e, f (offset 1, no limit) + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`b`, `c`, `d`, `e`, `f`]) + + // Check that we have changes (elements b, c should be added) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle moving window from finite limit to infinite limit with same offset`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with finite limit of 2 and offset 2 + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + limit: 2, + offset: 2, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have 2 elements starting from offset 2 (c, d) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(2) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`c`, `d`]) + + // Move to infinite limit, keeping offset 2 (should show c, d, e, f) + windowFn!({ offset: 2, limit: Infinity }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements c, d, e, f (offset 2, no limit) + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`c`, `d`, `e`, `f`]) + + // Check that we have changes (elements e, f should be added) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle 
moving window from finite limit to infinite limit while moving offset forward`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with finite limit of 2 and offset 1 + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + limit: 2, + offset: 1, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have 2 elements starting from offset 1 (b, c) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(2) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`b`, `c`]) + + // Move to infinite limit, moving offset forward to 3 (should show d, e, f) + windowFn!({ offset: 3, limit: Infinity }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements d, e, f (offset 3, no limit) + const moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`d`, `e`, `f`]) + + // Check that we have changes (elements b, c should be removed, d, e, f should be added) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) + + it(`should handle moving window from finite limit to infinite limit while moving offset backward`, () => { + const graph = new D2() + const input = graph.newInput<[number, { id: number; value: string }]>() + const tracker = new MessageTracker< + [number, [{ id: number; value: string }, string]] + >() + + let windowFn: + | ((options: { offset?: number; limit?: number }) => void) + | null = null + + // Start with finite limit of 2 and offset 3 + input.pipe( + topKWithFractionalIndex((a, b) => a.value.localeCompare(b.value), { + limit: 2, + offset: 3, + setWindowFn: (fn) => { + windowFn = fn + }, + }), + output((message) => { + tracker.addMessage(message) + }) + ) + + graph.finalize() + + // Initial data - a, b, c, d, e, f + input.sendData( + new MultiSet([ + [[1, { id: 1, value: `a` }], 1], + [[2, { id: 2, value: `b` }], 1], + [[3, { id: 3, value: `c` }], 1], + [[4, { id: 4, value: `d` }], 1], + [[5, { id: 5, value: `e` }], 1], + [[6, { id: 6, value: `f` }], 1], + ]) + ) + graph.run() + + // Initial result should have 2 elements starting from offset 3 (d, e) + const initialResult = tracker.getResult(compareFractionalIndex) + expect(initialResult.sortedResults.length).toBe(2) + + const initialSortedValues = initialResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(initialSortedValues).toEqual([`d`, `e`]) + + // Move to infinite limit, moving offset backward to 1 (should show b, c, d, e, f) + windowFn!({ offset: 1, limit: Infinity }) + graph.run() + + const moveResult = tracker.getResult(compareFractionalIndex) + + // Should now show elements b, c, d, e, f (offset 1, no limit) + const 
moveSortedValues = moveResult.sortedResults.map( + ([_key, [value, _index]]) => value.value + ) + expect(moveSortedValues).toEqual([`b`, `c`, `d`, `e`, `f`]) + + // Check that we have changes (elements b, c, f should be added, d, e should remain) + expect(moveResult.messageCount).toBeGreaterThan(0) + }) }) }) diff --git a/packages/db/src/collection/change-events.ts b/packages/db/src/collection/change-events.ts index 7a0e41acf..10c05ae71 100644 --- a/packages/db/src/collection/change-events.ts +++ b/packages/db/src/collection/change-events.ts @@ -3,15 +3,20 @@ import { toExpression, } from "../query/builder/ref-proxy" import { compileSingleRowExpression } from "../query/compiler/evaluators.js" -import { optimizeExpressionWithIndexes } from "../utils/index-optimization.js" +import { + findIndexForField, + optimizeExpressionWithIndexes, +} from "../utils/index-optimization.js" +import { ensureIndexForField } from "../indexes/auto-index.js" +import { makeComparator } from "../utils/comparison.js" import type { ChangeMessage, CurrentStateAsChangesOptions, SubscribeChangesOptions, } from "../types" -import type { Collection } from "./index.js" +import type { Collection, CollectionImpl } from "./index.js" import type { SingleRowRefProxy } from "../query/builder/ref-proxy" -import type { BasicExpression } from "../query/ir.js" +import type { BasicExpression, OrderBy } from "../query/ir.js" /** * Interface for a collection-like object that provides the necessary methods @@ -28,7 +33,7 @@ export interface CollectionLike< /** * Returns the current state of the collection as an array of changes * @param collection - The collection to get changes from - * @param options - Options including optional where filter + * @param options - Options including optional where filter, orderBy, and limit * @returns An array of changes * @example * // Get all items as changes @@ -41,7 +46,19 @@ export interface CollectionLike< * * // Get only items using a pre-compiled expression * const activeChanges = currentStateAsChanges(collection, { - * whereExpression: eq(row.status, 'active') + * where: eq(row.status, 'active') + * }) + * + * // Get items ordered by name with limit + * const topUsers = currentStateAsChanges(collection, { + * orderBy: [{ expression: row.name, compareOptions: { direction: 'asc' } }], + * limit: 10 + * }) + * + * // Get active users ordered by score (highest score first) + * const topActiveUsers = currentStateAsChanges(collection, { + * where: eq(row.status, 'active'), + * orderBy: [{ expression: row.score, compareOptions: { direction: 'desc' } }], * }) */ export function currentStateAsChanges< @@ -69,9 +86,48 @@ export function currentStateAsChanges< return result } - // TODO: handle orderBy and limit options - // by calling optimizeOrderedLimit + // Validate that limit without orderBy doesn't happen + if (options.limit !== undefined && !options.orderBy) { + throw new Error(`limit cannot be used without orderBy`) + } + + // First check if orderBy is present (optionally with limit) + if (options.orderBy) { + // Create where filter function if present + const whereFilter = options.where + ? 
createFilterFunctionFromExpression(options.where) + : undefined + + // Get ordered keys using index optimization when possible + const orderedKeys = getOrderedKeys( + collection, + options.orderBy, + options.limit, + whereFilter, + options.optimizedOnly + ) + + if (orderedKeys === undefined) { + // `getOrderedKeys` returned undefined because we asked for `optimizedOnly` and there was no index to use + return + } + + // Convert keys to change messages + const result: Array> = [] + for (const key of orderedKeys) { + const value = collection.get(key) + if (value !== undefined) { + result.push({ + type: `insert`, + key, + value, + }) + } + } + return result + } + // If no orderBy OR orderBy optimization failed, use where clause optimization if (!options.where) { // No filtering, return all items return collectFilteredResults() @@ -246,3 +302,121 @@ export function createFilteredCallback( } } } + +/** + * Gets ordered keys from a collection using index optimization when possible + * @param collection - The collection to get keys from + * @param orderBy - The order by clause + * @param limit - Optional limit on number of keys to return + * @param whereFilter - Optional filter function to apply while traversing + * @returns Array of keys in sorted order + */ +function getOrderedKeys( + collection: CollectionLike, + orderBy: OrderBy, + limit?: number, + whereFilter?: (item: T) => boolean, + optimizedOnly?: boolean +): Array | undefined { + // For single-column orderBy on a ref expression, try index optimization + if (orderBy.length === 1) { + const clause = orderBy[0]! + const orderByExpression = clause.expression + + if (orderByExpression.type === `ref`) { + const propRef = orderByExpression + const fieldPath = propRef.path + + // Ensure index exists for this field + ensureIndexForField( + fieldPath[0]!, + fieldPath, + collection as CollectionImpl, + clause.compareOptions + ) + + // Find the index + const index = findIndexForField( + collection.indexes, + fieldPath, + clause.compareOptions + ) + + if (index && index.supports(`gt`)) { + // Use index optimization + const filterFn = (key: TKey): boolean => { + const value = collection.get(key) + if (value === undefined) { + return false + } + return whereFilter?.(value) ?? true + } + + // Take the keys that match the filter and limit + // if no limit is provided `index.keyCount` is used, + // i.e. we will take all keys that match the filter + return index.take(limit ?? index.keyCount, undefined, filterFn) + } + } + } + + if (optimizedOnly) { + return + } + + // Fallback: collect all items and sort in memory + const allItems: Array<{ key: TKey; value: T }> = [] + for (const [key, value] of collection.entries()) { + if (whereFilter?.(value) ?? 
true) { + allItems.push({ key, value }) + } + } + + // Sort using makeComparator + const compare = (a: { key: TKey; value: T }, b: { key: TKey; value: T }) => { + for (const clause of orderBy) { + const compareFn = makeComparator(clause.compareOptions) + + // Extract values for comparison + const aValue = extractValueFromItem(a.value, clause.expression) + const bValue = extractValueFromItem(b.value, clause.expression) + + const result = compareFn(aValue, bValue) + if (result !== 0) { + return result + } + } + return 0 + } + + allItems.sort(compare) + const sortedKeys = allItems.map((item) => item.key) + + // Apply limit if provided + if (limit !== undefined) { + return sortedKeys.slice(0, limit) + } + + // if no limit is provided, we will return all keys + return sortedKeys +} + +/** + * Helper function to extract a value from an item based on an expression + */ +function extractValueFromItem(item: any, expression: BasicExpression): any { + if (expression.type === `ref`) { + const propRef = expression + let value = item + for (const pathPart of propRef.path) { + value = value?.[pathPart] + } + return value + } else if (expression.type === `val`) { + return expression.value + } else { + // It must be a function + const evaluator = compileSingleRowExpression(expression) + return evaluator(item as Record) + } +} diff --git a/packages/db/tests/collection-change-events.test.ts b/packages/db/tests/collection-change-events.test.ts new file mode 100644 index 000000000..25b7dbb1f --- /dev/null +++ b/packages/db/tests/collection-change-events.test.ts @@ -0,0 +1,439 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest" +import { createCollection } from "../src/collection/index.js" +import { currentStateAsChanges } from "../src/collection/change-events.js" +import { Func, PropRef, Value } from "../src/query/ir.js" +import { DEFAULT_COMPARE_OPTIONS } from "../src/utils.js" + +interface TestUser { + id: string + name: string + age: number + score: number + status: `active` | `inactive` +} + +describe(`currentStateAsChanges`, () => { + let mockSync: ReturnType + + beforeEach(() => { + mockSync = vi.fn() + }) + + afterEach(() => { + vi.clearAllMocks() + }) + + const users: Array = [ + { id: `1`, name: `Alice`, age: 25, score: 100, status: `active` }, + { id: `2`, name: `Bob`, age: 30, score: 80, status: `inactive` }, + { id: `3`, name: `Charlie`, age: 35, score: 90, status: `active` }, + { id: `4`, name: `David`, age: 20, score: 70, status: `active` }, + { id: `5`, name: `Eve`, age: 28, score: 95, status: `inactive` }, + ] + + // Helper function to create and populate collection with test data + async function createAndPopulateCollection( + autoIndex: `eager` | `off` = `eager` + ) { + const collection = createCollection({ + id: `test-collection-${autoIndex}`, + getKey: (user) => user.id, + autoIndex, + sync: { + sync: mockSync, + }, + }) + + // Insert users via sync + mockSync.mockImplementation(({ begin, write, commit }) => { + begin() + users.forEach((user) => { + write({ + type: `insert`, + value: user, + }) + }) + commit() + }) + + collection.startSyncImmediate() + await collection.stateWhenReady() + + return collection + } + + describe.each([ + [`with auto-indexing`, `eager`], + [`without auto-indexing`, `off`], + ])(`%s`, (testName, autoIndex) => { + describe(`where clause without orderBy or limit`, () => { + it(`should return all items when no where clause is provided`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + 
+ const result = currentStateAsChanges(collection) + + expect(result).toHaveLength(5) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, + `Bob`, + `Charlie`, + `David`, + `Eve`, + ]) + }) + + it(`should filter items based on where clause`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + where: new Func(`eq`, [new PropRef([`status`]), new Value(`active`)]), + }) + + expect(result).toHaveLength(3) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, + `Charlie`, + `David`, + ]) + }) + + it(`should filter items based on numeric where clause`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + where: new Func(`gt`, [new PropRef([`age`]), new Value(25)]), + }) + + expect(result).toHaveLength(3) + expect(result?.map((change) => change.value.name)).toEqual([ + `Bob`, + `Charlie`, + `Eve`, + ]) + }) + }) + + describe(`orderBy without limit and no where clause`, () => { + it(`should return all items ordered by name ascending`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`name`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `asc` }, + }, + ], + }) + + expect(result).toHaveLength(5) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, + `Bob`, + `Charlie`, + `David`, + `Eve`, + ]) + }) + + it(`should return all items ordered by score descending`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`score`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `desc` }, + }, + ], + }) + + expect(result).toHaveLength(5) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, // score: 100 + `Eve`, // score: 95 + `Charlie`, // score: 90 + `Bob`, // score: 80 + `David`, // score: 70 + ]) + }) + + it(`should return all items ordered by age ascending`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`age`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `asc` }, + }, + ], + }) + + expect(result).toHaveLength(5) + expect(result?.map((change) => change.value.name)).toEqual([ + `David`, // age: 20 + `Alice`, // age: 25 + `Eve`, // age: 28 + `Bob`, // age: 30 + `Charlie`, // age: 35 + ]) + }) + }) + + describe(`orderBy with limit and no where clause`, () => { + it(`should return top 3 items ordered by score descending`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`score`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `desc` }, + }, + ], + limit: 3, + }) + + expect(result).toHaveLength(3) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, // score: 100 + `Eve`, // score: 95 + `Charlie`, // score: 90 + ]) + }) + }) + + describe(`orderBy with limit and where clause`, () => { + it(`should return top active users ordered by 
score descending`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + where: new Func(`eq`, [new PropRef([`status`]), new Value(`active`)]), + orderBy: [ + { + expression: new PropRef([`score`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `desc` }, + }, + ], + limit: 2, + }) + + expect(result).toHaveLength(2) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, // score: 100, status: active + `Charlie`, // score: 90, status: active + ]) + }) + + it(`should return top users over 25 ordered by age ascending`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + where: new Func(`gt`, [new PropRef([`age`]), new Value(25)]), + orderBy: [ + { + expression: new PropRef([`age`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `asc` }, + }, + ], + limit: 2, + }) + + expect(result).toHaveLength(2) + expect(result?.map((change) => change.value.name)).toEqual([ + `Eve`, // age: 28 + `Bob`, // age: 30 + ]) + }) + + it(`should handle multi-column orderBy with where clause`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + where: new Func(`eq`, [new PropRef([`status`]), new Value(`active`)]), + orderBy: [ + { + expression: new PropRef([`score`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `desc` }, + }, + { + expression: new PropRef([`age`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `asc` }, + }, + ], + limit: 2, + }) + + expect(result).toHaveLength(2) + // Should be ordered by score desc, then age asc for ties + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, // score: 100, age: 25 + `Charlie`, // score: 90, age: 35 + ]) + }) + }) + + describe(`error cases`, () => { + it(`should throw error when limit is provided without orderBy`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + expect(() => { + currentStateAsChanges(collection, { + limit: 5, + }) + }).toThrow(`limit cannot be used without orderBy`) + }) + + it(`should throw error when limit is provided without orderBy even with where clause`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + expect(() => { + currentStateAsChanges(collection, { + where: new Func(`eq`, [ + new PropRef([`status`]), + new Value(`active`), + ]), + limit: 3, + }) + }).toThrow(`limit cannot be used without orderBy`) + }) + }) + + describe(`optimizedOnly option`, () => { + it(`should return undefined when optimizedOnly is true and no index is available`, async () => { + // Only test this with auto-indexing disabled + if (autoIndex === `off`) { + const collection = await createAndPopulateCollection(`off`) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`score`]), + compareOptions: { + ...DEFAULT_COMPARE_OPTIONS, + direction: `desc`, + }, + }, + ], + limit: 1, + optimizedOnly: true, + }) + + expect(result).toBeUndefined() + } + }) + + it(`should return results when optimizedOnly is true and index is available`, async () => { + // Only test this with auto-indexing enabled + if (autoIndex === `eager`) { + const collection = await createAndPopulateCollection(`eager`) + + const result 
= currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`score`]), + compareOptions: { + ...DEFAULT_COMPARE_OPTIONS, + direction: `desc`, + }, + }, + ], + limit: 1, + optimizedOnly: true, + }) + + expect(result).toHaveLength(1) + expect(result?.[0]?.value.name).toBe(`Alice`) + } + }) + }) + + describe(`edge cases`, () => { + it(`should handle empty collection`, () => { + const collection = createCollection({ + id: `test-collection-empty-${autoIndex}`, + getKey: (user) => user.id, + autoIndex: autoIndex as `eager` | `off`, + sync: { + sync: mockSync, + }, + }) + + // Don't populate the collection + collection.startSyncImmediate() + + const result = currentStateAsChanges(collection) + + expect(result).toHaveLength(0) + }) + + it(`should handle limit larger than collection size`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`name`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `asc` }, + }, + ], + limit: 10, // More than the 5 items in collection + }) + + expect(result).toHaveLength(5) + expect(result?.map((change) => change.value.name)).toEqual([ + `Alice`, + `Bob`, + `Charlie`, + `David`, + `Eve`, + ]) + }) + + it(`should handle limit of 0`, async () => { + const collection = await createAndPopulateCollection( + autoIndex as `eager` | `off` + ) + + const result = currentStateAsChanges(collection, { + orderBy: [ + { + expression: new PropRef([`name`]), + compareOptions: { ...DEFAULT_COMPARE_OPTIONS, direction: `asc` }, + }, + ], + limit: 0, + }) + + expect(result).toHaveLength(0) + }) + }) + }) +}) From 96ad9d3eb466f41081c78e76a5837b4f111c3092 Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 22 Oct 2025 22:36:38 +0200 Subject: [PATCH 27/56] Fix bug when moving an orderBy window that has an infinite limit (#705) * Add unit tests that reproduce the problems with moveWindow on infinite limits * Handle Infinity limit in move * Changeset From 36d2439d07c8db093561dfc6a83125f8e7b03057 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 14:42:11 -0600 Subject: [PATCH 28/56] ci: Version Packages (#702) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/in-memory-fallback-for-ssr.md | 11 ----------- .changeset/legal-cooks-sink.md | 5 ----- .changeset/open-cups-lose.md | 5 ----- examples/angular/todos/CHANGELOG.md | 8 ++++++++ examples/angular/todos/package.json | 2 +- examples/react/projects/package.json | 4 ++-- examples/react/todo/CHANGELOG.md | 10 ++++++++++ examples/react/todo/package.json | 2 +- packages/angular-db/CHANGELOG.md | 7 +++++++ packages/angular-db/package.json | 2 +- packages/db-ivm/CHANGELOG.md | 6 ++++++ packages/db-ivm/package.json | 2 +- packages/db/CHANGELOG.md | 17 +++++++++++++++++ packages/db/package.json | 2 +- packages/electric-db-collection/CHANGELOG.md | 7 +++++++ packages/electric-db-collection/package.json | 2 +- packages/query-db-collection/CHANGELOG.md | 7 +++++++ packages/query-db-collection/package.json | 2 +- packages/react-db/CHANGELOG.md | 7 +++++++ packages/react-db/package.json | 2 +- packages/rxdb-db-collection/CHANGELOG.md | 7 +++++++ packages/rxdb-db-collection/package.json | 2 +- packages/solid-db/CHANGELOG.md | 7 +++++++ packages/solid-db/package.json | 2 +- packages/svelte-db/CHANGELOG.md | 7 +++++++ 
packages/svelte-db/package.json | 2 +- packages/trailbase-db-collection/CHANGELOG.md | 7 +++++++ packages/trailbase-db-collection/package.json | 2 +- packages/vue-db/CHANGELOG.md | 7 +++++++ packages/vue-db/package.json | 2 +- pnpm-lock.yaml | 11 +++-------- 31 files changed, 122 insertions(+), 44 deletions(-) delete mode 100644 .changeset/in-memory-fallback-for-ssr.md delete mode 100644 .changeset/legal-cooks-sink.md delete mode 100644 .changeset/open-cups-lose.md diff --git a/.changeset/in-memory-fallback-for-ssr.md b/.changeset/in-memory-fallback-for-ssr.md deleted file mode 100644 index 5afb61018..000000000 --- a/.changeset/in-memory-fallback-for-ssr.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"@tanstack/db": patch ---- - -Add in-memory fallback for localStorage collections in SSR environments - -Prevents errors when localStorage collections are imported on the server by automatically falling back to an in-memory store. This allows isomorphic JavaScript applications to safely import localStorage collection modules without errors during module initialization. - -When localStorage is not available (e.g., in server-side rendering environments), the collection automatically uses an in-memory storage implementation. Data will not persist across page reloads or be shared across tabs when using the in-memory fallback, but the collection will function normally otherwise. - -Fixes #691 diff --git a/.changeset/legal-cooks-sink.md b/.changeset/legal-cooks-sink.md deleted file mode 100644 index ddddba41d..000000000 --- a/.changeset/legal-cooks-sink.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@tanstack/db-ivm": patch ---- - -Fix bug with setWindow on ordered queries that have no limit. diff --git a/.changeset/open-cups-lose.md b/.changeset/open-cups-lose.md deleted file mode 100644 index 44eee1a35..000000000 --- a/.changeset/open-cups-lose.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@tanstack/db": patch ---- - -Add support for orderBy and limit in currentStateAsChanges function diff --git a/examples/angular/todos/CHANGELOG.md b/examples/angular/todos/CHANGELOG.md index d8c163b81..8e34c7dfa 100644 --- a/examples/angular/todos/CHANGELOG.md +++ b/examples/angular/todos/CHANGELOG.md @@ -1,5 +1,13 @@ # todos +## 0.0.13 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + - @tanstack/angular-db@0.1.17 + ## 0.0.12 ### Patch Changes diff --git a/examples/angular/todos/package.json b/examples/angular/todos/package.json index a5ae2e80e..d918473b7 100644 --- a/examples/angular/todos/package.json +++ b/examples/angular/todos/package.json @@ -1,6 +1,6 @@ { "name": "todos", - "version": "0.0.12", + "version": "0.0.13", "scripts": { "ng": "ng", "start": "ng serve", diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index 1b7be2f9e..efb99ae3a 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,8 +17,8 @@ "dependencies": { "@tailwindcss/vite": "^4.1.14", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.32", - "@tanstack/react-db": "^0.1.33", + "@tanstack/query-db-collection": "^0.2.33", + "@tanstack/react-db": "^0.1.34", "@tanstack/react-router": "^1.133.15", "@tanstack/react-router-devtools": "^1.133.15", "@tanstack/react-router-with-query": "^1.130.17", diff --git a/examples/react/todo/CHANGELOG.md 
b/examples/react/todo/CHANGELOG.md index 8c3c7da0a..929e0de98 100644 --- a/examples/react/todo/CHANGELOG.md +++ b/examples/react/todo/CHANGELOG.md @@ -1,5 +1,15 @@ # examples/react/todo +## 0.1.12 + +### Patch Changes + +- Updated dependencies []: + - @tanstack/electric-db-collection@0.1.36 + - @tanstack/query-db-collection@0.2.33 + - @tanstack/react-db@0.1.34 + - @tanstack/trailbase-db-collection@0.1.34 + ## 0.1.11 ### Patch Changes diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index bbf47b067..0b23519b4 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-example-react-todo", "private": true, - "version": "0.1.11", + "version": "0.1.12", "dependencies": { "@tanstack/electric-db-collection": "workspace:^", "@tanstack/query-core": "^5.90.5", diff --git a/packages/angular-db/CHANGELOG.md b/packages/angular-db/CHANGELOG.md index 139e5fddc..78faf1576 100644 --- a/packages/angular-db/CHANGELOG.md +++ b/packages/angular-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/angular-db +## 0.1.17 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.16 ### Patch Changes diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index b30556162..4eb0c1f4b 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/angular-db", "description": "Angular integration for @tanstack/db", - "version": "0.1.16", + "version": "0.1.17", "author": "Ethan McDaniel", "license": "MIT", "repository": { diff --git a/packages/db-ivm/CHANGELOG.md b/packages/db-ivm/CHANGELOG.md index 571175e93..609a9602d 100644 --- a/packages/db-ivm/CHANGELOG.md +++ b/packages/db-ivm/CHANGELOG.md @@ -1,5 +1,11 @@ # @tanstack/db-ivm +## 0.1.12 + +### Patch Changes + +- Fix bug with setWindow on ordered queries that have no limit. ([#701](https://github.com/TanStack/db/pull/701)) + ## 0.1.11 ### Patch Changes diff --git a/packages/db-ivm/package.json b/packages/db-ivm/package.json index 2fb5d3e89..0d18d7dab 100644 --- a/packages/db-ivm/package.json +++ b/packages/db-ivm/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-ivm", "description": "Incremental View Maintenance for TanStack DB based on Differential Dataflow", - "version": "0.1.11", + "version": "0.1.12", "dependencies": { "fractional-indexing": "^3.2.0", "sorted-btree": "^1.8.1" diff --git a/packages/db/CHANGELOG.md b/packages/db/CHANGELOG.md index 0103ace18..418f62f44 100644 --- a/packages/db/CHANGELOG.md +++ b/packages/db/CHANGELOG.md @@ -1,5 +1,22 @@ # @tanstack/db +## 0.4.12 + +### Patch Changes + +- Add in-memory fallback for localStorage collections in SSR environments ([#696](https://github.com/TanStack/db/pull/696)) + + Prevents errors when localStorage collections are imported on the server by automatically falling back to an in-memory store. This allows isomorphic JavaScript applications to safely import localStorage collection modules without errors during module initialization. + + When localStorage is not available (e.g., in server-side rendering environments), the collection automatically uses an in-memory storage implementation. 
Data will not persist across page reloads or be shared across tabs when using the in-memory fallback, but the collection will function normally otherwise. + + Fixes #691 + +- Add support for orderBy and limit in currentStateAsChanges function ([#701](https://github.com/TanStack/db/pull/701)) + +- Updated dependencies [[`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db-ivm@0.1.12 + ## 0.4.11 ### Patch Changes diff --git a/packages/db/package.json b/packages/db/package.json index 932d17e67..138be21e3 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db", "description": "A reactive client store for building super fast apps on sync", - "version": "0.4.11", + "version": "0.4.12", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db-ivm": "workspace:*" diff --git a/packages/electric-db-collection/CHANGELOG.md b/packages/electric-db-collection/CHANGELOG.md index f9316f247..52197e781 100644 --- a/packages/electric-db-collection/CHANGELOG.md +++ b/packages/electric-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/electric-db-collection +## 0.1.36 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.35 ### Patch Changes diff --git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 1661d9ddb..1bdd4afc9 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/electric-db-collection", "description": "ElectricSQL collection for TanStack DB", - "version": "0.1.35", + "version": "0.1.36", "dependencies": { "@electric-sql/client": "^1.0.14", "@standard-schema/spec": "^1.0.0", diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index 48106f1d9..1a7a17208 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/query-db-collection +## 0.2.33 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.2.32 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 35f161069..59ed1efcf 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/query-db-collection", "description": "TanStack Query collection for TanStack DB", - "version": "0.2.32", + "version": "0.2.33", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*" diff --git a/packages/react-db/CHANGELOG.md b/packages/react-db/CHANGELOG.md index 7760660ca..b2ce7ebf2 100644 --- a/packages/react-db/CHANGELOG.md +++ b/packages/react-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.34 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.33 ### Patch Changes 
diff --git a/packages/react-db/package.json b/packages/react-db/package.json index c12c7916d..34e488275 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/react-db", "description": "React integration for @tanstack/db", - "version": "0.1.33", + "version": "0.1.34", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/rxdb-db-collection/CHANGELOG.md b/packages/rxdb-db-collection/CHANGELOG.md index a9de728fb..cbf63e24d 100644 --- a/packages/rxdb-db-collection/CHANGELOG.md +++ b/packages/rxdb-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/rxdb-db-collection +## 0.1.23 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.22 ### Patch Changes diff --git a/packages/rxdb-db-collection/package.json b/packages/rxdb-db-collection/package.json index 9a7f241a3..75ed0153f 100644 --- a/packages/rxdb-db-collection/package.json +++ b/packages/rxdb-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/rxdb-db-collection", "description": "RxDB collection for TanStack DB", - "version": "0.1.22", + "version": "0.1.23", "dependencies": { "rxdb": "16.19.1", "@standard-schema/spec": "^1.0.0", diff --git a/packages/solid-db/CHANGELOG.md b/packages/solid-db/CHANGELOG.md index d19fb4024..754c2a203 100644 --- a/packages/solid-db/CHANGELOG.md +++ b/packages/solid-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.34 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.33 ### Patch Changes diff --git a/packages/solid-db/package.json b/packages/solid-db/package.json index 0aab6539b..bfd6f253a 100644 --- a/packages/solid-db/package.json +++ b/packages/solid-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/solid-db", "description": "Solid integration for @tanstack/db", - "version": "0.1.33", + "version": "0.1.34", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/svelte-db/CHANGELOG.md b/packages/svelte-db/CHANGELOG.md index 34b1c1211..91158d9c8 100644 --- a/packages/svelte-db/CHANGELOG.md +++ b/packages/svelte-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/svelte-db +## 0.1.34 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.33 ### Patch Changes diff --git a/packages/svelte-db/package.json b/packages/svelte-db/package.json index 09fc9d7ca..53a0c49fa 100644 --- a/packages/svelte-db/package.json +++ b/packages/svelte-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/svelte-db", "description": "Svelte integration for @tanstack/db", - "version": "0.1.33", + "version": "0.1.34", "dependencies": { "@tanstack/db": "workspace:*" }, diff --git a/packages/trailbase-db-collection/CHANGELOG.md b/packages/trailbase-db-collection/CHANGELOG.md index 62ca3a17b..363b388d3 100644 --- a/packages/trailbase-db-collection/CHANGELOG.md +++ b/packages/trailbase-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/trailbase-db-collection +## 
0.1.34 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.1.33 ### Patch Changes diff --git a/packages/trailbase-db-collection/package.json b/packages/trailbase-db-collection/package.json index 83c301d46..c9464f42d 100644 --- a/packages/trailbase-db-collection/package.json +++ b/packages/trailbase-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/trailbase-db-collection", "description": "TrailBase collection for TanStack DB", - "version": "0.1.33", + "version": "0.1.34", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", diff --git a/packages/vue-db/CHANGELOG.md b/packages/vue-db/CHANGELOG.md index 10421f8c7..12a9e1840 100644 --- a/packages/vue-db/CHANGELOG.md +++ b/packages/vue-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/vue-db +## 0.0.67 + +### Patch Changes + +- Updated dependencies [[`8b29841`](https://github.com/TanStack/db/commit/8b298417964340bbac5ad08a831766f8f1497477), [`8187c6d`](https://github.com/TanStack/db/commit/8187c6d69c4b498e306ac2eb5fc7115e4f8193a5)]: + - @tanstack/db@0.4.12 + ## 0.0.66 ### Patch Changes diff --git a/packages/vue-db/package.json b/packages/vue-db/package.json index 1fd2a1679..4da09c07d 100644 --- a/packages/vue-db/package.json +++ b/packages/vue-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/vue-db", "description": "Vue integration for @tanstack/db", - "version": "0.0.66", + "version": "0.0.67", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 449535b38..43841c6f1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -196,10 +196,10 @@ importers: specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.32 + specifier: ^0.2.33 version: link:../../../packages/query-db-collection '@tanstack/react-db': - specifier: ^0.1.33 + specifier: ^0.1.34 version: link:../../../packages/react-db '@tanstack/react-router': specifier: ^1.133.15 @@ -478,7 +478,7 @@ importers: version: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) + version: 0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) express: specifier: ^4.21.2 version: 4.21.2 @@ -13116,11 +13116,6 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 - drizzle-zod@0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): - dependencies: - drizzle-orm: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) - zod: 3.25.76 - drizzle-zod@0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): dependencies: drizzle-orm: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) From af6a4e42b7969c108fc599f4fe5ca4c35c4402ac Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Thu, 23 Oct 2025 07:35:17 -0600 Subject: [PATCH 29/56] docs: document findOne method in live queries guide (#699) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: document findOne method in live queries guide Add comprehensive documentation for the .findOne() method including: - 
Method signature and basic usage - Integration with React hooks - Combining with select projections - Live update behavior - Return type behavior - Best practices and when to use it 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: remove redundant sections from findOne documentation Remove "Live Updates" and "Handling missing results" sections as they are obvious/redundant with existing documentation patterns. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- docs/guides/live-queries.md | 101 ++++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) diff --git a/docs/guides/live-queries.md b/docs/guides/live-queries.md index 5b1664b20..d738327fc 100644 --- a/docs/guides/live-queries.md +++ b/docs/guides/live-queries.md @@ -38,6 +38,7 @@ The result types are automatically inferred from your query structure, providing - [Joins](#joins) - [Subqueries](#subqueries) - [groupBy and Aggregations](#groupby-and-aggregations) +- [findOne](#findone) - [Distinct](#distinct) - [Order By, Limit, and Offset](#order-by-limit-and-offset) - [Composable Queries](#composable-queries) @@ -988,6 +989,106 @@ const engineeringStats = deptStats.get(1) > - **Single column grouping**: Keyed by the actual value (e.g., `deptStats.get(1)`) > - **Multiple column grouping**: Keyed by a JSON string of the grouped values (e.g., `userStats.get('[1,"admin"]')`) +## findOne + +Use `findOne` to return a single result instead of an array. This is useful when you expect to find at most one matching record, such as when querying by a unique identifier. + +The `findOne` method changes the return type from an array to a single object or `undefined`. When no matching record is found, the result is `undefined`. + +### Method Signature + +```ts +findOne(): Query +``` + +### Basic Usage + +Find a specific user by ID: + +```ts +const user = createLiveQueryCollection((q) => + q + .from({ users: usersCollection }) + .where(({ users }) => eq(users.id, 1)) + .findOne() +) + +// Result type: User | undefined +// If user with id=1 exists: { id: 1, name: 'John', ... } +// If not found: undefined +``` + +### With React Hooks + +Use `findOne` with `useLiveQuery` to get a single record: + +```tsx +import { useLiveQuery } from '@tanstack/react-db' +import { eq } from '@tanstack/db' + +function UserProfile({ userId }: { userId: string }) { + const { data: user, isLoading } = useLiveQuery((q) => + q + .from({ users: usersCollection }) + .where(({ users }) => eq(users.id, userId)) + .findOne() + , [userId]) + + if (isLoading) return
<div>Loading...</div>
+ if (!user) return <div>User not found</div>
+
+ return <div>{user.name}</div>
+} +``` + +### With Select + +Combine `findOne` with `select` to project specific fields: + +```ts +const userEmail = createLiveQueryCollection((q) => + q + .from({ users: usersCollection }) + .where(({ users }) => eq(users.id, 1)) + .select(({ users }) => ({ + id: users.id, + email: users.email, + })) + .findOne() +) + +// Result type: { id: number, email: string } | undefined +``` + +### Return Type Behavior + +The return type changes based on whether `findOne` is used: + +```ts +// Without findOne - returns array +const users = createLiveQueryCollection((q) => + q.from({ users: usersCollection }) +) +// Type: Array + +// With findOne - returns single object or undefined +const user = createLiveQueryCollection((q) => + q.from({ users: usersCollection }).findOne() +) +// Type: User | undefined +``` + +### Best Practices + +**Use when:** +- Querying by unique identifiers (ID, email, etc.) +- You expect at most one result +- You want type-safe single-record access without array indexing + +**Avoid when:** +- You might have multiple matching records (use regular queries instead) +- You need to iterate over results + ## Distinct Use `distinct` to remove duplicate rows from your query results based on the selected columns. The `distinct` operator ensures that each unique combination of selected values appears only once in the result set. From 595058384c88730f325ac23415bdb0643eea89fa Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Thu, 23 Oct 2025 07:35:55 -0600 Subject: [PATCH 30/56] Manual writes should validate against the synced store, not the combined synced + optimistic store (#708) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add failing test for issue #706: writeDelete timing bug in onDelete handler This test reproduces issue #706 where calling writeDelete() inside an onDelete handler causes unexpected behavior. The Root Cause: When collection.delete() is called, it creates a transaction and calls commit() before calling recomputeOptimisticState(). Because commit() is async but starts executing immediately, the onDelete handler runs BEFORE the optimistic delete is applied to the collection state. Timeline: 1. collection.delete('1') is called 2. Transaction is created with autoCommit: true 3. commit() is called (async, but starts immediately) 4. Handler runs inside commit() - optimisticDeletes is empty! 5. commit() completes 6. recomputeOptimisticState() is finally called - too late Expected Behavior: - optimisticDeletes.has('1') should be TRUE when handler runs - writeDelete('1') should throw DeleteOperationItemNotFoundError Actual Behavior (BUG): - optimisticDeletes.has('1') is FALSE when handler runs - writeDelete('1') succeeds instead of throwing - This causes state inconsistencies and silent failures The test will fail until this timing issue is fixed. Related: packages/db/src/collection/mutations.ts lines 529-537 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Update test for issue #706: Root cause is automatic refetch after writeDelete This test reproduces issue #706 where calling writeDelete() inside an onDelete handler causes the deleted item to reappear. The Complete Root Cause: ======================== When collection.delete() is called with an onDelete handler that uses writeDelete(): 1. Transaction is created and commit() starts (mutations.ts:531) 2. Transaction NOT yet added to state.transactions (line 533 runs after) 3. onDelete handler runs while transaction.state = 'persisting' 4. 
Handler calls writeDelete('1') 5. writeDelete checks for persisting transactions in state.transactions 6. Finds NONE (transaction not added yet), commits synced delete immediately 7. Item removed from syncedData ✓ 8. Handler completes 9. wrappedOnDelete automatically calls refetch() (query.ts:681) 10. Refetch fetches data from server 11. Server still has item (transaction delete hasn't executed yet) 12. Refetch OVERWRITES syncedData with server data ✗ 13. Item reappears! The Two-Part Bug: ================ Part 1: Transaction added to state.transactions AFTER commit() starts - In mutations.ts:529-537, commit() is called on line 531 - Transaction added to state.transactions on line 533 (too late) - Handler runs before transaction is in the map - This allows writeDelete to commit immediately Part 2: Automatic refetch undoes the synced write - In query.ts:674-686, wrappedOnDelete automatically refetches - Unless handler returns { refetch: false } - Refetch restores server data, overwriting synced changes - This is the reason the item reappears Test Demonstrates: - writeDelete succeeds (no error) - Synced transaction committed immediately (persisting transactions: 0) - queryFn called twice (initial + refetch) - Final state: item still present (BUG!) Expected: Item should stay deleted after writeDelete Actual: Automatic refetch restores it 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Fix test for issue #706: Demonstrate silent error swallowing in onDelete Corrected understanding of the bug based on issue details: - User returns { refetch: false } so automatic refetch is not the cause - User also deletes on backend, so refetch would work anyway The Real Bug: ============= The issue is that errors thrown by writeDelete() inside onDelete handlers are silently swallowed by .catch(() => undefined) in mutations.ts:531 When optimistic delete IS applied before handler runs: 1. collection.delete('1') creates optimistic delete 2. collection.has('1') returns false 3. onDelete handler runs 4. Handler calls writeDelete('1') 5. writeDelete validates: !collection.has('1') → throws DeleteOperationItemNotFoundError 6. Error propagates, commit() rejects 7. .catch(() => undefined) SILENTLY SWALLOWS error 8. User sees: execution stops, no error message, item flickers and reappears The test demonstrates calling writeDelete in onDelete with refetch: false (the exact pattern from the issue). The .catch(() => undefined) is the root cause that prevents users from seeing errors. Note: Due to timing (transaction not in state.transactions when handler runs), this test hits the scenario where writeDelete succeeds. The bug manifests when the optimistic delete IS applied, causing writeDelete to throw. Related code: mutations.ts:531 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Fix issue #706: writeDelete should check synced store only, not combined view The bug: When calling writeDelete() inside an onDelete handler, it would throw DeleteOperationItemNotFoundError because it checked the combined view (synced + optimistic) which already had the item optimistically deleted. The fix: Change manual-sync.ts to check only the synced store, not the combined view. 
Changes in packages/query-db-collection/src/manual-sync.ts: - Line 116: Changed from ctx.collection.has(op.key) to ctx.collection._state.syncedData.has(op.key) - Line 120: Same change for delete validation - Line 155: Changed from ctx.collection.get(op.key) to ctx.collection._state.syncedData.get(op.key) - Line 173: Same change for delete operation - Line 182: Changed ctx.collection.has(op.key) to ctx.collection._state.syncedData.has(op.key) for upsert Why this fixes the issue: - writeDelete operates on the synced store, not the optimistic state - Validation should match the store being modified - This allows write operations to work correctly even when items are optimistically modified - Now handlers can safely call writeDelete/writeUpdate regardless of optimistic state Test updated: - Renamed test to reflect it now verifies the fix works - Test passes: writeDelete succeeds, handler completes, item deleted successfully - No errors thrown, execution continues as expected Fixes #706 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Clean up test and add changeset for issue #706 - Removed console.log and debugging output from test - Removed lengthy comment explanations - Simplified test to be concise and focused - Added changeset describing the fix 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- .changeset/fix-write-delete-in-handlers.md | 11 +++++ .../query-db-collection/src/manual-sync.ts | 18 +++++--- .../query-db-collection/tests/query.test.ts | 44 +++++++++++++++++++ 3 files changed, 67 insertions(+), 6 deletions(-) create mode 100644 .changeset/fix-write-delete-in-handlers.md diff --git a/.changeset/fix-write-delete-in-handlers.md b/.changeset/fix-write-delete-in-handlers.md new file mode 100644 index 000000000..fa4a38980 --- /dev/null +++ b/.changeset/fix-write-delete-in-handlers.md @@ -0,0 +1,11 @@ +--- +"@tanstack/query-db-collection": patch +--- + +Fix writeDelete/writeUpdate validation to check synced store only + +Fixed issue where calling `writeDelete()` or `writeUpdate()` inside mutation handlers (like `onDelete`) would throw errors when optimistic updates were active. These write operations now correctly validate against the synced store only, not the combined view (synced + optimistic). + +This allows patterns like calling `writeDelete()` inside an `onDelete` handler to work correctly, enabling users to write directly to the synced store while the mutation is being persisted to the backend. 
+ +Fixes #706 diff --git a/packages/query-db-collection/src/manual-sync.ts b/packages/query-db-collection/src/manual-sync.ts index f783eb119..4300553f9 100644 --- a/packages/query-db-collection/src/manual-sync.ts +++ b/packages/query-db-collection/src/manual-sync.ts @@ -110,12 +110,14 @@ function validateOperations< seenKeys.add(op.key) // Validate operation-specific requirements + // NOTE: These validations check the synced store only, not the combined view (synced + optimistic) + // This allows write operations to work correctly even when items are optimistically modified if (op.type === `update`) { - if (!ctx.collection.has(op.key)) { + if (!ctx.collection._state.syncedData.has(op.key)) { throw new UpdateOperationItemNotFoundError(op.key) } } else if (op.type === `delete`) { - if (!ctx.collection.has(op.key)) { + if (!ctx.collection._state.syncedData.has(op.key)) { throw new DeleteOperationItemNotFoundError(op.key) } } @@ -149,7 +151,8 @@ export function performWriteOperations< break } case `update`: { - const currentItem = ctx.collection.get(op.key)! + // Get from synced store only, not the combined view + const currentItem = ctx.collection._state.syncedData.get(op.key)! const updatedItem = { ...currentItem, ...op.data, @@ -166,7 +169,8 @@ export function performWriteOperations< break } case `delete`: { - const currentItem = ctx.collection.get(op.key)! + // Get from synced store only, not the combined view + const currentItem = ctx.collection._state.syncedData.get(op.key)! ctx.write({ type: `delete`, value: currentItem, @@ -174,12 +178,14 @@ export function performWriteOperations< break } case `upsert`: { + // Check synced store only, not the combined view + const existsInSyncedStore = ctx.collection._state.syncedData.has(op.key) const resolved = ctx.collection.validateData( op.data, - ctx.collection.has(op.key) ? `update` : `insert`, + existsInSyncedStore ? 
`update` : `insert`, op.key ) - if (ctx.collection.has(op.key)) { + if (existsInSyncedStore) { ctx.write({ type: `update`, value: resolved, diff --git a/packages/query-db-collection/tests/query.test.ts b/packages/query-db-collection/tests/query.test.ts index b87caf67c..42f413e9a 100644 --- a/packages/query-db-collection/tests/query.test.ts +++ b/packages/query-db-collection/tests/query.test.ts @@ -2339,5 +2339,49 @@ describe(`QueryCollection`, () => { expect(collection.status).toBe(`ready`) expect(collection.size).toBe(items.length) }) + + it(`should allow writeDelete in onDelete handler to write to synced store`, async () => { + const queryKey = [`writeDelete-in-onDelete-test`] + const items: Array = [ + { id: `1`, name: `Item 1` }, + { id: `2`, name: `Item 2` }, + ] + + const queryFn = vi.fn().mockResolvedValue(items) + + const onDelete = vi.fn(async ({ transaction, collection }) => { + const deletedItem = transaction.mutations[0]?.original + // Call writeDelete inside onDelete handler - this should work without throwing + collection.utils.writeDelete(deletedItem.id) + return { refetch: false } + }) + + const config: QueryCollectionConfig = { + id: `writeDelete-in-onDelete-test`, + queryClient, + queryKey, + queryFn, + getKey, + startSync: true, + onDelete, + } + + const options = queryCollectionOptions(config) + const collection = createCollection(options) + + await vi.waitFor(() => { + expect(collection.status).toBe(`ready`) + expect(collection.size).toBe(2) + }) + + const transaction = collection.delete(`1`) + await transaction.isPersisted.promise + + // Verify the fix: writeDelete should work, transaction completes, item is deleted + expect(transaction.state).toBe(`completed`) + expect(onDelete).toHaveBeenCalledTimes(1) + expect(collection.has(`1`)).toBe(false) + expect(collection.size).toBe(1) + }) }) }) From 16dbfe34d5a5c075211453566f3a0453aa0ee76f Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Thu, 23 Oct 2025 07:36:18 -0600 Subject: [PATCH 31/56] fix(query-db-collection): respect QueryClient defaultOptions when not overriden (#707) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(query-db-collection): respect QueryClient defaultOptions when not overridden Previously, queryCollectionOptions would set query options (staleTime, retry, retryDelay, refetchInterval, enabled, meta) to undefined even when not provided in the config. This prevented QueryClient's defaultOptions from being used as fallbacks. The fix conditionally includes these options in the observerOptions object only when they are explicitly defined (not undefined), allowing TanStack Query to properly use defaultOptions from the QueryClient. Added comprehensive tests to verify: 1. defaultOptions are respected when not overridden in queryCollectionOptions 2. explicit options in queryCollectionOptions override defaultOptions 3. 
retry behavior from defaultOptions works correctly Fixes issue where users couldn't use QueryClient defaultOptions with QueryCollection 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * chore: add changeset for queryCollectionOptions defaultOptions fix 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- .../fix-query-collection-default-options.md | 29 ++++ packages/query-db-collection/src/query.ts | 13 +- .../query-db-collection/tests/query.test.ts | 137 ++++++++++++++++++ 3 files changed, 173 insertions(+), 6 deletions(-) create mode 100644 .changeset/fix-query-collection-default-options.md diff --git a/.changeset/fix-query-collection-default-options.md b/.changeset/fix-query-collection-default-options.md new file mode 100644 index 000000000..70edd58f2 --- /dev/null +++ b/.changeset/fix-query-collection-default-options.md @@ -0,0 +1,29 @@ +--- +"@tanstack/query-db-collection": patch +--- + +Fix queryCollectionOptions to respect QueryClient defaultOptions when not overridden + +Previously, when creating a QueryClient with defaultOptions (e.g., staleTime, retry, refetchOnWindowFocus), these options were ignored by queryCollectionOptions unless explicitly specified again in the collection config. This required duplicating configuration and prevented users from setting global defaults. + +Now, queryCollectionOptions properly respects the QueryClient's defaultOptions as fallbacks. Options explicitly provided in queryCollectionOptions will still override the defaults. + +Example - this now works as expected: + +```typescript +const dbQueryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + staleTime: Infinity, + }, + }, +}) + +queryCollectionOptions({ + id: "wallet-accounts", + queryKey: ["wallet-accounts"], + queryClient: dbQueryClient, + // staleTime: Infinity is now inherited from defaultOptions +}) +``` diff --git a/packages/query-db-collection/src/query.ts b/packages/query-db-collection/src/query.ts index 2d53105cc..9512bf47c 100644 --- a/packages/query-db-collection/src/query.ts +++ b/packages/query-db-collection/src/query.ts @@ -433,14 +433,15 @@ export function queryCollectionOptions( > = { queryKey: queryKey, queryFn: queryFn, - meta: meta, - enabled: enabled, - refetchInterval: refetchInterval, - retry: retry, - retryDelay: retryDelay, - staleTime: staleTime, structuralSharing: true, notifyOnChangeProps: `all`, + // Only include options that are explicitly defined to allow QueryClient defaultOptions to be used + ...(meta !== undefined && { meta }), + ...(enabled !== undefined && { enabled }), + ...(refetchInterval !== undefined && { refetchInterval }), + ...(retry !== undefined && { retry }), + ...(retryDelay !== undefined && { retryDelay }), + ...(staleTime !== undefined && { staleTime }), } const localObserver = new QueryObserver< diff --git a/packages/query-db-collection/tests/query.test.ts b/packages/query-db-collection/tests/query.test.ts index 42f413e9a..5eb888485 100644 --- a/packages/query-db-collection/tests/query.test.ts +++ b/packages/query-db-collection/tests/query.test.ts @@ -2384,4 +2384,141 @@ describe(`QueryCollection`, () => { expect(collection.size).toBe(1) }) }) + + describe(`QueryClient defaultOptions`, () => { + it(`should respect defaultOptions from QueryClient when not overridden`, async () => { + // Create a QueryClient with custom defaultOptions + const customQueryClient = new QueryClient({ + defaultOptions: { + 
queries: { + staleTime: 10000, // 10 seconds + retry: 2, + refetchOnWindowFocus: false, + }, + }, + }) + + const queryKey = [`defaultOptionsTest`] + const items: Array = [{ id: `1`, name: `Item 1` }] + const queryFn = vi.fn().mockResolvedValue(items) + + // Create a collection without specifying staleTime or retry + const config: QueryCollectionConfig = { + id: `defaultOptionsTest`, + queryClient: customQueryClient, + queryKey, + queryFn, + getKey, + startSync: true, + } + + const options = queryCollectionOptions(config) + const collection = createCollection(options) + + await vi.waitFor(() => { + expect(collection.status).toBe(`ready`) + }) + + // Verify queryFn was called once + expect(queryFn).toHaveBeenCalledTimes(1) + + // Verify the query has the correct staleTime from defaultOptions + const query = customQueryClient.getQueryCache().find({ queryKey }) + expect((query?.options as any).staleTime).toBe(10000) + + // Clean up + customQueryClient.clear() + }) + + it(`should override defaultOptions when explicitly provided in queryCollectionOptions`, async () => { + // Create a QueryClient with custom defaultOptions + const customQueryClient = new QueryClient({ + defaultOptions: { + queries: { + staleTime: 10000, // 10 seconds default + retry: 2, + }, + }, + }) + + const queryKey = [`overrideOptionsTest`] + const items: Array = [{ id: `1`, name: `Item 1` }] + const queryFn = vi.fn().mockResolvedValue(items) + + // Create a collection WITH explicit staleTime override + const config: QueryCollectionConfig = { + id: `overrideOptionsTest`, + queryClient: customQueryClient, + queryKey, + queryFn, + getKey, + startSync: true, + staleTime: 100, // Override to 100ms + } + + const options = queryCollectionOptions(config) + const collection = createCollection(options) + + await vi.waitFor(() => { + expect(collection.status).toBe(`ready`) + }) + + // Verify the query uses the overridden staleTime (100ms), not the default (10000ms) + const query = customQueryClient.getQueryCache().find({ queryKey }) + expect((query?.options as any).staleTime).toBe(100) + + // Clean up + customQueryClient.clear() + }) + + it(`should use retry from QueryClient defaultOptions when not overridden`, async () => { + let callCount = 0 + // Create a QueryClient with custom retry defaultOption + const customQueryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: 2, // Retry 2 times + retryDelay: 1, // 1ms delay for fast test + }, + }, + }) + + const queryKey = [`retryDefaultOptionsTest`] + const queryFn = vi.fn().mockImplementation(() => { + callCount++ + // Fail on first 2 attempts, succeed on 3rd + if (callCount <= 2) { + return Promise.reject(new Error(`Attempt ${callCount} failed`)) + } + return Promise.resolve([{ id: `1`, name: `Item 1` }]) + }) + + // Create a collection without specifying retry + const config: QueryCollectionConfig = { + id: `retryDefaultOptionsTest`, + queryClient: customQueryClient, + queryKey, + queryFn, + getKey, + startSync: true, + } + + const options = queryCollectionOptions(config) + const collection = createCollection(options) + + // Wait for the query to eventually succeed (after retries) + await vi.waitFor( + () => { + expect(collection.status).toBe(`ready`) + }, + { timeout: 2000 } + ) + + // Should have called queryFn 3 times (initial + 2 retries) + expect(callCount).toBe(3) + + // Clean up + customQueryClient.clear() + }) + }) }) From 5ab979cca82d8d94f041e92a0edd4ddb874df6e5 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" 
<41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 23 Oct 2025 08:02:34 -0600 Subject: [PATCH 32/56] ci: Version Packages (#711) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .../fix-query-collection-default-options.md | 29 -------------- .changeset/fix-write-delete-in-handlers.md | 11 ------ examples/react/projects/package.json | 2 +- examples/react/todo/CHANGELOG.md | 7 ++++ examples/react/todo/package.json | 2 +- packages/query-db-collection/CHANGELOG.md | 38 +++++++++++++++++++ packages/query-db-collection/package.json | 2 +- pnpm-lock.yaml | 2 +- 8 files changed, 49 insertions(+), 44 deletions(-) delete mode 100644 .changeset/fix-query-collection-default-options.md delete mode 100644 .changeset/fix-write-delete-in-handlers.md diff --git a/.changeset/fix-query-collection-default-options.md b/.changeset/fix-query-collection-default-options.md deleted file mode 100644 index 70edd58f2..000000000 --- a/.changeset/fix-query-collection-default-options.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -"@tanstack/query-db-collection": patch ---- - -Fix queryCollectionOptions to respect QueryClient defaultOptions when not overridden - -Previously, when creating a QueryClient with defaultOptions (e.g., staleTime, retry, refetchOnWindowFocus), these options were ignored by queryCollectionOptions unless explicitly specified again in the collection config. This required duplicating configuration and prevented users from setting global defaults. - -Now, queryCollectionOptions properly respects the QueryClient's defaultOptions as fallbacks. Options explicitly provided in queryCollectionOptions will still override the defaults. - -Example - this now works as expected: - -```typescript -const dbQueryClient = new QueryClient({ - defaultOptions: { - queries: { - refetchOnWindowFocus: false, - staleTime: Infinity, - }, - }, -}) - -queryCollectionOptions({ - id: "wallet-accounts", - queryKey: ["wallet-accounts"], - queryClient: dbQueryClient, - // staleTime: Infinity is now inherited from defaultOptions -}) -``` diff --git a/.changeset/fix-write-delete-in-handlers.md b/.changeset/fix-write-delete-in-handlers.md deleted file mode 100644 index fa4a38980..000000000 --- a/.changeset/fix-write-delete-in-handlers.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -"@tanstack/query-db-collection": patch ---- - -Fix writeDelete/writeUpdate validation to check synced store only - -Fixed issue where calling `writeDelete()` or `writeUpdate()` inside mutation handlers (like `onDelete`) would throw errors when optimistic updates were active. These write operations now correctly validate against the synced store only, not the combined view (synced + optimistic). - -This allows patterns like calling `writeDelete()` inside an `onDelete` handler to work correctly, enabling users to write directly to the synced store while the mutation is being persisted to the backend. 
- -Fixes #706 diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index efb99ae3a..f3ca8fdbc 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,7 +17,7 @@ "dependencies": { "@tailwindcss/vite": "^4.1.14", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.33", + "@tanstack/query-db-collection": "^0.2.34", "@tanstack/react-db": "^0.1.34", "@tanstack/react-router": "^1.133.15", "@tanstack/react-router-devtools": "^1.133.15", diff --git a/examples/react/todo/CHANGELOG.md b/examples/react/todo/CHANGELOG.md index 929e0de98..5efeceb1e 100644 --- a/examples/react/todo/CHANGELOG.md +++ b/examples/react/todo/CHANGELOG.md @@ -1,5 +1,12 @@ # examples/react/todo +## 0.1.13 + +### Patch Changes + +- Updated dependencies [[`16dbfe3`](https://github.com/TanStack/db/commit/16dbfe34d5a5c075211453566f3a0453aa0ee76f), [`5950583`](https://github.com/TanStack/db/commit/595058384c88730f325ac23415bdb0643eea89fa)]: + - @tanstack/query-db-collection@0.2.34 + ## 0.1.12 ### Patch Changes diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index 0b23519b4..475856fd7 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-example-react-todo", "private": true, - "version": "0.1.12", + "version": "0.1.13", "dependencies": { "@tanstack/electric-db-collection": "workspace:^", "@tanstack/query-core": "^5.90.5", diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index 1a7a17208..b472a5705 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,43 @@ # @tanstack/query-db-collection +## 0.2.34 + +### Patch Changes + +- Fix queryCollectionOptions to respect QueryClient defaultOptions when not overridden ([#707](https://github.com/TanStack/db/pull/707)) + + Previously, when creating a QueryClient with defaultOptions (e.g., staleTime, retry, refetchOnWindowFocus), these options were ignored by queryCollectionOptions unless explicitly specified again in the collection config. This required duplicating configuration and prevented users from setting global defaults. + + Now, queryCollectionOptions properly respects the QueryClient's defaultOptions as fallbacks. Options explicitly provided in queryCollectionOptions will still override the defaults. + + Example - this now works as expected: + + ```typescript + const dbQueryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + staleTime: Infinity, + }, + }, + }) + + queryCollectionOptions({ + id: "wallet-accounts", + queryKey: ["wallet-accounts"], + queryClient: dbQueryClient, + // staleTime: Infinity is now inherited from defaultOptions + }) + ``` + +- Fix writeDelete/writeUpdate validation to check synced store only ([#708](https://github.com/TanStack/db/pull/708)) + + Fixed issue where calling `writeDelete()` or `writeUpdate()` inside mutation handlers (like `onDelete`) would throw errors when optimistic updates were active. These write operations now correctly validate against the synced store only, not the combined view (synced + optimistic). + + This allows patterns like calling `writeDelete()` inside an `onDelete` handler to work correctly, enabling users to write directly to the synced store while the mutation is being persisted to the backend. 
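  For reference, a rough sketch of this pattern, assuming the `onDelete` handler receives `{ transaction, collection }` and that the write utilities named above are exposed on `collection.utils`; `fetchTodos` and `deleteTodoOnServer` are hypothetical backend helpers, not part of the library:

  ```typescript
  import { QueryClient } from "@tanstack/query-core"
  import { createCollection } from "@tanstack/db"
  import { queryCollectionOptions } from "@tanstack/query-db-collection"

  const queryClient = new QueryClient()

  // Hypothetical backend helpers, assumed for illustration only.
  declare function fetchTodos(): Promise<Array<{ id: string; text: string }>>
  declare function deleteTodoOnServer(id: string | number): Promise<void>

  const todos = createCollection(
    queryCollectionOptions({
      id: "todos",
      queryKey: ["todos"],
      queryFn: fetchTodos,
      queryClient,
      getKey: (todo) => todo.id,
      onDelete: async ({ transaction, collection }) => {
        for (const mutation of transaction.mutations) {
          // Persist the delete to the backend first.
          await deleteTodoOnServer(mutation.key)
          // Then write the delete straight into the synced store while the
          // optimistic delete is still active. The write is now validated
          // against the synced data only, so it no longer throws.
          collection.utils.writeDelete(mutation.key)
        }
      },
    })
  )
  ```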
+ + Fixes #706 + ## 0.2.33 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 59ed1efcf..7c681b873 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/query-db-collection", "description": "TanStack Query collection for TanStack DB", - "version": "0.2.33", + "version": "0.2.34", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 43841c6f1..eacd703ee 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -196,7 +196,7 @@ importers: specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.33 + specifier: ^0.2.34 version: link:../../../packages/query-db-collection '@tanstack/react-db': specifier: ^0.1.34 From 3c9526cd1fd80032ddddff32cf4a23dfa8376888 Mon Sep 17 00:00:00 2001 From: Mike Date: Thu, 23 Oct 2025 14:04:06 -0400 Subject: [PATCH 33/56] fix: dedupe filtering for non-optimistic mutations (#715) * Fix optimistic mutation check in transaction processing * Add test for synced delete after non-optimistic delete * Add changeset for dedupe filtering non-optimistic mutations fix --- .changeset/full-meals-ask.md | 5 ++ packages/db/src/collection/state.ts | 1 + .../collection-subscribe-changes.test.ts | 64 +++++++++++++++++++ 3 files changed, 70 insertions(+) create mode 100644 .changeset/full-meals-ask.md diff --git a/.changeset/full-meals-ask.md b/.changeset/full-meals-ask.md new file mode 100644 index 000000000..2b4785a2b --- /dev/null +++ b/.changeset/full-meals-ask.md @@ -0,0 +1,5 @@ +--- +"@tanstack/db": patch +--- + +Fix synced propagation when preceding mutation was non-optimistic diff --git a/packages/db/src/collection/state.ts b/packages/db/src/collection/state.ts index 02dd1708e..f7b03da33 100644 --- a/packages/db/src/collection/state.ts +++ b/packages/db/src/collection/state.ts @@ -700,6 +700,7 @@ export class CollectionStateManager< if (transaction.state === `completed`) { for (const mutation of transaction.mutations) { if ( + mutation.optimistic && this.isThisCollection(mutation.collection) && changedKeys.has(mutation.key) ) { diff --git a/packages/db/tests/collection-subscribe-changes.test.ts b/packages/db/tests/collection-subscribe-changes.test.ts index 710f2122a..816fbc85c 100644 --- a/packages/db/tests/collection-subscribe-changes.test.ts +++ b/packages/db/tests/collection-subscribe-changes.test.ts @@ -1250,6 +1250,70 @@ describe(`Collection.subscribeChanges`, () => { expect(collection.state.has(1)).toBe(false) }) + it(`should emit synced delete after a non-optimistic delete`, async () => { + const emitter = mitt() + const callback = vi.fn() + + const collection = createCollection<{ id: number; value: string }>({ + id: `non-optimistic-delete-sync`, + getKey: (item) => item.id, + sync: { + sync: ({ begin, write, commit }) => { + // replay any pending mutations emitted via mitt + // @ts-expect-error Mitt typings are loose for our test helpers + emitter.on(`*`, (_, changes: Array) => { + begin() + changes.forEach((change) => { + write({ + type: change.type, + // @ts-expect-error test helper + value: change.modified, + }) + }) + commit() + }) + + // seed initial row + begin() + write({ + type: `insert`, + value: { id: 1, value: `initial` }, + }) + commit() + }, + }, + onDelete: async ({ transaction }) => { + emitter.emit(`sync`, transaction.mutations) + }, + }) + + const subscription = 
collection.subscribeChanges(callback, { + includeInitialState: true, + }) + + // initial insert emitted + expect(callback).toHaveBeenCalledTimes(1) + callback.mockClear() + + const tx = collection.delete(1, { optimistic: false }) + await tx.isPersisted.promise + + expect(callback).toHaveBeenCalledTimes(1) + const deleteChanges = callback.mock.calls[0]![0] as ChangesPayload<{ + value: string + }> + expect(deleteChanges).toEqual([ + { + type: `delete`, + key: 1, + value: { id: 1, value: `initial` }, + }, + ]) + expect(collection.state.has(1)).toBe(false) + + subscription.unsubscribe() + }) + it(`truncate + optimistic insert: server did NOT reinsert key -> inserted optimistically`, async () => { const changeEvents: Array = [] let f: any = null From d8ef559b2e6e814a5ba05a8470c966c608ce0d40 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 23 Oct 2025 12:10:51 -0600 Subject: [PATCH 34/56] ci: Version Packages (#716) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/full-meals-ask.md | 5 ----- examples/angular/todos/CHANGELOG.md | 8 ++++++++ examples/angular/todos/package.json | 2 +- examples/react/projects/package.json | 4 ++-- examples/react/todo/CHANGELOG.md | 10 ++++++++++ examples/react/todo/package.json | 2 +- packages/angular-db/CHANGELOG.md | 7 +++++++ packages/angular-db/package.json | 2 +- packages/db/CHANGELOG.md | 6 ++++++ packages/db/package.json | 2 +- packages/electric-db-collection/CHANGELOG.md | 7 +++++++ packages/electric-db-collection/package.json | 2 +- packages/query-db-collection/CHANGELOG.md | 7 +++++++ packages/query-db-collection/package.json | 2 +- packages/react-db/CHANGELOG.md | 7 +++++++ packages/react-db/package.json | 2 +- packages/rxdb-db-collection/CHANGELOG.md | 7 +++++++ packages/rxdb-db-collection/package.json | 2 +- packages/solid-db/CHANGELOG.md | 7 +++++++ packages/solid-db/package.json | 2 +- packages/svelte-db/CHANGELOG.md | 7 +++++++ packages/svelte-db/package.json | 2 +- packages/trailbase-db-collection/CHANGELOG.md | 7 +++++++ packages/trailbase-db-collection/package.json | 2 +- packages/vue-db/CHANGELOG.md | 7 +++++++ packages/vue-db/package.json | 2 +- pnpm-lock.yaml | 4 ++-- 27 files changed, 103 insertions(+), 21 deletions(-) delete mode 100644 .changeset/full-meals-ask.md diff --git a/.changeset/full-meals-ask.md b/.changeset/full-meals-ask.md deleted file mode 100644 index 2b4785a2b..000000000 --- a/.changeset/full-meals-ask.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@tanstack/db": patch ---- - -Fix synced propagation when preceding mutation was non-optimistic diff --git a/examples/angular/todos/CHANGELOG.md b/examples/angular/todos/CHANGELOG.md index 8e34c7dfa..98dacedab 100644 --- a/examples/angular/todos/CHANGELOG.md +++ b/examples/angular/todos/CHANGELOG.md @@ -1,5 +1,13 @@ # todos +## 0.0.14 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + - @tanstack/angular-db@0.1.18 + ## 0.0.13 ### Patch Changes diff --git a/examples/angular/todos/package.json b/examples/angular/todos/package.json index d918473b7..9cd7e248e 100644 --- a/examples/angular/todos/package.json +++ b/examples/angular/todos/package.json @@ -1,6 +1,6 @@ { "name": "todos", - "version": "0.0.13", + "version": "0.0.14", "scripts": { "ng": "ng", "start": "ng serve", diff --git a/examples/react/projects/package.json 
b/examples/react/projects/package.json index f3ca8fdbc..025431139 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,8 +17,8 @@ "dependencies": { "@tailwindcss/vite": "^4.1.14", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.34", - "@tanstack/react-db": "^0.1.34", + "@tanstack/query-db-collection": "^0.2.35", + "@tanstack/react-db": "^0.1.35", "@tanstack/react-router": "^1.133.15", "@tanstack/react-router-devtools": "^1.133.15", "@tanstack/react-router-with-query": "^1.130.17", diff --git a/examples/react/todo/CHANGELOG.md b/examples/react/todo/CHANGELOG.md index 5efeceb1e..5a6738885 100644 --- a/examples/react/todo/CHANGELOG.md +++ b/examples/react/todo/CHANGELOG.md @@ -1,5 +1,15 @@ # examples/react/todo +## 0.1.14 + +### Patch Changes + +- Updated dependencies []: + - @tanstack/electric-db-collection@0.1.37 + - @tanstack/query-db-collection@0.2.35 + - @tanstack/react-db@0.1.35 + - @tanstack/trailbase-db-collection@0.1.35 + ## 0.1.13 ### Patch Changes diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index 475856fd7..b14ffcff2 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-example-react-todo", "private": true, - "version": "0.1.13", + "version": "0.1.14", "dependencies": { "@tanstack/electric-db-collection": "workspace:^", "@tanstack/query-core": "^5.90.5", diff --git a/packages/angular-db/CHANGELOG.md b/packages/angular-db/CHANGELOG.md index 78faf1576..b96eafafa 100644 --- a/packages/angular-db/CHANGELOG.md +++ b/packages/angular-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/angular-db +## 0.1.18 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.1.17 ### Patch Changes diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index 4eb0c1f4b..0288528d5 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/angular-db", "description": "Angular integration for @tanstack/db", - "version": "0.1.17", + "version": "0.1.18", "author": "Ethan McDaniel", "license": "MIT", "repository": { diff --git a/packages/db/CHANGELOG.md b/packages/db/CHANGELOG.md index 418f62f44..a5435084f 100644 --- a/packages/db/CHANGELOG.md +++ b/packages/db/CHANGELOG.md @@ -1,5 +1,11 @@ # @tanstack/db +## 0.4.13 + +### Patch Changes + +- Fix synced propagation when preceding mutation was non-optimistic ([#715](https://github.com/TanStack/db/pull/715)) + ## 0.4.12 ### Patch Changes diff --git a/packages/db/package.json b/packages/db/package.json index 138be21e3..7bc4a7b31 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db", "description": "A reactive client store for building super fast apps on sync", - "version": "0.4.12", + "version": "0.4.13", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db-ivm": "workspace:*" diff --git a/packages/electric-db-collection/CHANGELOG.md b/packages/electric-db-collection/CHANGELOG.md index 52197e781..32c5161d6 100644 --- a/packages/electric-db-collection/CHANGELOG.md +++ b/packages/electric-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/electric-db-collection +## 0.1.37 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - 
@tanstack/db@0.4.13 + ## 0.1.36 ### Patch Changes diff --git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 1bdd4afc9..8459ddd2e 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/electric-db-collection", "description": "ElectricSQL collection for TanStack DB", - "version": "0.1.36", + "version": "0.1.37", "dependencies": { "@electric-sql/client": "^1.0.14", "@standard-schema/spec": "^1.0.0", diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index b472a5705..14d085792 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/query-db-collection +## 0.2.35 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.2.34 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 7c681b873..5e4c9da38 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/query-db-collection", "description": "TanStack Query collection for TanStack DB", - "version": "0.2.34", + "version": "0.2.35", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*" diff --git a/packages/react-db/CHANGELOG.md b/packages/react-db/CHANGELOG.md index b2ce7ebf2..4a78be379 100644 --- a/packages/react-db/CHANGELOG.md +++ b/packages/react-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.35 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.1.34 ### Patch Changes diff --git a/packages/react-db/package.json b/packages/react-db/package.json index 34e488275..3811a0b71 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/react-db", "description": "React integration for @tanstack/db", - "version": "0.1.34", + "version": "0.1.35", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/rxdb-db-collection/CHANGELOG.md b/packages/rxdb-db-collection/CHANGELOG.md index cbf63e24d..69d921c1c 100644 --- a/packages/rxdb-db-collection/CHANGELOG.md +++ b/packages/rxdb-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/rxdb-db-collection +## 0.1.24 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.1.23 ### Patch Changes diff --git a/packages/rxdb-db-collection/package.json b/packages/rxdb-db-collection/package.json index 75ed0153f..41e7538df 100644 --- a/packages/rxdb-db-collection/package.json +++ b/packages/rxdb-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/rxdb-db-collection", "description": "RxDB collection for TanStack DB", - "version": "0.1.23", + "version": "0.1.24", "dependencies": { "rxdb": "16.19.1", "@standard-schema/spec": "^1.0.0", diff --git a/packages/solid-db/CHANGELOG.md b/packages/solid-db/CHANGELOG.md index 754c2a203..17e00114c 100644 --- a/packages/solid-db/CHANGELOG.md +++ b/packages/solid-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.35 + +### Patch Changes + +- Updated dependencies 
[[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.1.34 ### Patch Changes diff --git a/packages/solid-db/package.json b/packages/solid-db/package.json index bfd6f253a..2f924741e 100644 --- a/packages/solid-db/package.json +++ b/packages/solid-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/solid-db", "description": "Solid integration for @tanstack/db", - "version": "0.1.34", + "version": "0.1.35", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/svelte-db/CHANGELOG.md b/packages/svelte-db/CHANGELOG.md index 91158d9c8..a5a0d9e15 100644 --- a/packages/svelte-db/CHANGELOG.md +++ b/packages/svelte-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/svelte-db +## 0.1.35 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.1.34 ### Patch Changes diff --git a/packages/svelte-db/package.json b/packages/svelte-db/package.json index 53a0c49fa..2021e85b6 100644 --- a/packages/svelte-db/package.json +++ b/packages/svelte-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/svelte-db", "description": "Svelte integration for @tanstack/db", - "version": "0.1.34", + "version": "0.1.35", "dependencies": { "@tanstack/db": "workspace:*" }, diff --git a/packages/trailbase-db-collection/CHANGELOG.md b/packages/trailbase-db-collection/CHANGELOG.md index 363b388d3..355398438 100644 --- a/packages/trailbase-db-collection/CHANGELOG.md +++ b/packages/trailbase-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/trailbase-db-collection +## 0.1.35 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.1.34 ### Patch Changes diff --git a/packages/trailbase-db-collection/package.json b/packages/trailbase-db-collection/package.json index c9464f42d..b16f2d58f 100644 --- a/packages/trailbase-db-collection/package.json +++ b/packages/trailbase-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/trailbase-db-collection", "description": "TrailBase collection for TanStack DB", - "version": "0.1.34", + "version": "0.1.35", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", diff --git a/packages/vue-db/CHANGELOG.md b/packages/vue-db/CHANGELOG.md index 12a9e1840..a5de097a0 100644 --- a/packages/vue-db/CHANGELOG.md +++ b/packages/vue-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/vue-db +## 0.0.68 + +### Patch Changes + +- Updated dependencies [[`3c9526c`](https://github.com/TanStack/db/commit/3c9526cd1fd80032ddddff32cf4a23dfa8376888)]: + - @tanstack/db@0.4.13 + ## 0.0.67 ### Patch Changes diff --git a/packages/vue-db/package.json b/packages/vue-db/package.json index 4da09c07d..c757fba19 100644 --- a/packages/vue-db/package.json +++ b/packages/vue-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/vue-db", "description": "Vue integration for @tanstack/db", - "version": "0.0.67", + "version": "0.0.68", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index eacd703ee..b0199ca35 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -196,10 +196,10 @@ importers: specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.34 + specifier: ^0.2.35 version: link:../../../packages/query-db-collection '@tanstack/react-db': - specifier: ^0.1.34 + specifier: ^0.1.35 version: 
link:../../../packages/react-db '@tanstack/react-router': specifier: ^1.133.15 From 970616b6db723d1716eecd5076417de5d6e9a884 Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 24 Oct 2025 12:13:48 -0600 Subject: [PATCH 35/56] fix(collection): fire status:change event before cleaning up event handlers (#714) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(collection): fire status:change event before cleaning up event handlers Event handlers are now cleaned up after the status is changed to 'cleaned-up', allowing status:change listeners to properly detect the cleaned-up state. The cleanup process now: 1. Cleans up sync, state, changes, and indexes 2. Sets status to 'cleaned-up' (fires the event) 3. Finally cleans up event handlers This fixes the collection factory pattern where collections listen for the 'cleaned-up' status to remove themselves from the cache. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * style: format changeset with prettier 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- .changeset/fix-collection-cleanup-order.md | 34 ++++++++++++++ packages/db/src/collection/lifecycle.ts | 10 ++-- .../db/tests/collection-lifecycle.test.ts | 47 +++++++++++++++++++ 3 files changed, 88 insertions(+), 3 deletions(-) create mode 100644 .changeset/fix-collection-cleanup-order.md diff --git a/.changeset/fix-collection-cleanup-order.md b/.changeset/fix-collection-cleanup-order.md new file mode 100644 index 000000000..f81aa84d2 --- /dev/null +++ b/.changeset/fix-collection-cleanup-order.md @@ -0,0 +1,34 @@ +--- +"@tanstack/db": patch +--- + +Fix collection cleanup to fire status:change event with 'cleaned-up' status + +Previously, when a collection was garbage collected, event handlers were removed before the status was changed to 'cleaned-up'. This prevented listeners from receiving the status:change event, breaking the collection factory pattern where collections listen for cleanup to remove themselves from a cache. + +Now, the cleanup process: + +1. Cleans up sync, state, changes, and indexes +2. Sets status to 'cleaned-up' (fires the event) +3. Finally cleans up event handlers + +This enables the collection factory pattern: + +```typescript +const cache = new Map>() + +const getTodoCollection = (id: string) => { + if (!cache.has(id)) { + const collection = createCollection(/* ... */) + + collection.on("status:change", ({ status }) => { + if (status === "cleaned-up") { + cache.delete(id) // This now works! + } + }) + + cache.set(id, collection) + } + return cache.get(id)! 
+} +``` diff --git a/packages/db/src/collection/lifecycle.ts b/packages/db/src/collection/lifecycle.ts index be607a3c9..5181ab216 100644 --- a/packages/db/src/collection/lifecycle.ts +++ b/packages/db/src/collection/lifecycle.ts @@ -250,8 +250,7 @@ export class CollectionLifecycleManager< !deadline || deadline.timeRemaining() > 0 || deadline.didTimeout if (hasTime) { - // Perform all cleanup operations - this.events.cleanup() + // Perform all cleanup operations except events this.sync.cleanup() this.state.cleanup() this.changes.cleanup() @@ -265,8 +264,13 @@ export class CollectionLifecycleManager< this.hasBeenReady = false this.onFirstReadyCallbacks = [] - // Set status to cleaned-up + // Set status to cleaned-up after everything is cleaned up + // This fires the status:change event to notify listeners this.setStatus(`cleaned-up`) + + // Finally, cleanup event handlers after the event has been fired + this.events.cleanup() + return true } else { // If we don't have time, reschedule for the next idle period diff --git a/packages/db/tests/collection-lifecycle.test.ts b/packages/db/tests/collection-lifecycle.test.ts index 5af642c08..4094ee813 100644 --- a/packages/db/tests/collection-lifecycle.test.ts +++ b/packages/db/tests/collection-lifecycle.test.ts @@ -474,5 +474,52 @@ describe(`Collection Lifecycle Management`, () => { subscription.unsubscribe() }) + + it(`should fire status:change event with 'cleaned-up' status before clearing event handlers`, () => { + const collection = createCollection<{ id: string; name: string }>({ + id: `cleanup-event-test`, + getKey: (item) => item.id, + gcTime: 1000, + sync: { + sync: () => {}, + }, + }) + + // Track status changes + const statusChanges: Array<{ status: string; previousStatus: string }> = + [] + + // Add event listener for status changes + collection.on(`status:change`, ({ status, previousStatus }) => { + statusChanges.push({ status, previousStatus }) + }) + + // Subscribe and unsubscribe to trigger GC + const subscription = collection.subscribeChanges(() => {}) + subscription.unsubscribe() + + expect(statusChanges).toHaveLength(1) + expect(statusChanges[0]).toEqual({ + status: `loading`, + previousStatus: `idle`, + }) + + // Trigger GC timeout to schedule cleanup + const gcTimerId = mockSetTimeout.mock.results[0]?.value + if (gcTimerId) { + triggerTimeout(gcTimerId) + } + + // Trigger all remaining timeouts to handle the idle callback + triggerAllTimeouts() + + // Verify that the listener received the 'cleaned-up' status change event + expect(statusChanges).toHaveLength(2) + expect(statusChanges[1]).toEqual({ + status: `cleaned-up`, + previousStatus: `loading`, + }) + expect(collection.status).toBe(`cleaned-up`) + }) }) }) From ac4295129777f38ad55f085f7a1a21ae23c09d4c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 12:33:32 -0600 Subject: [PATCH 36/56] ci: Version Packages (#718) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/fix-collection-cleanup-order.md | 34 ------------------- examples/angular/todos/CHANGELOG.md | 8 +++++ examples/angular/todos/package.json | 2 +- examples/react/projects/package.json | 4 +-- examples/react/todo/CHANGELOG.md | 10 ++++++ examples/react/todo/package.json | 2 +- packages/angular-db/CHANGELOG.md | 7 ++++ packages/angular-db/package.json | 2 +- packages/db/CHANGELOG.md | 34 +++++++++++++++++++ packages/db/package.json | 2 +- packages/electric-db-collection/CHANGELOG.md 
| 7 ++++ packages/electric-db-collection/package.json | 2 +- packages/query-db-collection/CHANGELOG.md | 7 ++++ packages/query-db-collection/package.json | 2 +- packages/react-db/CHANGELOG.md | 7 ++++ packages/react-db/package.json | 2 +- packages/rxdb-db-collection/CHANGELOG.md | 7 ++++ packages/rxdb-db-collection/package.json | 2 +- packages/solid-db/CHANGELOG.md | 7 ++++ packages/solid-db/package.json | 2 +- packages/svelte-db/CHANGELOG.md | 7 ++++ packages/svelte-db/package.json | 2 +- packages/trailbase-db-collection/CHANGELOG.md | 7 ++++ packages/trailbase-db-collection/package.json | 2 +- packages/vue-db/CHANGELOG.md | 7 ++++ packages/vue-db/package.json | 2 +- pnpm-lock.yaml | 4 +-- 27 files changed, 131 insertions(+), 50 deletions(-) delete mode 100644 .changeset/fix-collection-cleanup-order.md diff --git a/.changeset/fix-collection-cleanup-order.md b/.changeset/fix-collection-cleanup-order.md deleted file mode 100644 index f81aa84d2..000000000 --- a/.changeset/fix-collection-cleanup-order.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -"@tanstack/db": patch ---- - -Fix collection cleanup to fire status:change event with 'cleaned-up' status - -Previously, when a collection was garbage collected, event handlers were removed before the status was changed to 'cleaned-up'. This prevented listeners from receiving the status:change event, breaking the collection factory pattern where collections listen for cleanup to remove themselves from a cache. - -Now, the cleanup process: - -1. Cleans up sync, state, changes, and indexes -2. Sets status to 'cleaned-up' (fires the event) -3. Finally cleans up event handlers - -This enables the collection factory pattern: - -```typescript -const cache = new Map>() - -const getTodoCollection = (id: string) => { - if (!cache.has(id)) { - const collection = createCollection(/* ... */) - - collection.on("status:change", ({ status }) => { - if (status === "cleaned-up") { - cache.delete(id) // This now works! - } - }) - - cache.set(id, collection) - } - return cache.get(id)! 
-} -``` diff --git a/examples/angular/todos/CHANGELOG.md b/examples/angular/todos/CHANGELOG.md index 98dacedab..7278a465c 100644 --- a/examples/angular/todos/CHANGELOG.md +++ b/examples/angular/todos/CHANGELOG.md @@ -1,5 +1,13 @@ # todos +## 0.0.15 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + - @tanstack/angular-db@0.1.19 + ## 0.0.14 ### Patch Changes diff --git a/examples/angular/todos/package.json b/examples/angular/todos/package.json index 9cd7e248e..7193be00a 100644 --- a/examples/angular/todos/package.json +++ b/examples/angular/todos/package.json @@ -1,6 +1,6 @@ { "name": "todos", - "version": "0.0.14", + "version": "0.0.15", "scripts": { "ng": "ng", "start": "ng serve", diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index 025431139..9f1fb5bca 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,8 +17,8 @@ "dependencies": { "@tailwindcss/vite": "^4.1.14", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.35", - "@tanstack/react-db": "^0.1.35", + "@tanstack/query-db-collection": "^0.2.36", + "@tanstack/react-db": "^0.1.36", "@tanstack/react-router": "^1.133.15", "@tanstack/react-router-devtools": "^1.133.15", "@tanstack/react-router-with-query": "^1.130.17", diff --git a/examples/react/todo/CHANGELOG.md b/examples/react/todo/CHANGELOG.md index 5a6738885..3bf337be2 100644 --- a/examples/react/todo/CHANGELOG.md +++ b/examples/react/todo/CHANGELOG.md @@ -1,5 +1,15 @@ # examples/react/todo +## 0.1.15 + +### Patch Changes + +- Updated dependencies []: + - @tanstack/electric-db-collection@0.1.38 + - @tanstack/query-db-collection@0.2.36 + - @tanstack/react-db@0.1.36 + - @tanstack/trailbase-db-collection@0.1.36 + ## 0.1.14 ### Patch Changes diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index b14ffcff2..9b43e3fdb 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-example-react-todo", "private": true, - "version": "0.1.14", + "version": "0.1.15", "dependencies": { "@tanstack/electric-db-collection": "workspace:^", "@tanstack/query-core": "^5.90.5", diff --git a/packages/angular-db/CHANGELOG.md b/packages/angular-db/CHANGELOG.md index b96eafafa..fb7488154 100644 --- a/packages/angular-db/CHANGELOG.md +++ b/packages/angular-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/angular-db +## 0.1.19 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.18 ### Patch Changes diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index 0288528d5..170e3a35c 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/angular-db", "description": "Angular integration for @tanstack/db", - "version": "0.1.18", + "version": "0.1.19", "author": "Ethan McDaniel", "license": "MIT", "repository": { diff --git a/packages/db/CHANGELOG.md b/packages/db/CHANGELOG.md index a5435084f..921139ba0 100644 --- a/packages/db/CHANGELOG.md +++ b/packages/db/CHANGELOG.md @@ -1,5 +1,39 @@ # @tanstack/db +## 0.4.14 + +### Patch Changes + +- Fix collection cleanup to fire status:change event with 'cleaned-up' status ([#714](https://github.com/TanStack/db/pull/714)) + + Previously, when a 
collection was garbage collected, event handlers were removed before the status was changed to 'cleaned-up'. This prevented listeners from receiving the status:change event, breaking the collection factory pattern where collections listen for cleanup to remove themselves from a cache. + + Now, the cleanup process: + 1. Cleans up sync, state, changes, and indexes + 2. Sets status to 'cleaned-up' (fires the event) + 3. Finally cleans up event handlers + + This enables the collection factory pattern: + + ```typescript + const cache = new Map>() + + const getTodoCollection = (id: string) => { + if (!cache.has(id)) { + const collection = createCollection(/* ... */) + + collection.on("status:change", ({ status }) => { + if (status === "cleaned-up") { + cache.delete(id) // This now works! + } + }) + + cache.set(id, collection) + } + return cache.get(id)! + } + ``` + ## 0.4.13 ### Patch Changes diff --git a/packages/db/package.json b/packages/db/package.json index 7bc4a7b31..ce41d80bc 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db", "description": "A reactive client store for building super fast apps on sync", - "version": "0.4.13", + "version": "0.4.14", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db-ivm": "workspace:*" diff --git a/packages/electric-db-collection/CHANGELOG.md b/packages/electric-db-collection/CHANGELOG.md index 32c5161d6..8c099cfca 100644 --- a/packages/electric-db-collection/CHANGELOG.md +++ b/packages/electric-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/electric-db-collection +## 0.1.38 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.37 ### Patch Changes diff --git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 8459ddd2e..5abc858c2 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/electric-db-collection", "description": "ElectricSQL collection for TanStack DB", - "version": "0.1.37", + "version": "0.1.38", "dependencies": { "@electric-sql/client": "^1.0.14", "@standard-schema/spec": "^1.0.0", diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index 14d085792..ca9f6e096 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/query-db-collection +## 0.2.36 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.2.35 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 5e4c9da38..8fa8a8934 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/query-db-collection", "description": "TanStack Query collection for TanStack DB", - "version": "0.2.35", + "version": "0.2.36", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*" diff --git a/packages/react-db/CHANGELOG.md b/packages/react-db/CHANGELOG.md index 4a78be379..8c668a4a0 100644 --- a/packages/react-db/CHANGELOG.md +++ b/packages/react-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.36 + +### Patch Changes + +- Updated dependencies 
[[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.35 ### Patch Changes diff --git a/packages/react-db/package.json b/packages/react-db/package.json index 3811a0b71..b5bbe0f87 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/react-db", "description": "React integration for @tanstack/db", - "version": "0.1.35", + "version": "0.1.36", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/rxdb-db-collection/CHANGELOG.md b/packages/rxdb-db-collection/CHANGELOG.md index 69d921c1c..957d87cfb 100644 --- a/packages/rxdb-db-collection/CHANGELOG.md +++ b/packages/rxdb-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/rxdb-db-collection +## 0.1.25 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.24 ### Patch Changes diff --git a/packages/rxdb-db-collection/package.json b/packages/rxdb-db-collection/package.json index 41e7538df..168ee7ccc 100644 --- a/packages/rxdb-db-collection/package.json +++ b/packages/rxdb-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/rxdb-db-collection", "description": "RxDB collection for TanStack DB", - "version": "0.1.24", + "version": "0.1.25", "dependencies": { "rxdb": "16.19.1", "@standard-schema/spec": "^1.0.0", diff --git a/packages/solid-db/CHANGELOG.md b/packages/solid-db/CHANGELOG.md index 17e00114c..e3c0b7f7c 100644 --- a/packages/solid-db/CHANGELOG.md +++ b/packages/solid-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.36 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.35 ### Patch Changes diff --git a/packages/solid-db/package.json b/packages/solid-db/package.json index 2f924741e..5bb4bd8e9 100644 --- a/packages/solid-db/package.json +++ b/packages/solid-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/solid-db", "description": "Solid integration for @tanstack/db", - "version": "0.1.35", + "version": "0.1.36", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/svelte-db/CHANGELOG.md b/packages/svelte-db/CHANGELOG.md index a5a0d9e15..e789ee0ea 100644 --- a/packages/svelte-db/CHANGELOG.md +++ b/packages/svelte-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/svelte-db +## 0.1.36 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.35 ### Patch Changes diff --git a/packages/svelte-db/package.json b/packages/svelte-db/package.json index 2021e85b6..bc28f6381 100644 --- a/packages/svelte-db/package.json +++ b/packages/svelte-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/svelte-db", "description": "Svelte integration for @tanstack/db", - "version": "0.1.35", + "version": "0.1.36", "dependencies": { "@tanstack/db": "workspace:*" }, diff --git a/packages/trailbase-db-collection/CHANGELOG.md b/packages/trailbase-db-collection/CHANGELOG.md index 355398438..598b8d35e 100644 --- a/packages/trailbase-db-collection/CHANGELOG.md +++ b/packages/trailbase-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/trailbase-db-collection +## 0.1.36 + +### Patch Changes + +- Updated dependencies 
[[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.1.35 ### Patch Changes diff --git a/packages/trailbase-db-collection/package.json b/packages/trailbase-db-collection/package.json index b16f2d58f..c32f1df96 100644 --- a/packages/trailbase-db-collection/package.json +++ b/packages/trailbase-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/trailbase-db-collection", "description": "TrailBase collection for TanStack DB", - "version": "0.1.35", + "version": "0.1.36", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", diff --git a/packages/vue-db/CHANGELOG.md b/packages/vue-db/CHANGELOG.md index a5de097a0..43d15c282 100644 --- a/packages/vue-db/CHANGELOG.md +++ b/packages/vue-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/vue-db +## 0.0.69 + +### Patch Changes + +- Updated dependencies [[`970616b`](https://github.com/TanStack/db/commit/970616b6db723d1716eecd5076417de5d6e9a884)]: + - @tanstack/db@0.4.14 + ## 0.0.68 ### Patch Changes diff --git a/packages/vue-db/package.json b/packages/vue-db/package.json index c757fba19..76764a08b 100644 --- a/packages/vue-db/package.json +++ b/packages/vue-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/vue-db", "description": "Vue integration for @tanstack/db", - "version": "0.0.68", + "version": "0.0.69", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b0199ca35..2008b7330 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -196,10 +196,10 @@ importers: specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.35 + specifier: ^0.2.36 version: link:../../../packages/query-db-collection '@tanstack/react-db': - specifier: ^0.1.35 + specifier: ^0.1.36 version: link:../../../packages/react-db '@tanstack/react-router': specifier: ^1.133.15 From 518ecda57d9cc258d2047a725e2192fdacc19fbd Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 05:35:26 +0000 Subject: [PATCH 37/56] chore(deps): update all non-major dependencies (#724) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- examples/angular/todos/package.json | 18 +- examples/react/projects/package.json | 22 +- examples/react/todo/package.json | 16 +- examples/solid/todo/package.json | 26 +- package.json | 16 +- packages/angular-db/package.json | 2 +- packages/db-ivm/package.json | 2 +- packages/db/package.json | 2 +- packages/electric-db-collection/package.json | 4 +- packages/query-db-collection/package.json | 2 +- packages/react-db/package.json | 4 +- packages/rxdb-db-collection/package.json | 4 +- packages/solid-db/package.json | 6 +- packages/svelte-db/package.json | 4 +- packages/trailbase-db-collection/package.json | 2 +- packages/vue-db/package.json | 4 +- pnpm-lock.yaml | 1654 +++++++++-------- 17 files changed, 900 insertions(+), 888 deletions(-) diff --git a/examples/angular/todos/package.json b/examples/angular/todos/package.json index 7193be00a..e500a400a 100644 --- a/examples/angular/todos/package.json +++ b/examples/angular/todos/package.json @@ -22,12 +22,12 @@ }, "private": true, "dependencies": { - "@angular/common": "^20.3.6", - "@angular/compiler": "^20.3.6", - "@angular/core": "^20.3.6", - "@angular/forms": "^20.3.6", - "@angular/platform-browser": "^20.3.6", - "@angular/router": "^20.3.6", + "@angular/common": "^20.3.7", + "@angular/compiler": "^20.3.7", + "@angular/core": 
"^20.3.7", + "@angular/forms": "^20.3.7", + "@angular/platform-browser": "^20.3.7", + "@angular/router": "^20.3.7", "@tanstack/angular-db": "workspace:*", "@tanstack/db": "workspace:*", "rxjs": "~7.8.2", @@ -35,9 +35,9 @@ "zone.js": "~0.15.1" }, "devDependencies": { - "@angular/build": "^20.3.6", - "@angular/cli": "^20.3.6", - "@angular/compiler-cli": "^20.3.6", + "@angular/build": "^20.3.7", + "@angular/cli": "^20.3.7", + "@angular/compiler-cli": "^20.3.7", "@types/jasmine": "~5.1.12", "autoprefixer": "^10.4.21", "jasmine-core": "~5.12.0", diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index 9f1fb5bca..0b8a83c61 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -15,25 +15,25 @@ "format:check": "prettier --check ." }, "dependencies": { - "@tailwindcss/vite": "^4.1.14", + "@tailwindcss/vite": "^4.1.16", "@tanstack/query-core": "^5.90.5", "@tanstack/query-db-collection": "^0.2.36", "@tanstack/react-db": "^0.1.36", - "@tanstack/react-router": "^1.133.15", - "@tanstack/react-router-devtools": "^1.133.15", + "@tanstack/react-router": "^1.133.32", + "@tanstack/react-router-devtools": "^1.133.32", "@tanstack/react-router-with-query": "^1.130.17", - "@tanstack/react-start": "^1.133.15", - "@tanstack/router-plugin": "^1.133.15", - "@trpc/client": "^11.6.0", - "@trpc/server": "^11.6.0", + "@tanstack/react-start": "^1.133.32", + "@tanstack/router-plugin": "^1.133.32", + "@trpc/client": "^11.7.0", + "@trpc/server": "^11.7.0", "better-auth": "^1.3.26", "dotenv": "^17.2.3", - "drizzle-orm": "^0.44.6", + "drizzle-orm": "^0.44.7", "drizzle-zod": "^0.8.3", "pg": "^8.16.3", "react": "^19.2.0", "react-dom": "^19.2.0", - "tailwindcss": "^4.1.14", + "tailwindcss": "^4.1.16", "vite": "^6.3.5", "vite-tsconfig-paths": "^5.1.4", "zod": "^4.1.11" @@ -46,8 +46,8 @@ "@types/pg": "^8.15.5", "@types/react": "^19.2.2", "@types/react-dom": "^19.2.2", - "@typescript-eslint/eslint-plugin": "^8.46.1", - "@typescript-eslint/parser": "^8.46.1", + "@typescript-eslint/eslint-plugin": "^8.46.2", + "@typescript-eslint/parser": "^8.46.2", "@vitejs/plugin-react": "^5.0.4", "concurrently": "^9.2.1", "drizzle-kit": "^0.31.5", diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index 9b43e3fdb..d80cbc492 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -7,32 +7,32 @@ "@tanstack/query-core": "^5.90.5", "@tanstack/query-db-collection": "workspace:*", "@tanstack/react-db": "workspace:*", - "@tanstack/react-router": "^1.133.15", - "@tanstack/react-start": "^1.133.15", + "@tanstack/react-router": "^1.133.32", + "@tanstack/react-start": "^1.133.32", "@tanstack/trailbase-db-collection": "workspace:^", "cors": "^2.8.5", - "drizzle-orm": "^0.44.6", + "drizzle-orm": "^0.44.7", "drizzle-zod": "^0.8.3", "express": "^4.21.2", "postgres": "^3.4.7", "react": "^19.2.0", "react-dom": "^19.2.0", - "tailwindcss": "^4.1.14", + "tailwindcss": "^4.1.16", "trailbase": "^0.8.0", "vite-tsconfig-paths": "^5.1.4", "zod": "^4.1.11" }, "devDependencies": { "@eslint/js": "^9.38.0", - "@tailwindcss/vite": "^4.1.14", + "@tailwindcss/vite": "^4.1.16", "@types/cors": "^2.8.19", - "@types/express": "^4.17.23", + "@types/express": "^4.17.24", "@types/node": "^24.5.2", "@types/pg": "^8.15.5", "@types/react": "^19.2.2", "@types/react-dom": "^19.2.2", - "@typescript-eslint/eslint-plugin": "^8.46.1", - "@typescript-eslint/parser": "^8.46.1", + "@typescript-eslint/eslint-plugin": "^8.46.2", + "@typescript-eslint/parser": 
"^8.46.2", "@vitejs/plugin-react": "^5.0.3", "concurrently": "^9.2.1", "dotenv": "^17.2.2", diff --git a/examples/solid/todo/package.json b/examples/solid/todo/package.json index 46adbc1d5..bcd4f99a1 100644 --- a/examples/solid/todo/package.json +++ b/examples/solid/todo/package.json @@ -3,32 +3,32 @@ "private": true, "version": "0.0.33", "dependencies": { - "@tanstack/electric-db-collection": "^0.1.35", + "@tanstack/electric-db-collection": "^0.1.38", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.32", - "@tanstack/solid-db": "^0.1.33", - "@tanstack/solid-router": "^1.133.15", - "@tanstack/solid-start": "^1.133.15", - "@tanstack/trailbase-db-collection": "^0.1.33", + "@tanstack/query-db-collection": "^0.2.36", + "@tanstack/solid-db": "^0.1.36", + "@tanstack/solid-router": "^1.133.31", + "@tanstack/solid-start": "^1.133.32", + "@tanstack/trailbase-db-collection": "^0.1.36", "cors": "^2.8.5", - "drizzle-orm": "^0.44.6", + "drizzle-orm": "^0.44.7", "drizzle-zod": "^0.8.3", "express": "^4.21.2", "postgres": "^3.4.7", "solid-js": "^1.9.9", - "tailwindcss": "^4.1.14", + "tailwindcss": "^4.1.16", "trailbase": "^0.8.0", "vite-tsconfig-paths": "^5.1.4" }, "devDependencies": { "@eslint/js": "^9.38.0", - "@tailwindcss/vite": "^4.1.14", + "@tailwindcss/vite": "^4.1.16", "@types/cors": "^2.8.19", - "@types/express": "^4.17.23", + "@types/express": "^4.17.24", "@types/node": "^22.18.1", "@types/pg": "^8.15.5", - "@typescript-eslint/eslint-plugin": "^8.46.1", - "@typescript-eslint/parser": "^8.46.1", + "@typescript-eslint/eslint-plugin": "^8.46.2", + "@typescript-eslint/parser": "^8.46.2", "concurrently": "^9.2.1", "dotenv": "^16.6.1", "drizzle-kit": "^0.31.5", @@ -38,7 +38,7 @@ "tsx": "^4.20.6", "typescript": "^5.9.2", "vite": "^6.3.6", - "vite-plugin-solid": "^2.11.9" + "vite-plugin-solid": "^2.11.10" }, "scripts": { "build": "vite build", diff --git a/package.json b/package.json index 6be9d8530..be8334fcc 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "type": "git", "url": "https://github.com/tanstack/db.git" }, - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "type": "module", "scripts": { "build": "pnpm --filter \"./packages/**\" build", @@ -31,9 +31,9 @@ "@types/react": "^19.2.2", "@types/react-dom": "^19.2.2", "@types/use-sync-external-store": "^1.5.0", - "@typescript-eslint/eslint-plugin": "^8.46.1", - "@typescript-eslint/parser": "^8.46.1", - "@vitejs/plugin-react": "^5.0.4", + "@typescript-eslint/eslint-plugin": "^8.46.2", + "@typescript-eslint/parser": "^8.46.2", + "@vitejs/plugin-react": "^5.1.0", "eslint": "^9.38.0", "eslint-config-prettier": "^10.1.8", "eslint-import-resolver-typescript": "^4.4.4", @@ -41,17 +41,17 @@ "eslint-plugin-react": "^7.37.5", "husky": "^9.1.7", "jsdom": "^27.0.1", - "knip": "^5.66.1", + "knip": "^5.66.3", "lint-staged": "^15.5.2", "markdown-link-extractor": "^4.0.2", "mitt": "^3.0.1", "prettier": "^3.6.2", - "publint": "^0.3.14", - "sherif": "^1.6.1", + "publint": "^0.3.15", + "sherif": "^1.7.0", "shx": "^0.4.0", "tinyglobby": "^0.2.15", "typescript": "^5.9.2", - "vite": "^7.1.10", + "vite": "^7.1.12", "vitest": "^3.2.4", "zod": "^3.25.76" }, diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index 170e3a35c..bdb37d443 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -15,7 +15,7 @@ "angular", "typescript" ], - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "dependencies": { "@tanstack/db": "workspace:*" }, 
diff --git a/packages/db-ivm/package.json b/packages/db-ivm/package.json index 0d18d7dab..e6e8608e3 100644 --- a/packages/db-ivm/package.json +++ b/packages/db-ivm/package.json @@ -29,7 +29,7 @@ ], "main": "dist/cjs/index.cjs", "module": "dist/esm/index.js", - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "peerDependencies": { "typescript": ">=4.7" }, diff --git a/packages/db/package.json b/packages/db/package.json index ce41d80bc..9cfb30767 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -30,7 +30,7 @@ ], "main": "dist/cjs/index.cjs", "module": "dist/esm/index.js", - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "peerDependencies": { "typescript": ">=4.7" }, diff --git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 5abc858c2..7e9405d61 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -3,7 +3,7 @@ "description": "ElectricSQL collection for TanStack DB", "version": "0.1.38", "dependencies": { - "@electric-sql/client": "^1.0.14", + "@electric-sql/client": "^1.1.0", "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", "@tanstack/store": "^0.8.0", @@ -32,7 +32,7 @@ ], "main": "dist/cjs/index.cjs", "module": "dist/esm/index.js", - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 8fa8a8934..972b5a0dd 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -29,7 +29,7 @@ ], "main": "dist/cjs/index.cjs", "module": "dist/esm/index.js", - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "peerDependencies": { "@tanstack/query-core": "^5.0.0", "typescript": ">=4.7" diff --git a/packages/react-db/package.json b/packages/react-db/package.json index b5bbe0f87..06accb5b4 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -15,13 +15,13 @@ "react", "typescript" ], - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "dependencies": { "@tanstack/db": "workspace:*", "use-sync-external-store": "^1.6.0" }, "devDependencies": { - "@electric-sql/client": "1.0.14", + "@electric-sql/client": "1.1.0", "@testing-library/react": "^16.3.0", "@types/react": "^19.2.2", "@types/react-dom": "^19.2.2", diff --git a/packages/rxdb-db-collection/package.json b/packages/rxdb-db-collection/package.json index 168ee7ccc..87b9e31e7 100644 --- a/packages/rxdb-db-collection/package.json +++ b/packages/rxdb-db-collection/package.json @@ -3,7 +3,7 @@ "description": "RxDB collection for TanStack DB", "version": "0.1.25", "dependencies": { - "rxdb": "16.19.1", + "rxdb": "16.20.0", "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", "@tanstack/store": "^0.8.0", @@ -32,7 +32,7 @@ ], "main": "dist/cjs/index.cjs", "module": "dist/esm/index.js", - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "peerDependencies": { "rxdb": ">=16.17.2", "rxjs": ">=7.8.2", diff --git a/packages/solid-db/package.json b/packages/solid-db/package.json index 5bb4bd8e9..5959bd75e 100644 --- a/packages/solid-db/package.json +++ b/packages/solid-db/package.json @@ -15,18 +15,18 @@ "solid", "typescript" ], - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "dependencies": { "@solid-primitives/map": "^0.7.2", 
"@tanstack/db": "workspace:*" }, "devDependencies": { - "@electric-sql/client": "1.0.14", + "@electric-sql/client": "1.1.0", "@solidjs/testing-library": "^0.8.10", "@vitest/coverage-istanbul": "^3.2.4", "jsdom": "^27.0.1", "solid-js": "^1.9.9", - "vite-plugin-solid": "^2.11.9", + "vite-plugin-solid": "^2.11.10", "vitest": "^3.2.4" }, "exports": { diff --git a/packages/svelte-db/package.json b/packages/svelte-db/package.json index bc28f6381..7f5c51cdc 100644 --- a/packages/svelte-db/package.json +++ b/packages/svelte-db/package.json @@ -9,8 +9,8 @@ "@sveltejs/package": "^2.5.4", "@sveltejs/vite-plugin-svelte": "^6.2.1", "@vitest/coverage-istanbul": "^3.2.4", - "publint": "^0.3.14", - "svelte": "^5.41.0", + "publint": "^0.3.15", + "svelte": "^5.42.2", "svelte-check": "^4.3.3" }, "exports": { diff --git a/packages/trailbase-db-collection/package.json b/packages/trailbase-db-collection/package.json index c32f1df96..7521bdf94 100644 --- a/packages/trailbase-db-collection/package.json +++ b/packages/trailbase-db-collection/package.json @@ -32,7 +32,7 @@ ], "main": "dist/cjs/index.cjs", "module": "dist/esm/index.js", - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "peerDependencies": { "typescript": ">=4.7" }, diff --git a/packages/vue-db/package.json b/packages/vue-db/package.json index 76764a08b..0a04934ff 100644 --- a/packages/vue-db/package.json +++ b/packages/vue-db/package.json @@ -15,12 +15,12 @@ "vue", "typescript" ], - "packageManager": "pnpm@10.18.3", + "packageManager": "pnpm@10.19.0", "dependencies": { "@tanstack/db": "workspace:*" }, "devDependencies": { - "@electric-sql/client": "1.0.14", + "@electric-sql/client": "1.1.0", "@vitejs/plugin-vue": "^6.0.1", "@vitest/coverage-istanbul": "^3.2.4", "vue": "^3.5.22" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2008b7330..c00207ff1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -16,13 +16,13 @@ importers: version: 9.38.0 '@stylistic/eslint-plugin': specifier: ^4.4.1 - version: 4.4.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + version: 4.4.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@svitejs/changesets-changelog-github-compact': specifier: ^1.2.0 version: 1.2.0(encoding@0.1.13) '@tanstack/config': specifier: ^0.22.0 - version: 0.22.0(@types/node@24.7.0)(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 0.22.0(@types/node@24.7.0)(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@testing-library/jest-dom': specifier: ^6.9.1 version: 6.9.1 @@ -39,29 +39,29 @@ importers: specifier: ^1.5.0 version: 1.5.0 '@typescript-eslint/eslint-plugin': - specifier: ^8.46.1 - version: 8.46.1(@typescript-eslint/parser@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(@typescript-eslint/parser@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/parser': - specifier: ^8.46.1 - version: 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@vitejs/plugin-react': - specifier: 
^5.0.4 - version: 5.0.4(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^5.1.0 + version: 5.1.0(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) eslint: specifier: ^9.38.0 - version: 9.38.0(jiti@2.6.0) + version: 9.38.0(jiti@2.6.1) eslint-config-prettier: specifier: ^10.1.8 - version: 10.1.8(eslint@9.38.0(jiti@2.6.0)) + version: 10.1.8(eslint@9.38.0(jiti@2.6.1)) eslint-import-resolver-typescript: specifier: ^4.4.4 - version: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0)))(eslint@9.38.0(jiti@2.6.0)) + version: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1)))(eslint@9.38.0(jiti@2.6.1)) eslint-plugin-prettier: specifier: ^5.5.4 - version: 5.5.4(eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.0)))(eslint@9.38.0(jiti@2.6.0))(prettier@3.6.2) + version: 5.5.4(eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.1)))(eslint@9.38.0(jiti@2.6.1))(prettier@3.6.2) eslint-plugin-react: specifier: ^7.37.5 - version: 7.37.5(eslint@9.38.0(jiti@2.6.0)) + version: 7.37.5(eslint@9.38.0(jiti@2.6.1)) husky: specifier: ^9.1.7 version: 9.1.7 @@ -69,8 +69,8 @@ importers: specifier: ^27.0.1 version: 27.0.1(postcss@8.5.6) knip: - specifier: ^5.66.1 - version: 5.66.1(@types/node@24.7.0)(typescript@5.9.3) + specifier: ^5.66.3 + version: 5.66.3(@types/node@24.7.0)(typescript@5.9.3) lint-staged: specifier: ^15.5.2 version: 15.5.2 @@ -84,11 +84,11 @@ importers: specifier: ^3.6.2 version: 3.6.2 publint: - specifier: ^0.3.14 - version: 0.3.14 + specifier: ^0.3.15 + version: 0.3.15 sherif: - specifier: ^1.6.1 - version: 1.6.1 + specifier: ^1.7.0 + version: 1.7.0 shx: specifier: ^0.4.0 version: 0.4.0 @@ -99,11 +99,11 @@ importers: specifier: ^5.9.2 version: 5.9.3 vite: - specifier: ^7.1.10 - version: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + specifier: ^7.1.12 + version: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) zod: specifier: ^3.25.76 version: 3.25.76 @@ -111,23 +111,23 @@ importers: examples/angular/todos: dependencies: '@angular/common': - specifier: ^20.3.6 - version: 20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) + specifier: ^20.3.7 + version: 20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) '@angular/compiler': - specifier: ^20.3.6 - version: 20.3.6 + specifier: ^20.3.7 + version: 20.3.7 '@angular/core': - specifier: ^20.3.6 - version: 20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1) + specifier: ^20.3.7 + version: 20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1) '@angular/forms': - specifier: ^20.3.6 - version: 
20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2) + specifier: ^20.3.7 + version: 20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2) '@angular/platform-browser': - specifier: ^20.3.6 - version: 20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)) + specifier: ^20.3.7 + version: 20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)) '@angular/router': - specifier: ^20.3.6 - version: 20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2) + specifier: ^20.3.7 + version: 20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2) '@tanstack/angular-db': specifier: workspace:* version: link:../../../packages/angular-db @@ -145,14 +145,14 @@ importers: version: 0.15.1 devDependencies: '@angular/build': - specifier: ^20.3.6 - version: 20.3.6(@angular/compiler-cli@20.3.6(@angular/compiler@20.3.6)(typescript@5.8.3))(@angular/compiler@20.3.6)(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.0)(karma@6.4.4)(lightningcss@1.30.1)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(yaml@2.8.1) + specifier: ^20.3.7 + version: 
20.3.7(@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3))(@angular/compiler@20.3.7)(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.1)(karma@6.4.4)(lightningcss@1.30.2)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(yaml@2.8.1) '@angular/cli': - specifier: ^20.3.6 - version: 20.3.6(@types/node@24.7.0)(chokidar@4.0.3) + specifier: ^20.3.7 + version: 20.3.7(@types/node@24.7.0)(chokidar@4.0.3) '@angular/compiler-cli': - specifier: ^20.3.6 - version: 20.3.6(@angular/compiler@20.3.6)(typescript@5.8.3) + specifier: ^20.3.7 + version: 20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3) '@types/jasmine': specifier: ~5.1.12 version: 5.1.12 @@ -190,8 +190,8 @@ importers: examples/react/projects: dependencies: '@tailwindcss/vite': - specifier: ^4.1.14 - version: 4.1.14(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^4.1.16 + version: 4.1.16(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@tanstack/query-core': specifier: ^5.90.5 version: 5.90.5 @@ -202,26 +202,26 @@ importers: specifier: ^0.1.36 version: link:../../../packages/react-db '@tanstack/react-router': - specifier: ^1.133.15 - version: 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + specifier: ^1.133.32 + version: 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@tanstack/react-router-devtools': - specifier: ^1.133.15 - version: 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.15)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.0)(lightningcss@1.30.1)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1) + specifier: ^1.133.32 + version: 1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.28)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.1)(lightningcss@1.30.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1) '@tanstack/react-router-with-query': specifier: ^1.130.17 - version: 1.130.17(@tanstack/react-query@5.83.0(react@19.2.0))(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.15)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + version: 1.130.17(@tanstack/react-query@5.83.0(react@19.2.0))(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.28)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@tanstack/react-start': - specifier: ^1.133.15 - version: 
1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^1.133.32 + version: 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@tanstack/router-plugin': - specifier: ^1.133.15 - version: 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^1.133.32 + version: 1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@trpc/client': - specifier: ^11.6.0 - version: 11.6.0(@trpc/server@11.6.0(typescript@5.9.3))(typescript@5.9.3) + specifier: ^11.7.0 + version: 11.7.0(@trpc/server@11.7.0(typescript@5.9.3))(typescript@5.9.3) '@trpc/server': - specifier: ^11.6.0 - version: 11.6.0(typescript@5.9.3) + specifier: ^11.7.0 + version: 11.7.0(typescript@5.9.3) better-auth: specifier: ^1.3.26 version: 1.3.27(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(solid-js@1.9.9) @@ -229,11 +229,11 @@ importers: specifier: ^17.2.3 version: 17.2.3 drizzle-orm: - specifier: ^0.44.6 - version: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) + specifier: ^0.44.7 + version: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) + version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) pg: specifier: ^8.16.3 version: 8.16.3 @@ -244,21 +244,21 @@ importers: specifier: ^19.2.0 version: 19.2.0(react@19.2.0) tailwindcss: - specifier: ^4.1.14 - version: 4.1.14 + specifier: ^4.1.16 + version: 4.1.16 vite: specifier: ^6.3.5 - version: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) vite-tsconfig-paths: specifier: ^5.1.4 - version: 5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) zod: specifier: ^4.1.11 version: 4.1.11 devDependencies: 
'@eslint/compat': specifier: ^1.4.0 - version: 1.4.0(eslint@9.38.0(jiti@2.6.0)) + version: 1.4.0(eslint@9.38.0(jiti@2.6.1)) '@eslint/js': specifier: ^9.38.0 version: 9.38.0 @@ -278,14 +278,14 @@ importers: specifier: ^19.2.2 version: 19.2.2(@types/react@19.2.2) '@typescript-eslint/eslint-plugin': - specifier: ^8.46.1 - version: 8.46.1(@typescript-eslint/parser@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(@typescript-eslint/parser@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/parser': - specifier: ^8.46.1 - version: 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@vitejs/plugin-react': specifier: ^5.0.4 - version: 5.0.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 5.1.0(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) concurrently: specifier: ^9.2.1 version: 9.2.1 @@ -294,16 +294,16 @@ importers: version: 0.31.5 eslint: specifier: ^9.38.0 - version: 9.38.0(jiti@2.6.0) + version: 9.38.0(jiti@2.6.1) eslint-config-prettier: specifier: ^10.1.8 - version: 10.1.8(eslint@9.38.0(jiti@2.6.0)) + version: 10.1.8(eslint@9.38.0(jiti@2.6.1)) eslint-plugin-prettier: specifier: ^5.5.4 - version: 5.5.4(eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.0)))(eslint@9.38.0(jiti@2.6.0))(prettier@3.6.2) + version: 5.5.4(eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.1)))(eslint@9.38.0(jiti@2.6.1))(prettier@3.6.2) eslint-plugin-react: specifier: ^7.37.5 - version: 7.37.5(eslint@9.38.0(jiti@2.6.0)) + version: 7.37.5(eslint@9.38.0(jiti@2.6.1)) globals: specifier: ^16.4.0 version: 16.4.0 @@ -321,7 +321,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) web-vitals: specifier: ^5.1.0 version: 5.1.0 @@ -341,11 +341,11 @@ importers: specifier: workspace:* version: link:../../../packages/react-db '@tanstack/react-router': - specifier: ^1.133.15 - version: 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + specifier: ^1.133.32 + version: 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@tanstack/react-start': - specifier: ^1.133.15 - version: 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^1.133.32 + version: 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@tanstack/trailbase-db-collection': specifier: workspace:^ version: 
link:../../../packages/trailbase-db-collection @@ -353,11 +353,11 @@ importers: specifier: ^2.8.5 version: 2.8.5 drizzle-orm: - specifier: ^0.44.6 - version: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) + specifier: ^0.44.7 + version: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) + version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) express: specifier: ^4.21.2 version: 4.21.2 @@ -371,14 +371,14 @@ importers: specifier: ^19.2.0 version: 19.2.0(react@19.2.0) tailwindcss: - specifier: ^4.1.14 - version: 4.1.14 + specifier: ^4.1.16 + version: 4.1.16 trailbase: specifier: ^0.8.0 version: 0.8.0 vite-tsconfig-paths: specifier: ^5.1.4 - version: 5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) zod: specifier: ^4.1.11 version: 4.1.11 @@ -387,14 +387,14 @@ importers: specifier: ^9.38.0 version: 9.38.0 '@tailwindcss/vite': - specifier: ^4.1.14 - version: 4.1.14(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^4.1.16 + version: 4.1.16(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@types/cors': specifier: ^2.8.19 version: 2.8.19 '@types/express': - specifier: ^4.17.23 - version: 4.17.23 + specifier: ^4.17.24 + version: 4.17.24 '@types/node': specifier: ^24.5.2 version: 24.7.0 @@ -408,14 +408,14 @@ importers: specifier: ^19.2.2 version: 19.2.2(@types/react@19.2.2) '@typescript-eslint/eslint-plugin': - specifier: ^8.46.1 - version: 8.46.1(@typescript-eslint/parser@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(@typescript-eslint/parser@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/parser': - specifier: ^8.46.1 - version: 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@vitejs/plugin-react': specifier: ^5.0.3 - version: 5.0.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 5.1.0(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) concurrently: specifier: ^9.2.1 version: 9.2.1 @@ -427,13 +427,13 @@ importers: version: 0.31.5 eslint: specifier: ^9.38.0 - version: 9.38.0(jiti@2.6.0) + version: 9.38.0(jiti@2.6.1) eslint-plugin-react-hooks: specifier: ^5.2.0 - version: 5.2.0(eslint@9.38.0(jiti@2.6.0)) + version: 5.2.0(eslint@9.38.0(jiti@2.6.1)) eslint-plugin-react-refresh: specifier: ^0.4.24 - version: 0.4.24(eslint@9.38.0(jiti@2.6.0)) + version: 0.4.24(eslint@9.38.0(jiti@2.6.1)) pg: specifier: ^8.16.3 version: 8.16.3 @@ -445,40 +445,40 @@ importers: version: 5.9.3 vite: specifier: ^6.1.1 - version: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 
6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) examples/solid/todo: dependencies: '@tanstack/electric-db-collection': - specifier: ^0.1.35 + specifier: ^0.1.38 version: link:../../../packages/electric-db-collection '@tanstack/query-core': specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.32 + specifier: ^0.2.36 version: link:../../../packages/query-db-collection '@tanstack/solid-db': - specifier: ^0.1.33 + specifier: ^0.1.36 version: link:../../../packages/solid-db '@tanstack/solid-router': - specifier: ^1.133.15 - version: 1.133.15(solid-js@1.9.9) + specifier: ^1.133.31 + version: 1.133.31(solid-js@1.9.9) '@tanstack/solid-start': - specifier: ^1.133.15 - version: 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(solid-js@1.9.9)(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^1.133.32 + version: 1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(solid-js@1.9.9)(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@tanstack/trailbase-db-collection': - specifier: ^0.1.33 + specifier: ^0.1.36 version: link:../../../packages/trailbase-db-collection cors: specifier: ^2.8.5 version: 2.8.5 drizzle-orm: - specifier: ^0.44.6 - version: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) + specifier: ^0.44.7 + version: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) + version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) express: specifier: ^4.21.2 version: 4.21.2 @@ -489,27 +489,27 @@ importers: specifier: ^1.9.9 version: 1.9.9 tailwindcss: - specifier: ^4.1.14 - version: 4.1.14 + specifier: ^4.1.16 + version: 4.1.16 trailbase: specifier: ^0.8.0 version: 0.8.0 vite-tsconfig-paths: specifier: ^5.1.4 - version: 5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) devDependencies: '@eslint/js': specifier: ^9.38.0 version: 9.38.0 '@tailwindcss/vite': - specifier: ^4.1.14 - version: 4.1.14(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^4.1.16 + version: 4.1.16(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@types/cors': specifier: ^2.8.19 version: 2.8.19 '@types/express': - specifier: ^4.17.23 - version: 4.17.23 + specifier: ^4.17.24 + version: 4.17.24 '@types/node': specifier: ^22.18.1 version: 22.18.1 @@ -517,11 +517,11 @@ importers: specifier: 
^8.15.5 version: 8.15.5 '@typescript-eslint/eslint-plugin': - specifier: ^8.46.1 - version: 8.46.1(@typescript-eslint/parser@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(@typescript-eslint/parser@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/parser': - specifier: ^8.46.1 - version: 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + specifier: ^8.46.2 + version: 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) concurrently: specifier: ^9.2.1 version: 9.2.1 @@ -533,10 +533,10 @@ importers: version: 0.31.5 eslint: specifier: ^9.38.0 - version: 9.38.0(jiti@2.6.0) + version: 9.38.0(jiti@2.6.1) eslint-plugin-solid: specifier: ^0.14.5 - version: 0.14.5(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + version: 0.14.5(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) pg: specifier: ^8.16.3 version: 8.16.3 @@ -548,10 +548,10 @@ importers: version: 5.9.3 vite: specifier: ^6.3.6 - version: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) vite-plugin-solid: - specifier: ^2.11.9 - version: 2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^2.11.10 + version: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) packages/angular-db: dependencies: @@ -576,7 +576,7 @@ importers: version: 19.2.15(@angular/common@19.2.15(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/compiler@19.2.15)(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@19.2.15(@angular/common@19.2.15(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@19.2.15(rxjs@7.8.2)(zone.js@0.15.1))) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) rxjs: specifier: ^7.8.2 version: 7.8.2 @@ -598,7 +598,7 @@ importers: devDependencies: '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) arktype: specifier: ^2.1.23 version: 2.1.23 @@ -623,13 +623,13 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) packages/electric-db-collection: dependencies: '@electric-sql/client': - specifier: ^1.0.14 - version: 1.0.14 + specifier: ^1.1.0 + version: 1.1.0 '@standard-schema/spec': specifier: ^1.0.0 version: 1.0.0 @@ -648,7 +648,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) packages/query-db-collection: dependencies: @@ -667,7 +667,7 @@ importers: version: 5.90.5 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) packages/react-db: dependencies: @@ -679,8 +679,8 @@ importers: version: 1.6.0(react@19.2.0) devDependencies: '@electric-sql/client': - specifier: 1.0.14 - version: 1.0.14 + specifier: 1.1.0 + version: 1.1.0 '@testing-library/react': specifier: ^16.3.0 version: 16.3.0(@testing-library/dom@10.4.1)(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) @@ -695,7 +695,7 @@ importers: version: 1.5.0 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) react: specifier: ^19.2.0 version: 19.2.0 @@ -718,8 +718,8 @@ importers: specifier: ^4.4.3 version: 4.4.3 rxdb: - specifier: 16.19.1 - version: 16.19.1(rxjs@7.8.2)(socks@2.8.7) + specifier: 16.20.0 + version: 16.20.0(rxjs@7.8.2)(socks@2.8.7) rxjs: specifier: '>=7.8.2' version: 7.8.2 @@ -732,7 +732,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) packages/solid-db: dependencies: @@ -744,14 +744,14 @@ importers: version: link:../db devDependencies: '@electric-sql/client': - specifier: 1.0.14 - version: 1.0.14 + specifier: 1.1.0 + version: 1.1.0 '@solidjs/testing-library': specifier: ^0.8.10 version: 0.8.10(solid-js@1.9.9) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 
3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) jsdom: specifier: ^27.0.1 version: 27.0.1(postcss@8.5.6) @@ -759,11 +759,11 @@ importers: specifier: ^1.9.9 version: 1.9.9 vite-plugin-solid: - specifier: ^2.11.9 - version: 2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + specifier: ^2.11.10 + version: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) vitest: specifier: ^3.2.4 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) packages/svelte-db: dependencies: @@ -773,22 +773,22 @@ importers: devDependencies: '@sveltejs/package': specifier: ^2.5.4 - version: 2.5.4(svelte@5.41.0)(typescript@5.9.3) + version: 2.5.4(svelte@5.42.2)(typescript@5.9.3) '@sveltejs/vite-plugin-svelte': specifier: ^6.2.1 - version: 6.2.1(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 6.2.1(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) publint: - specifier: ^0.3.14 - version: 0.3.14 + specifier: ^0.3.15 + version: 0.3.15 svelte: - specifier: ^5.41.0 - version: 5.41.0 + specifier: ^5.42.2 + version: 5.42.2 svelte-check: specifier: ^4.3.3 - version: 4.3.3(picomatch@4.0.3)(svelte@5.41.0)(typescript@5.9.3) + version: 4.3.3(picomatch@4.0.3)(svelte@5.42.2)(typescript@5.9.3) packages/trailbase-db-collection: dependencies: @@ -816,7 +816,7 @@ importers: version: 4.1.12 '@vitest/coverage-istanbul': specifier: ^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) packages/vue-db: dependencies: @@ -825,14 +825,14 @@ importers: version: link:../db devDependencies: '@electric-sql/client': - specifier: 1.0.14 - version: 1.0.14 + specifier: 1.1.0 + version: 1.1.0 '@vitejs/plugin-vue': specifier: ^6.0.1 - version: 6.0.1(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3)) + version: 6.0.1(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3)) '@vitest/coverage-istanbul': specifier: 
^3.2.4 - version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + version: 3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) vue: specifier: ^3.5.22 version: 3.5.22(typescript@5.9.3) @@ -906,12 +906,12 @@ packages: resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} - '@angular-devkit/architect@0.2003.6': - resolution: {integrity: sha512-VtXxfJzrBZ8MQN83shXNaTUaLSOIwa+4/3LD5drxSnHuYJrz+d3FIApWAxcA9QzucsTDZwXyFxaWZN/e5XVm6g==} + '@angular-devkit/architect@0.2003.7': + resolution: {integrity: sha512-NGHLfrNQNjwWwvyQomMM1AqRaqH3UU0TwySJh9XlSc9dC/roB5zD2NjLf98K4LfAIfHvDBwkQ+dMo3F556/Xuw==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'} - '@angular-devkit/core@20.3.6': - resolution: {integrity: sha512-uLRk3865Iz/EO9Zm/mrFfdyoZinJBihXE6HVDYRYjAqsgW14LsD8pkpWy9+LYlOwcH96Ndnev+msxaTJaNXtPg==} + '@angular-devkit/core@20.3.7': + resolution: {integrity: sha512-psmcjwYcXve4sLrcdnARc15/Wfd3RpydbtLo9+mViNzk5HQ6L2eEztKl/2QVYMgzZVIa1GfhjwUllVCyLAv3sg==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'} peerDependencies: chokidar: ^4.0.0 @@ -919,12 +919,12 @@ packages: chokidar: optional: true - '@angular-devkit/schematics@20.3.6': - resolution: {integrity: sha512-QD7QS1oR0XcZ9ZI4D1c4JjKmSn2up/ocOU2FS1mMO7S5RtAZMsPv4J3r+6ywHA2ev2sRySOQ0D8OYBcEuYX9Jw==} + '@angular-devkit/schematics@20.3.7': + resolution: {integrity: sha512-DUxcQBPKO69p56ZgIdVfxWyLiSjdcUoD6BH9/nWHp0QiqRAR6GcXP4SFax76JPl2WsiCp4hHZ233Hf69AP1xew==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'} - '@angular/build@20.3.6': - resolution: {integrity: sha512-O5qyxCCe77tu1zy9XudKxqFqi5zih0ZI8J8Anra/ZZdtTKbLMprXMGFzMYzwCqvcIzzbmOumkSJKoXbFazHaaw==} + '@angular/build@20.3.7': + resolution: {integrity: sha512-NHN5JNDqUc0Ux4IZPCe/fpFAnuRHujkxVfRHSqDFW5+jtj2JuW1XO6qlX+kDheFRlj/NvFgTpidKsE9IjpfMWQ==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'} peerDependencies: '@angular/compiler': ^20.0.0 @@ -934,7 +934,7 @@ packages: '@angular/platform-browser': ^20.0.0 '@angular/platform-server': ^20.0.0 '@angular/service-worker': ^20.0.0 - '@angular/ssr': ^20.3.6 + '@angular/ssr': ^20.3.7 karma: ^6.4.0 less: ^4.2.0 ng-packagr: ^20.0.0 @@ -969,8 +969,8 @@ packages: vitest: optional: true - '@angular/cli@20.3.6': - resolution: {integrity: sha512-1RozAub7Gcl5ES3vBYatIgoMDgujlvySwHARoYT+1VhbYvM0RTt4sn2aDhHxqG0GcyiXR5zISkzJvldaY2nQCQ==} + '@angular/cli@20.3.7': + resolution: {integrity: sha512-hNurF7g/e9cDHFBRCKLPSmQJs0n28jZsC3sTl/XuWE8PYtv5egh2EuqrxdruYB5GdANpIqSQNgDGQJrKrk/XnQ==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'} hasBin: true @@ -981,19 +981,19 @@ packages: '@angular/core': 19.2.15 rxjs: ^6.5.3 || ^7.4.0 - '@angular/common@20.3.6': - resolution: {integrity: sha512-+gHMuFe0wz4f+vfGZ2q+fSQSYaY7KlN7QdDrFqLnA7H2sythzhXvRbXEtp4DkPjihh9gupXg2MeLh1ROy5AfSw==} + '@angular/common@20.3.7': + resolution: {integrity: sha512-uf8dXYTJbedk/wudkt2MfbtvN/T97aEZBtOTq8/IFQQZ3722rag6D+Cg76e5hBccROOn+ueGJX2gpxz02phTwA==} engines: {node: ^20.19.0 || 
^22.12.0 || >=24.0.0} peerDependencies: - '@angular/core': 20.3.6 + '@angular/core': 20.3.7 rxjs: ^6.5.3 || ^7.4.0 - '@angular/compiler-cli@20.3.6': - resolution: {integrity: sha512-VOFRBx9fBt2jW9I8qD23fwGeKxBI8JssJBAMqnFPl3k59VJWHQi6LlXZCLCBNdfwflTJdKeRvdgT51Q0k6tnFQ==} + '@angular/compiler-cli@20.3.7': + resolution: {integrity: sha512-viZwWlwc1BAqryRJE0Wq2WgAxDaW9fuwtYHYrOWnIn9sy9KemKmR6RmU9VRydrwUROOlqK49R9+RC1wQ6sYwqA==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} hasBin: true peerDependencies: - '@angular/compiler': 20.3.6 + '@angular/compiler': 20.3.7 typescript: '>=5.8 <6.0' peerDependenciesMeta: typescript: @@ -1003,8 +1003,8 @@ packages: resolution: {integrity: sha512-hMHZU6/03xG0tbPDIm1hbVSTFLnRkGYfh+xdBwUMnIFYYTS0QJ2hdPfEZKCJIXm+fz9IAI5MPdDTfeyp0sgaHQ==} engines: {node: ^18.19.1 || ^20.11.1 || >=22.0.0} - '@angular/compiler@20.3.6': - resolution: {integrity: sha512-OdjXBsAsnn7qiW6fSHClwn9XwjVxhtO9+RbDc6Mf+YPCnJq0s8T78H2fc8VdJFp/Rs+tMZcwwjd9VZPm8+2XWA==} + '@angular/compiler@20.3.7': + resolution: {integrity: sha512-EouHO15dUsgnFArj0M25R8cOPVoUfiFYSt6iXnMO8+S4dY1fDEmbFqkW5smlP66HL5Gys59Nwb5inejfIWHrLw==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} '@angular/core@19.2.15': @@ -1014,11 +1014,11 @@ packages: rxjs: ^6.5.3 || ^7.4.0 zone.js: ~0.15.0 - '@angular/core@20.3.6': - resolution: {integrity: sha512-sDURQWnjwE4Y750u/5qwkZEYMoI4CrKghnx4aKulxCnohR3//C78wvz6p8MtCuqYfzGkdQZDYFg8tgAz17qgPw==} + '@angular/core@20.3.7': + resolution: {integrity: sha512-2UuYzC2A5SUtu33tYTN411Wk0WilA+2Uld/GP3O6mragw1O7v/M8pMFmbe9TR5Ah/abRJIocWGlNqeztZmQmrw==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} peerDependencies: - '@angular/compiler': 20.3.6 + '@angular/compiler': 20.3.7 rxjs: ^6.5.3 || ^7.4.0 zone.js: ~0.15.0 peerDependenciesMeta: @@ -1027,13 +1027,13 @@ packages: zone.js: optional: true - '@angular/forms@20.3.6': - resolution: {integrity: sha512-tBGo/LBtCtSrClMY4DTm/3UiSjqLLMEYXS/4E0nW1mFDv7ulKnaAQB+KbfBmmTHYxlKLs+SxjKv6GoydMPSurA==} + '@angular/forms@20.3.7': + resolution: {integrity: sha512-uOCGCoqXeAWIlQMWiIeed/W8g8h2tk91YemMI+Ce1VQ/36Xfft40Bouz4eKcvJV6kLXGygdpWjzFGz32CE+3Og==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} peerDependencies: - '@angular/common': 20.3.6 - '@angular/core': 20.3.6 - '@angular/platform-browser': 20.3.6 + '@angular/common': 20.3.7 + '@angular/core': 20.3.7 + '@angular/platform-browser': 20.3.7 rxjs: ^6.5.3 || ^7.4.0 '@angular/platform-browser-dynamic@19.2.15': @@ -1056,24 +1056,24 @@ packages: '@angular/animations': optional: true - '@angular/platform-browser@20.3.6': - resolution: {integrity: sha512-gFp1yd+HtRN8XdpMatRLO5w6FLIzsnF31lD2Duo4BUTCoMAMdfaNT6FtcvNdKu7ANo27Ke26fxEEE2bh6FU98A==} + '@angular/platform-browser@20.3.7': + resolution: {integrity: sha512-AbLtyR7fVEGDYyrz95dP2pc69J5XIjLLsFNAuNQPzNX02WPoAxtrWrNY6UnTzGoSrCc5F52hiL2Uo6yPZTiJcg==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} peerDependencies: - '@angular/animations': 20.3.6 - '@angular/common': 20.3.6 - '@angular/core': 20.3.6 + '@angular/animations': 20.3.7 + '@angular/common': 20.3.7 + '@angular/core': 20.3.7 peerDependenciesMeta: '@angular/animations': optional: true - '@angular/router@20.3.6': - resolution: {integrity: sha512-fSAYOR9nKpH5PoBYFNdII3nAFl2maUrYiISU33CnGwb7J7Q0s09k231c/P5tVN4URi+jdADVwiBI8cIYk8SVrg==} + '@angular/router@20.3.7': + resolution: {integrity: sha512-Lq7mCNcLP1npmNh2JlNEe02YS2jNnaLnCy/t//o+Qq0c6DGV78JRl7pHubiB2R6XXlgvOcZWg88v94Li+y85Iw==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0} peerDependencies: - '@angular/common': 20.3.6 - 
'@angular/core': 20.3.6 - '@angular/platform-browser': 20.3.6 + '@angular/common': 20.3.7 + '@angular/core': 20.3.7 + '@angular/platform-browser': 20.3.7 rxjs: ^6.5.3 || ^7.4.0 '@ark/regex@0.0.0': @@ -1375,8 +1375,8 @@ packages: '@drizzle-team/brocli@0.10.2': resolution: {integrity: sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w==} - '@electric-sql/client@1.0.14': - resolution: {integrity: sha512-LtPAfeMxXRiYS0hyDQ5hue2PjljUiK9stvzsVyVb4nwxWQxfOWTSF42bHTs/o5i3x1T4kAQ7mwHpxa4A+f8X7Q==} + '@electric-sql/client@1.1.0': + resolution: {integrity: sha512-pBb66/A7p7pgmgLl3T+JmTA4r58ix4EZyw24IWqiCUqvzdW1Wtkp3PbXom1T8leguo402RWgsVLuxV0+9wejmg==} '@emnapi/core@1.5.0': resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} @@ -2843,12 +2843,12 @@ packages: '@rolldown/pluginutils@1.0.0-beta.29': resolution: {integrity: sha512-NIJgOsMjbxAXvoGq/X0gD7VPMQ8j9g0BiDaNjVNVjvl+iKXxL3Jre0v31RmBYeLEmkbj2s02v8vFTbUXi5XS2Q==} - '@rolldown/pluginutils@1.0.0-beta.38': - resolution: {integrity: sha512-N/ICGKleNhA5nc9XXQG/kkKHJ7S55u0x0XUJbbkmdCnFuoRkM1Il12q9q0eX19+M7KKUEPw/daUPIRnxhcxAIw==} - '@rolldown/pluginutils@1.0.0-beta.40': resolution: {integrity: sha512-s3GeJKSQOwBlzdUrj4ISjJj5SfSh+aqn0wjOar4Bx95iV1ETI7F6S/5hLcfAxZ9kXDcyrAkxPlqmd1ZITttf+w==} + '@rolldown/pluginutils@1.0.0-beta.43': + resolution: {integrity: sha512-5Uxg7fQUCmfhax7FJke2+8B6cqgeUJUD9o2uXIKXhD+mG0mL6NObmVoi9wXEU1tY89mZKgAYA6fTbftx3q2ZPQ==} + '@rollup/pluginutils@5.3.0': resolution: {integrity: sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==} engines: {node: '>=14.0.0'} @@ -3100,8 +3100,8 @@ packages: '@rushstack/ts-command-line@4.22.6': resolution: {integrity: sha512-QSRqHT/IfoC5nk9zn6+fgyqOPXHME0BfchII9EUPR19pocsNp/xSbeBCbD3PIR2Lg+Q5qk7OFqk1VhWPMdKHJg==} - '@schematics/angular@20.3.6': - resolution: {integrity: sha512-YPIEyKPBOyJYlda5fA49kMThzZ4WidomEMDghshux8xidbjDaPWBZdyVPQj3IXyW0teGlUM/TH0TH2weumMZrg==} + '@schematics/angular@20.3.7': + resolution: {integrity: sha512-jR2LPJVGK6yzPTNXkGJZYtdeLGkNdqJhVow2E+ILt3pk/LZuT/iSdr9V4nArU9yysifGuJFTyZapVOYkEYaykg==} engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'} '@shikijs/engine-oniguruma@3.13.0': @@ -3294,65 +3294,65 @@ packages: resolution: {integrity: sha512-08eKiDAjj4zLug1taXSIJ0kGL5cawjVCyJkBb6EWSg5fEPX6L+Wtr0CH2If4j5KYylz85iaZiFlUItvgJvll5g==} engines: {node: ^14.13.1 || ^16.0.0 || >=18} - '@tailwindcss/node@4.1.14': - resolution: {integrity: sha512-hpz+8vFk3Ic2xssIA3e01R6jkmsAhvkQdXlEbRTk6S10xDAtiQiM3FyvZVGsucefq764euO/b8WUW9ysLdThHw==} + '@tailwindcss/node@4.1.16': + resolution: {integrity: sha512-BX5iaSsloNuvKNHRN3k2RcCuTEgASTo77mofW0vmeHkfrDWaoFAFvNHpEgtu0eqyypcyiBkDWzSMxJhp3AUVcw==} - '@tailwindcss/oxide-android-arm64@4.1.14': - resolution: {integrity: sha512-a94ifZrGwMvbdeAxWoSuGcIl6/DOP5cdxagid7xJv6bwFp3oebp7y2ImYsnZBMTwjn5Ev5xESvS3FFYUGgPODQ==} + '@tailwindcss/oxide-android-arm64@4.1.16': + resolution: {integrity: sha512-8+ctzkjHgwDJ5caq9IqRSgsP70xhdhJvm+oueS/yhD5ixLhqTw9fSL1OurzMUhBwE5zK26FXLCz2f/RtkISqHA==} engines: {node: '>= 10'} cpu: [arm64] os: [android] - '@tailwindcss/oxide-darwin-arm64@4.1.14': - resolution: {integrity: sha512-HkFP/CqfSh09xCnrPJA7jud7hij5ahKyWomrC3oiO2U9i0UjP17o9pJbxUN0IJ471GTQQmzwhp0DEcpbp4MZTA==} + '@tailwindcss/oxide-darwin-arm64@4.1.16': + resolution: {integrity: 
sha512-C3oZy5042v2FOALBZtY0JTDnGNdS6w7DxL/odvSny17ORUnaRKhyTse8xYi3yKGyfnTUOdavRCdmc8QqJYwFKA==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@tailwindcss/oxide-darwin-x64@4.1.14': - resolution: {integrity: sha512-eVNaWmCgdLf5iv6Qd3s7JI5SEFBFRtfm6W0mphJYXgvnDEAZ5sZzqmI06bK6xo0IErDHdTA5/t7d4eTfWbWOFw==} + '@tailwindcss/oxide-darwin-x64@4.1.16': + resolution: {integrity: sha512-vjrl/1Ub9+JwU6BP0emgipGjowzYZMjbWCDqwA2Z4vCa+HBSpP4v6U2ddejcHsolsYxwL5r4bPNoamlV0xDdLg==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@tailwindcss/oxide-freebsd-x64@4.1.14': - resolution: {integrity: sha512-QWLoRXNikEuqtNb0dhQN6wsSVVjX6dmUFzuuiL09ZeXju25dsei2uIPl71y2Ic6QbNBsB4scwBoFnlBfabHkEw==} + '@tailwindcss/oxide-freebsd-x64@4.1.16': + resolution: {integrity: sha512-TSMpPYpQLm+aR1wW5rKuUuEruc/oOX3C7H0BTnPDn7W/eMw8W+MRMpiypKMkXZfwH8wqPIRKppuZoedTtNj2tg==} engines: {node: '>= 10'} cpu: [x64] os: [freebsd] - '@tailwindcss/oxide-linux-arm-gnueabihf@4.1.14': - resolution: {integrity: sha512-VB4gjQni9+F0VCASU+L8zSIyjrLLsy03sjcR3bM0V2g4SNamo0FakZFKyUQ96ZVwGK4CaJsc9zd/obQy74o0Fw==} + '@tailwindcss/oxide-linux-arm-gnueabihf@4.1.16': + resolution: {integrity: sha512-p0GGfRg/w0sdsFKBjMYvvKIiKy/LNWLWgV/plR4lUgrsxFAoQBFrXkZ4C0w8IOXfslB9vHK/JGASWD2IefIpvw==} engines: {node: '>= 10'} cpu: [arm] os: [linux] - '@tailwindcss/oxide-linux-arm64-gnu@4.1.14': - resolution: {integrity: sha512-qaEy0dIZ6d9vyLnmeg24yzA8XuEAD9WjpM5nIM1sUgQ/Zv7cVkharPDQcmm/t/TvXoKo/0knI3me3AGfdx6w1w==} + '@tailwindcss/oxide-linux-arm64-gnu@4.1.16': + resolution: {integrity: sha512-DoixyMmTNO19rwRPdqviTrG1rYzpxgyYJl8RgQvdAQUzxC1ToLRqtNJpU/ATURSKgIg6uerPw2feW0aS8SNr/w==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@tailwindcss/oxide-linux-arm64-musl@4.1.14': - resolution: {integrity: sha512-ISZjT44s59O8xKsPEIesiIydMG/sCXoMBCqsphDm/WcbnuWLxxb+GcvSIIA5NjUw6F8Tex7s5/LM2yDy8RqYBQ==} + '@tailwindcss/oxide-linux-arm64-musl@4.1.16': + resolution: {integrity: sha512-H81UXMa9hJhWhaAUca6bU2wm5RRFpuHImrwXBUvPbYb+3jo32I9VIwpOX6hms0fPmA6f2pGVlybO6qU8pF4fzQ==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@tailwindcss/oxide-linux-x64-gnu@4.1.14': - resolution: {integrity: sha512-02c6JhLPJj10L2caH4U0zF8Hji4dOeahmuMl23stk0MU1wfd1OraE7rOloidSF8W5JTHkFdVo/O7uRUJJnUAJg==} + '@tailwindcss/oxide-linux-x64-gnu@4.1.16': + resolution: {integrity: sha512-ZGHQxDtFC2/ruo7t99Qo2TTIvOERULPl5l0K1g0oK6b5PGqjYMga+FcY1wIUnrUxY56h28FxybtDEla+ICOyew==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@tailwindcss/oxide-linux-x64-musl@4.1.14': - resolution: {integrity: sha512-TNGeLiN1XS66kQhxHG/7wMeQDOoL0S33x9BgmydbrWAb9Qw0KYdd8o1ifx4HOGDWhVmJ+Ul+JQ7lyknQFilO3Q==} + '@tailwindcss/oxide-linux-x64-musl@4.1.16': + resolution: {integrity: sha512-Oi1tAaa0rcKf1Og9MzKeINZzMLPbhxvm7rno5/zuP1WYmpiG0bEHq4AcRUiG2165/WUzvxkW4XDYCscZWbTLZw==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@tailwindcss/oxide-wasm32-wasi@4.1.14': - resolution: {integrity: sha512-uZYAsaW/jS/IYkd6EWPJKW/NlPNSkWkBlaeVBi/WsFQNP05/bzkebUL8FH1pdsqx4f2fH/bWFcUABOM9nfiJkQ==} + '@tailwindcss/oxide-wasm32-wasi@4.1.16': + resolution: {integrity: sha512-B01u/b8LteGRwucIBmCQ07FVXLzImWESAIMcUU6nvFt/tYsQ6IHz8DmZ5KtvmwxD+iTYBtM1xwoGXswnlu9v0Q==} engines: {node: '>=14.0.0'} cpu: [wasm32] bundledDependencies: @@ -3363,24 +3363,24 @@ packages: - '@emnapi/wasi-threads' - tslib - '@tailwindcss/oxide-win32-arm64-msvc@4.1.14': - resolution: {integrity: sha512-Az0RnnkcvRqsuoLH2Z4n3JfAef0wElgzHD5Aky/e+0tBUxUhIeIqFBTMNQvmMRSP15fWwmvjBxZ3Q8RhsDnxAA==} + '@tailwindcss/oxide-win32-arm64-msvc@4.1.16': + resolution: 
{integrity: sha512-zX+Q8sSkGj6HKRTMJXuPvOcP8XfYON24zJBRPlszcH1Np7xuHXhWn8qfFjIujVzvH3BHU+16jBXwgpl20i+v9A==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@tailwindcss/oxide-win32-x64-msvc@4.1.14': - resolution: {integrity: sha512-ttblVGHgf68kEE4om1n/n44I0yGPkCPbLsqzjvybhpwa6mKKtgFfAzy6btc3HRmuW7nHe0OOrSeNP9sQmmH9XA==} + '@tailwindcss/oxide-win32-x64-msvc@4.1.16': + resolution: {integrity: sha512-m5dDFJUEejbFqP+UXVstd4W/wnxA4F61q8SoL+mqTypId2T2ZpuxosNSgowiCnLp2+Z+rivdU0AqpfgiD7yCBg==} engines: {node: '>= 10'} cpu: [x64] os: [win32] - '@tailwindcss/oxide@4.1.14': - resolution: {integrity: sha512-23yx+VUbBwCg2x5XWdB8+1lkPajzLmALEfMb51zZUBYaYVPDQvBSD/WYDqiVyBIo2BZFa3yw1Rpy3G2Jp+K0dw==} + '@tailwindcss/oxide@4.1.16': + resolution: {integrity: sha512-2OSv52FRuhdlgyOQqgtQHuCgXnS8nFSYRp2tJ+4WZXKgTxqPy7SMSls8c3mPT5pkZ17SBToGM5LHEJBO7miEdg==} engines: {node: '>= 10'} - '@tailwindcss/vite@4.1.14': - resolution: {integrity: sha512-BoFUoU0XqgCUS1UXWhmDJroKKhNXeDzD7/XwabjkDIAbMnc4ULn5e2FuEuBbhZ6ENZoSYzKlzvZ44Yr6EUDUSA==} + '@tailwindcss/vite@4.1.16': + resolution: {integrity: sha512-bbguNBcDxsRmi9nnlWJxhfDWamY3lmcyACHcdO1crxfzuLpOhHLLtEIN/nCbbAtj5rchUgQD17QVAKi1f7IsKg==} peerDependencies: vite: ^5.2.0 || ^6 || ^7 @@ -3388,8 +3388,8 @@ packages: resolution: {integrity: sha512-7Wwfw6wBv2Kc+OBNIJQzBSJ6q7GABtwVT+VOQ/7/Gl7z8z1rtEYUZrxUrNvbbrHY+J5/WNZNZjJjTWDf8nTUBw==} engines: {node: '>=18'} - '@tanstack/directive-functions-plugin@1.133.9': - resolution: {integrity: sha512-mnJXMQNovd+BhAp5SoSF6BXWfB8e/s0PdxY1AW3wqGOoGA1T7cLXHXhvzuiefX0FVKyAbvsyejs1d5usHePjEA==} + '@tanstack/directive-functions-plugin@1.133.19': + resolution: {integrity: sha512-U6nBlxxc624Q7Yta3UUe805WJfi0R029N/vUOVNxggZ432nt+0Hx7gLQO2P9zIUt+N6VYPuyKLKq047bxCJWOw==} engines: {node: '>=12'} peerDependencies: vite: '>=6.0.0 || >=7.0.0' @@ -3398,8 +3398,8 @@ packages: resolution: {integrity: sha512-2g+PuGR3GuvvCiR3xZs+IMqAvnYU9bvH+jRml0BFBSxHBj22xFCTNvJWhvgj7uICFF9IchDkFUto91xDPMu5cg==} engines: {node: '>=18'} - '@tanstack/history@1.133.3': - resolution: {integrity: sha512-zFQnGdX0S4g5xRuS+95iiEXM+qlGvYG7ksmOKx7LaMv60lDWa0imR8/24WwXXvBWJT1KnwVdZcjvhCwz9IiJCw==} + '@tanstack/history@1.133.28': + resolution: {integrity: sha512-B7+x7eP2FFvi3fgd3rNH9o/Eixt+pp0zCIdGhnQbAJjFrlwIKGjGnwyJjhWJ5fMQlGks/E2LdDTqEV4W9Plx7g==} engines: {node: '>=12'} '@tanstack/publish-config@0.2.1': @@ -3417,11 +3417,11 @@ packages: peerDependencies: react: ^18 || ^19 - '@tanstack/react-router-devtools@1.133.15': - resolution: {integrity: sha512-EBkWLTdafkWY+M0A32qeFMSJc6SLU3DBg2oPQ4zDOy55BTeFSRMw7Y2z3V00BwO2eGI+yB73Ym/Noy28qGySvQ==} + '@tanstack/react-router-devtools@1.133.32': + resolution: {integrity: sha512-dd0PzcAjzaDY9rPq9zoiItit3PKDVAekLgMLtNyXneyYrTXqPDSZwSge/l+cB0V1QcuAq6w+qx3LyFd3+zK40g==} engines: {node: '>=12'} peerDependencies: - '@tanstack/react-router': ^1.133.15 + '@tanstack/react-router': ^1.133.32 react: '>=18.0.0 || >=19.0.0' react-dom: '>=18.0.0 || >=19.0.0' @@ -3435,29 +3435,29 @@ packages: react: '>=18.0.0 || >=19.0.0' react-dom: '>=18.0.0 || >=19.0.0' - '@tanstack/react-router@1.133.15': - resolution: {integrity: sha512-3gQitqq/5lL//KSv9Ro34Fw7xak2ZQcPbR7x6bu5X4W0v97xTE7+bMbBS5UAg9zXTq0FNyB124GabgyBgeQ0NA==} + '@tanstack/react-router@1.133.32': + resolution: {integrity: sha512-UqakhaFJ+r5S+b4/AvLux70Hk+1MpmwfKwbdV7vuM2bVRUSztM8xtcttVpzZwRV45Ls8YsStY58prEVdCpvkNg==} engines: {node: '>=12'} peerDependencies: react: '>=18.0.0 || >=19.0.0' react-dom: '>=18.0.0 || >=19.0.0' - '@tanstack/react-start-client@1.133.15': - resolution: {integrity: 
sha512-eQ8n4+61G5PizQpuso9MpyOsW8dVL5ZlTMa7BNdGt96OGkenj/dXnqXi3gZ9xYAIkn7VRcFZMpagZQo/Yksp5Q==} + '@tanstack/react-start-client@1.133.32': + resolution: {integrity: sha512-42eZzacxIFGhcgwgO7tdF+V9MhRuBvgZiu9YIVM3EcIuu/eD1/cznGMpZd+ly5MnrB7opIYdQi7yIuNxjLNpFw==} engines: {node: '>=22.12.0'} peerDependencies: react: '>=18.0.0 || >=19.0.0' react-dom: '>=18.0.0 || >=19.0.0' - '@tanstack/react-start-server@1.133.15': - resolution: {integrity: sha512-dF5PyB1BGOYRKY1B+p1e617pSpg7BEeHT3mDPH6XMy3whzMKnlo9vXSOvRekoZ0phtMloYq4/TlP7qK4lWdS2g==} + '@tanstack/react-start-server@1.133.32': + resolution: {integrity: sha512-OE7hhGX3YpX26Uu0XiNM39j2BNymdnVyipv8DMwW/sR4MH64eE4Waizy641sNkui25ODvsomLcWRgTLw7aXqqA==} engines: {node: '>=22.12.0'} peerDependencies: react: '>=18.0.0 || >=19.0.0' react-dom: '>=18.0.0 || >=19.0.0' - '@tanstack/react-start@1.133.15': - resolution: {integrity: sha512-E2VOqrPp9D28QHwig5jvIZMEQYjUYoLBZ4PQt6esymKr5DduI3KdFFD495jLm4dxRIH3rE4BHZjkEKGvuB2XLw==} + '@tanstack/react-start@1.133.32': + resolution: {integrity: sha512-1jzaUaGHW2afyKikjW7EB75NgPpTjDIoszqJu0h9ydgBXD3bUOXHtdfkcgEg0+VD3EMRc2TKmzxqh+E+7m7fmA==} engines: {node: '>=22.12.0'} peerDependencies: react: '>=18.0.0 || >=19.0.0' @@ -3470,15 +3470,15 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - '@tanstack/router-core@1.133.15': - resolution: {integrity: sha512-ZWAmoFcgi27Ojv2FH3Dq3D6Vt73LswdTnA1tyHShNWQf7wOMH/VKKB9JxiXJqpLTK4NJqpnUp/x0/3nvmdrIqg==} + '@tanstack/router-core@1.133.28': + resolution: {integrity: sha512-HErb1X9F+u4VzguJKafX1p5fSnNnfo7aK8Xxh5cICFMShPBxt9i1K45nUbQaRkf45N1tg4gJ3l8mlmwWRm9WWA==} engines: {node: '>=12'} - '@tanstack/router-devtools-core@1.133.15': - resolution: {integrity: sha512-TseqoP0fRfgkdb1kYzPC0S8td3pRt04BudOpenCabn8/f1EDmraxHdWh5O7S5x0VXr9dpvnj0KAUG+ip7x+iEg==} + '@tanstack/router-devtools-core@1.133.28': + resolution: {integrity: sha512-V4wwfVV9dGJ+OoEkAvVF6LWyST+s6MMSt+N73453lNbIbzQrODu5xdVr5svnzsuH3Kt5Fms5dMBlvb77gRcn4w==} engines: {node: '>=12'} peerDependencies: - '@tanstack/router-core': ^1.133.15 + '@tanstack/router-core': ^1.133.28 csstype: ^3.0.10 solid-js: '>=1.9.5' tiny-invariant: ^1.3.3 @@ -3486,18 +3486,18 @@ packages: csstype: optional: true - '@tanstack/router-generator@1.133.15': - resolution: {integrity: sha512-TXI07UzV5t1j1LeJ2eOErV9TxvzBRx2oSCEmkVaWMXaGKuQL7I4VB9e9w15ylHnvCO2Z/4DgIhUVF6h9/ZS3Mw==} + '@tanstack/router-generator@1.133.29': + resolution: {integrity: sha512-Nngp1i7ch22qqLkjsQwoyos8mawL5bG2GjHg8IMOzFbMR+dVQfh9gSkpkhUbLMv8TMb8Na31tyR0/tDr+E/NCA==} engines: {node: '>=12'} - '@tanstack/router-plugin@1.133.15': - resolution: {integrity: sha512-c3m7Pfuth/TXiRol0OpTw+cJyE7RxJpiMXDLooCiZgRDu2VhyXaanPLuuti9vyZiVdSrVZTQ7tJBFABymWbX5w==} + '@tanstack/router-plugin@1.133.32': + resolution: {integrity: sha512-4YtRrGs5nq/sZmwwT/jmS1LiucEevsWjxzTEilHRBtjeMgFbrhnQ7jBHHRPym71C40W/9tEgzcQS3dGq5LC5+g==} engines: {node: '>=12'} peerDependencies: '@rsbuild/core': '>=1.0.2' - '@tanstack/react-router': ^1.133.15 + '@tanstack/react-router': ^1.133.32 vite: '>=5.0.0 || >=6.0.0 || >=7.0.0' - vite-plugin-solid: ^2.11.8 + vite-plugin-solid: ^2.11.10 webpack: '>=5.92.0' peerDependenciesMeta: '@rsbuild/core': @@ -3511,34 +3511,34 @@ packages: webpack: optional: true - '@tanstack/router-utils@1.133.3': - resolution: {integrity: sha512-miPFlt0aG6ID5VDolYuRXgLS7cofvbZGMvHwf2Wmyxjo6GLp/kxxpkQrfM4T1I5cwjwYZZAQmdUKbVHwFZz9sQ==} + '@tanstack/router-utils@1.133.19': + resolution: {integrity: 
sha512-WEp5D2gPxvlLDRXwD/fV7RXjYtqaqJNXKB/L6OyZEbT+9BG/Ib2d7oG9GSUZNNMGPGYAlhBUOi3xutySsk6rxA==} engines: {node: '>=12'} - '@tanstack/server-functions-plugin@1.133.11': - resolution: {integrity: sha512-i6w1fmnXCWsbIEq5LZ1+tSVsqy0Iy7zAUVJzfNw6AzfbWZymwl508xwQJkIowv7v+kH1yIqNaeZ75gyWNZuVEg==} + '@tanstack/server-functions-plugin@1.133.25': + resolution: {integrity: sha512-jyb+Z6umAgZncEAB4OKLJiP8338n17xxxw3tO344gcnYCcqeZ9VAOMq3RVOoBUBDtV2DTLj/LVO62A5vDZ6WJw==} engines: {node: '>=12'} - '@tanstack/solid-router@1.133.15': - resolution: {integrity: sha512-P1ymkQDeoQHdpfkNibol8IhVTvDVt6mrefIjGXZBa+hY/Cbt4/6VOQ5OYbmo5YYLHQpaMmYrZYdsAmdtM0ynfw==} + '@tanstack/solid-router@1.133.31': + resolution: {integrity: sha512-WCpNrFvyytWZ8Yr0DfqmD8rQPdqGR4Pdex2oT24F5oXFcIlMR6YTN0vGyxCHeE5evwFP6qqUNKngAlfkxtXaQw==} engines: {node: '>=12'} peerDependencies: - solid-js: ^1.9.5 + solid-js: ^1.9.9 - '@tanstack/solid-start-client@1.133.15': - resolution: {integrity: sha512-EgOMR3J4wv1oHfwwkyJga9ZhrKfPLbYB0zEFJVsjK6hwOtUGNSE+jqBss50VcMK5zbvJIP23XSE0QRO/NCCt5w==} + '@tanstack/solid-start-client@1.133.31': + resolution: {integrity: sha512-bUSRoz0FJH6rpoEZDaCljPdhLvchCYWa1C54yLY8mL88lfPavYnmUwdB05gQNjvzCgQMB1fOJ8mgS3j3dhn5zA==} engines: {node: '>=22.12.0'} peerDependencies: solid-js: '>=1.0.0' - '@tanstack/solid-start-server@1.133.15': - resolution: {integrity: sha512-ANLqKIxDyRQA4pIUARWRjvwpDaYpU15QcUrsk58a5+QmcGEWx2oxiVAJ+3ounASx/XOhpbk6gKqE/FidSijSjQ==} + '@tanstack/solid-start-server@1.133.31': + resolution: {integrity: sha512-THI4053ONLHFvm13l1JudLHEkeFa+XiZhahiNk3bpengeyRu95nogBEyb3kd5/Xj01+tklcQ6ACNiryZDPf8qg==} engines: {node: '>=22.12.0'} peerDependencies: solid-js: ^1.0.0 - '@tanstack/solid-start@1.133.15': - resolution: {integrity: sha512-EoXEfVr5tYw2DkGXXZ/cskkuDn1JcKsFUEL9c6sTnF19ZlnsQXzfkJ0fICTf4n8kS453mIiYlw4uvD9Sb+ulVQ==} + '@tanstack/solid-start@1.133.32': + resolution: {integrity: sha512-OqAWzLZNFcOqYhdeQXWiy0d4pE+udWibvg3BWiirFCYop2tefz178XTjB6SS7uKqmsq5JuFwdv7I3mGsNOP8qg==} engines: {node: '>=22.12.0'} peerDependencies: solid-js: '>=1.0.0' @@ -3549,22 +3549,22 @@ packages: peerDependencies: solid-js: ^1.6.0 - '@tanstack/start-client-core@1.133.15': - resolution: {integrity: sha512-Rnr2grPF+7ygtc6Dy6SnJrAlTeF+tr+cKv12SMvtGq1Tg2WkjmFXmGe6ac5pHqNTPs+jVBAD3MtYo3FJmIK/Fw==} + '@tanstack/start-client-core@1.133.28': + resolution: {integrity: sha512-+skTCT1kX0E2J+VTnDPDufbzzTpdgMseTmxKbT+05wQNXQddwoeykvl+Ji/dKy7dq7OBGNPB86k4zPQd5YGzVg==} engines: {node: '>=22.12.0'} - '@tanstack/start-plugin-core@1.133.15': - resolution: {integrity: sha512-t8z45y0wc3zQISvVaZIRspzJ+52nIEBE4J1mGHncxQN+43EO+sHWhN7HHA60vbYVI+PNI57QmBGZEsa23SmdBg==} + '@tanstack/start-plugin-core@1.133.32': + resolution: {integrity: sha512-oBujZkLno1UBR4UpWx9t9GxrBQFZBBRzB6etVZpGuOORjyBjIkxuriQG7XnxtTtytpGX+w8NDAMagnLwXX2qaA==} engines: {node: '>=22.12.0'} peerDependencies: vite: '>=7.0.0' - '@tanstack/start-server-core@1.133.15': - resolution: {integrity: sha512-mw7Sv+Tk2oFcFpVYSRVEZv+u5GQVU8VhgyA/h8K3i9xViuBztzqsUABEPPI+sU7Nz+jnnVSfQLH6mzy9rrr12g==} + '@tanstack/start-server-core@1.133.31': + resolution: {integrity: sha512-g95+nDzJnc4kCN4Bei1g09rbyDzUk44JDTTx0rMgrXvV26qkPXOUFRfBOALw5anqQTW9o//PxHHakXhRibxfMg==} engines: {node: '>=22.12.0'} - '@tanstack/start-storage-context@1.133.15': - resolution: {integrity: sha512-nkC/U2Ul8oTcdBJJRHcp0prpbsw9pHQEQIJW9G+BTRRZMFV5DeZPmjMmw6W29VZyXT5TMm/kHcXMqlc7x6ppPg==} + '@tanstack/start-storage-context@1.133.28': + resolution: {integrity: 
sha512-tWRIhf93DWFQLqQ5mk82K2pkLhvJwgZWQC5mMaPAcnSVk9Jz3JcaBhiKLdx76h8r3EYTSOO33U6/8FpH2KGCig==} engines: {node: '>=22.12.0'} '@tanstack/store@0.7.0': @@ -3583,8 +3583,8 @@ packages: resolution: {integrity: sha512-g7sfxscIq0wYUGtOLegnTbiMTsNiAz6r28CDgdZqIIjI1naWZoIlABpWH2qdI3IIJUDWvhOaVwAo6sfqzm6GsQ==} engines: {node: '>=18'} - '@tanstack/virtual-file-routes@1.133.3': - resolution: {integrity: sha512-6d2AP9hAjEi8mcIew2RkxBX+wClH1xedhfaYhs8fUiX+V2Cedk7RBD9E9ww2z6BGUYD8Es4fS0OIrzXZWHKGhw==} + '@tanstack/virtual-file-routes@1.133.19': + resolution: {integrity: sha512-IKwZENsK7owmW1Lm5FhuHegY/SyQ8KqtL/7mTSnzoKJgfzhrrf9qwKB1rmkKkt+svUuy/Zw3uVEpZtUzQruWtA==} engines: {node: '>=12'} '@tanstack/vite-config@0.4.0': @@ -3614,14 +3614,14 @@ packages: '@types/react-dom': optional: true - '@trpc/client@11.6.0': - resolution: {integrity: sha512-DyWbYk2hd50BaVrXWVkaUnaSwgAF5g/lfBkXtkF1Aqlk6BtSzGUo3owPkgqQO2I5LwWy1+ra9TsSfBBvIZpTwg==} + '@trpc/client@11.7.0': + resolution: {integrity: sha512-VLMoA9KUIItrD/tXKLw2Hvu71V7cRMBNQhXbmm5weSkFVWdAGwLXzV5f85QPdOM8669dD+a1QbGJCK0LNBPbgQ==} peerDependencies: - '@trpc/server': 11.6.0 + '@trpc/server': 11.7.0 typescript: '>=5.7.2' - '@trpc/server@11.6.0': - resolution: {integrity: sha512-skTso0AWbOZck40jwNeYv++AMZXNWLUWdyk+pB5iVaYmEKTuEeMoPrEudR12VafbEU6tZa8HK3QhBfTYYHDCdg==} + '@trpc/server@11.7.0': + resolution: {integrity: sha512-BPpr3roKWpZnS92fWCg29e6hEwn1kzH8m65yXRkJfFqemgwckiyJ15NgR8zTe7RcMAeBEPpvqyAfL71rwaSJJw==} peerDependencies: typescript: '>=5.7.2' @@ -3690,8 +3690,8 @@ packages: '@types/express-serve-static-core@5.0.7': resolution: {integrity: sha512-R+33OsgWw7rOhD1emjU7dzCDHucJrgJXMA5PYCzJxVil0dsyx5iBEPHqpPfiKNJQb7lZ1vxwoLR4Z87bBUpeGQ==} - '@types/express@4.17.23': - resolution: {integrity: sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==} + '@types/express@4.17.24': + resolution: {integrity: sha512-Mbrt4SRlXSTWryOnHAh2d4UQ/E7n9lZyGSi6KgX+4hkuL9soYbLOVXVhnk/ODp12YsGc95f4pOvqywJ6kngUwg==} '@types/express@5.0.3': resolution: {integrity: sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==} @@ -3775,11 +3775,11 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/eslint-plugin@8.46.1': - resolution: {integrity: sha512-rUsLh8PXmBjdiPY+Emjz9NX2yHvhS11v0SR6xNJkm5GM1MO9ea/1GoDKlHHZGrOJclL/cZ2i/vRUYVtjRhrHVQ==} + '@typescript-eslint/eslint-plugin@8.46.2': + resolution: {integrity: sha512-ZGBMToy857/NIPaaCucIUQgqueOiq7HeAKkhlvqVV4lm089zUFW6ikRySx2v+cAhKeUCPuWVHeimyk6Dw1iY3w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.46.1 + '@typescript-eslint/parser': ^8.46.2 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' @@ -3790,8 +3790,8 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.46.1': - resolution: {integrity: sha512-6JSSaBZmsKvEkbRUkf7Zj7dru/8ZCrJxAqArcLaVMee5907JdtEbKGsZ7zNiIm/UAkpGUkaSMZEXShnN2D1HZA==} + '@typescript-eslint/parser@8.46.2': + resolution: {integrity: sha512-BnOroVl1SgrPLywqxyqdJ4l3S2MsKVLDVxZvjI1Eoe8ev2r3kGDo+PcMihNmDE+6/KjkTubSJnmqGZZjQSBq/g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -3803,8 +3803,8 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.46.1': - resolution: {integrity: sha512-FOIaFVMHzRskXr5J4Jp8lFVV0gz5ngv3RHmn+E4HYxSJ3DgDzU7fVI1/M7Ijh1zf6S7HIoaIOtln1H5y8V+9Zg==} + 
'@typescript-eslint/project-service@8.46.2': + resolution: {integrity: sha512-PULOLZ9iqwI7hXcmL4fVfIsBi6AN9YxRc0frbvmg8f+4hQAjQ5GYNKK0DIArNo+rOKmR/iBYwkpBmnIwin4wBg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' @@ -3813,8 +3813,8 @@ packages: resolution: {integrity: sha512-NdhWHgmynpSvyhchGLXh+w12OMT308Gm25JoRIyTZqEbApiBiQHD/8xgb6LqCWCFcxFtWwaVdFsLPQI3jvhywg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/scope-manager@8.46.1': - resolution: {integrity: sha512-weL9Gg3/5F0pVQKiF8eOXFZp8emqWzZsOJuWRUNtHT+UNV2xSJegmpCNQHy37aEQIbToTq7RHKhWvOsmbM680A==} + '@typescript-eslint/scope-manager@8.46.2': + resolution: {integrity: sha512-LF4b/NmGvdWEHD2H4MsHD8ny6JpiVNDzrSZr3CsckEgCbAGZbYM4Cqxvi9L+WqDMT+51Ozy7lt2M+d0JLEuBqA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@typescript-eslint/tsconfig-utils@8.44.1': @@ -3823,8 +3823,8 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/tsconfig-utils@8.46.1': - resolution: {integrity: sha512-X88+J/CwFvlJB+mK09VFqx5FE4H5cXD+H/Bdza2aEWkSb8hnWIQorNcscRl4IEo1Cz9VI/+/r/jnGWkbWPx54g==} + '@typescript-eslint/tsconfig-utils@8.46.2': + resolution: {integrity: sha512-a7QH6fw4S57+F5y2FIxxSDyi5M4UfGF+Jl1bCGd7+L4KsaUY80GsiF/t0UoRFDHAguKlBaACWJRmdrc6Xfkkag==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' @@ -3836,8 +3836,8 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.46.1': - resolution: {integrity: sha512-+BlmiHIiqufBxkVnOtFwjah/vrkF4MtKKvpXrKSPLCkCtAp8H01/VV43sfqA98Od7nJpDcFnkwgyfQbOG0AMvw==} + '@typescript-eslint/type-utils@8.46.2': + resolution: {integrity: sha512-HbPM4LbaAAt/DjxXaG9yiS9brOOz6fabal4uvUmaUYe6l3K1phQDMQKBRUrr06BQkxkvIZVVHttqiybM9nJsLA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -3847,8 +3847,8 @@ packages: resolution: {integrity: sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/types@8.46.1': - resolution: {integrity: sha512-C+soprGBHwWBdkDpbaRC4paGBrkIXxVlNohadL5o0kfhsXqOC6GYH2S/Obmig+I0HTDl8wMaRySwrfrXVP8/pQ==} + '@typescript-eslint/types@8.46.2': + resolution: {integrity: sha512-lNCWCbq7rpg7qDsQrd3D6NyWYu+gkTENkG5IKYhUIcxSb59SQC/hEQ+MrG4sTgBVghTonNWq42bA/d4yYumldQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@typescript-eslint/typescript-estree@8.44.1': @@ -3857,8 +3857,8 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/typescript-estree@8.46.1': - resolution: {integrity: sha512-uIifjT4s8cQKFQ8ZBXXyoUODtRoAd7F7+G8MKmtzj17+1UbdzFl52AzRyZRyKqPHhgzvXunnSckVu36flGy8cg==} + '@typescript-eslint/typescript-estree@8.46.2': + resolution: {integrity: sha512-f7rW7LJ2b7Uh2EiQ+7sza6RDZnajbNbemn54Ob6fRwQbgcIn+GWfyuHDHRYgRoZu1P4AayVScrRW+YfbTvPQoQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' @@ -3870,8 +3870,8 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.46.1': - resolution: {integrity: sha512-vkYUy6LdZS7q1v/Gxb2Zs7zziuXN0wxqsetJdeZdRe/f5dwJFglmuvZBfTUivCtjH725C1jWCDfpadadD95EDQ==} + '@typescript-eslint/utils@8.46.2': + resolution: {integrity: sha512-sExxzucx0Tud5tE0XqR0lT0psBQvEpnpiul9XbGUB1QwpWJJAps1O/Z7hJxLGiZLBKMCutjTzDgmd1muEhBnVg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 
@@ -3881,8 +3881,8 @@ packages: resolution: {integrity: sha512-576+u0QD+Jp3tZzvfRfxon0EA2lzcDt3lhUbsC6Lgzy9x2VR4E+JUiNyGHi5T8vk0TV+fpJ5GLG1JsJuWCaKhw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/visitor-keys@8.46.1': - resolution: {integrity: sha512-ptkmIf2iDkNUjdeu2bQqhFPV1m6qTnFFjg7PPDjxKWaMaP0Z6I9l30Jr3g5QqbZGdw8YdYvLp+XnqnWWZOg/NA==} + '@typescript-eslint/visitor-keys@8.46.2': + resolution: {integrity: sha512-tUFMXI4gxzzMXt4xpGJEsBsTox0XbNQ1y94EwlD/CuZwFcQP79xfQqMhau9HsRc/J0cAPA/HZt1dZPtGn9V/7w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@ungap/raw-json@0.4.4': @@ -3989,8 +3989,8 @@ packages: peerDependencies: vite: ^6.0.0 || ^7.0.0 - '@vitejs/plugin-react@5.0.4': - resolution: {integrity: sha512-La0KD0vGkVkSk6K+piWDKRUyg8Rl5iAIKRMH0vMJI0Eg47bq1eOxmoObAaQG37WMW9MSyk7Cs8EIWwJC1PtzKA==} + '@vitejs/plugin-react@5.1.0': + resolution: {integrity: sha512-4LuWrg7EKWgQaMJfnN+wcmbAW+VSsCmqGohftWjuct47bv8uE4n/nPpq4XjJPsxgq00GGG5J8dvBczp8uxScew==} engines: {node: ^20.19.0 || >=22.12.0} peerDependencies: vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 @@ -4843,8 +4843,8 @@ packages: resolution: {integrity: sha512-+CHgPFzuoTQTt7cOYCV6MOw2w8vqEn/ap1yv4bpZOWL03u7rlVRQhUY0WYT3rHsgVTXwYQDZaSUJSQrMBUKuWg==} hasBin: true - drizzle-orm@0.44.6: - resolution: {integrity: sha512-uy6uarrrEOc9K1u5/uhBFJbdF5VJ5xQ/Yzbecw3eAYOunv5FDeYkR2m8iitocdHBOHbvorviKOW5GVw0U1j4LQ==} + drizzle-orm@0.44.7: + resolution: {integrity: sha512-quIpnYznjU9lHshEOAYLoZ9s3jweleHlZIAWR/jX9gAWNg/JhQ1wj0KGRf7/Zm+obRrYd9GjPVJg790QY9N5AQ==} peerDependencies: '@aws-sdk/client-rds-data': '>=3' '@cloudflare/workers-types': '>=4' @@ -6017,8 +6017,8 @@ packages: resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} hasBin: true - jiti@2.6.0: - resolution: {integrity: sha512-VXe6RjJkBPj0ohtqaO8vSWP3ZhAKo66fKrFNCll4BTcwljPLz03pCbaNKfzGP5MbrCYcbJ7v0nOYYwUzTEIdXQ==} + jiti@2.6.1: + resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true jju@1.4.0: @@ -6142,8 +6142,8 @@ packages: resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} engines: {node: '>=6'} - knip@5.66.1: - resolution: {integrity: sha512-Ad3VUPIk9GZYovKuwKtGMheupek7IoPGaDEBAvnCYLKJXnwmqNLyXqMp+l5r3OOpFVjF7DdkFIZFVrXESDNylQ==} + knip@5.66.3: + resolution: {integrity: sha512-BEe9ZCI8fm4TJzehnrCt+L/Faqu6qfMH6VrwSfck+lCGotQzf0jh5dVXysPWjWqMpdUSr6+MpMu9JW/G6wiAcQ==} engines: {node: '>=18.18.0'} hasBin: true peerDependencies: @@ -6164,68 +6164,74 @@ packages: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} - lightningcss-darwin-arm64@1.30.1: - resolution: {integrity: sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==} + lightningcss-android-arm64@1.30.2: + resolution: {integrity: sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.30.2: + resolution: {integrity: sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [darwin] - lightningcss-darwin-x64@1.30.1: - resolution: {integrity: sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==} + 
lightningcss-darwin-x64@1.30.2: + resolution: {integrity: sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [darwin] - lightningcss-freebsd-x64@1.30.1: - resolution: {integrity: sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==} + lightningcss-freebsd-x64@1.30.2: + resolution: {integrity: sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [freebsd] - lightningcss-linux-arm-gnueabihf@1.30.1: - resolution: {integrity: sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==} + lightningcss-linux-arm-gnueabihf@1.30.2: + resolution: {integrity: sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==} engines: {node: '>= 12.0.0'} cpu: [arm] os: [linux] - lightningcss-linux-arm64-gnu@1.30.1: - resolution: {integrity: sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==} + lightningcss-linux-arm64-gnu@1.30.2: + resolution: {integrity: sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [linux] - lightningcss-linux-arm64-musl@1.30.1: - resolution: {integrity: sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==} + lightningcss-linux-arm64-musl@1.30.2: + resolution: {integrity: sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [linux] - lightningcss-linux-x64-gnu@1.30.1: - resolution: {integrity: sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==} + lightningcss-linux-x64-gnu@1.30.2: + resolution: {integrity: sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [linux] - lightningcss-linux-x64-musl@1.30.1: - resolution: {integrity: sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==} + lightningcss-linux-x64-musl@1.30.2: + resolution: {integrity: sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [linux] - lightningcss-win32-arm64-msvc@1.30.1: - resolution: {integrity: sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==} + lightningcss-win32-arm64-msvc@1.30.2: + resolution: {integrity: sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [win32] - lightningcss-win32-x64-msvc@1.30.1: - resolution: {integrity: sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==} + lightningcss-win32-x64-msvc@1.30.2: + resolution: {integrity: sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [win32] - lightningcss@1.30.1: - resolution: {integrity: sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==} + lightningcss@1.30.2: + resolution: {integrity: sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==} engines: {node: '>= 12.0.0'} 
lilconfig@3.1.3: @@ -6532,8 +6538,8 @@ packages: mongodb-connection-string-url@3.0.2: resolution: {integrity: sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==} - mongodb@6.18.0: - resolution: {integrity: sha512-fO5ttN9VC8P0F5fqtQmclAkgXZxbIkYRTUi1j8JO6IYwvamkhtYDilJr35jOPELR49zqCJgXZWwCtW7B+TM8vQ==} + mongodb@6.20.0: + resolution: {integrity: sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ==} engines: {node: '>=16.20.1'} peerDependencies: '@aws-sdk/credential-providers': ^3.188.0 @@ -6541,7 +6547,7 @@ packages: gcp-metadata: ^5.2.0 kerberos: ^2.0.1 mongodb-client-encryption: '>=6.0.0 <7' - snappy: ^7.2.2 + snappy: ^7.3.2 socks: ^2.7.1 peerDependenciesMeta: '@aws-sdk/credential-providers': @@ -7104,8 +7110,8 @@ packages: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} - publint@0.3.14: - resolution: {integrity: sha512-14/VNBvWsrBeqWNDw8c/DK5ERcZBUwL1rnkVx18cQnF3zadr3GfoYtvD8mxi1dhkWpaPHp8kfi92MDbjMeW3qw==} + publint@0.3.15: + resolution: {integrity: sha512-xPbRAPW+vqdiaKy5sVVY0uFAu3LaviaPO3pZ9FaRx59l9+U/RKR1OEbLhkug87cwiVKxPXyB4txsv5cad67u+A==} engines: {node: '>=18'} hasBin: true @@ -7174,8 +7180,8 @@ packages: react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} - react-refresh@0.17.0: - resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==} + react-refresh@0.18.0: + resolution: {integrity: sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==} engines: {node: '>=0.10.0'} react@19.2.0: @@ -7312,8 +7318,8 @@ packages: run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - rxdb@16.19.1: - resolution: {integrity: sha512-q+1GSgnNthoUY/mbOv9dlCc7008/QJiBfPbjaKtWxbBCR4nPVU7qf8dPatvvSImrezpMAFjSs/uvISQZxfmkhA==} + rxdb@16.20.0: + resolution: {integrity: sha512-2yg+MZ75PXKrFOEoGaq10C1xi684k1fNTQJLJvMhJIU8dCXb1BqDWSUSzaZ/aRHGJU64hVrgqO/lho1Si3/+hw==} engines: {node: '>=18'} peerDependencies: rxjs: ^7.8.0 @@ -7447,38 +7453,38 @@ packages: engines: {node: '>=18'} hasBin: true - sherif-darwin-arm64@1.6.1: - resolution: {integrity: sha512-J15oBJcrnCAZ0rQE8WbMShYw3204A18akCH6C/uZrILTwX/vZyJIqi7lAt5L00LzsadA3HcyQqVjLNNCvuihoQ==} + sherif-darwin-arm64@1.7.0: + resolution: {integrity: sha512-ziIJoGx+VFcP6G01XwOJh8cNLfDXj3CWSdDtaj9kXHaGT9oPj68z1xAoFuKHQCukj2jhmwyBrSZy4Zvli9MmaQ==} cpu: [arm64] os: [darwin] - sherif-darwin-x64@1.6.1: - resolution: {integrity: sha512-oLA/GtvUasi+qCl35LczOhQ4g/xY2mxE5/eiTYQGT3Ow7FKLscnkE6v5l28bgkFeR/uke0AgZ/CgHhozAf0ulg==} + sherif-darwin-x64@1.7.0: + resolution: {integrity: sha512-GKQw0zFqUWGbYrk+HU1Nzhr93p8VlEsjAkUhJ5UQVPxGtAf8VyRHQFJKWdoqYdhs0kckV04LfSfOHeaj/VFu5w==} cpu: [x64] os: [darwin] - sherif-linux-arm64@1.6.1: - resolution: {integrity: sha512-OoltlucT7v9BZdkYZRbs1QU0DYMCQ5qgpMqQdMW1Rq3w3amr7+oEiV9NHntD83udOo8xRxKq0uPXfNYu+VptJw==} + sherif-linux-arm64@1.7.0: + resolution: {integrity: sha512-qW08gpfjhrURkKoi0OaLlk+O/xvMqpD1oVRpeHLtyFqTof6msBqmfPKdQful2tPLWFVmNoU27emOgRUh5pAPBQ==} cpu: [arm64] os: [linux] - sherif-linux-x64@1.6.1: - resolution: {integrity: sha512-qyDyYqpi3ABGkRuCnjnxN3OMT8DxMiiLzhS9p9xC05Y9nr5hjkxvqP4DdJ4e5opm4E7vzRAS7VQoZ6m7h6tsgQ==} + sherif-linux-x64@1.7.0: + resolution: {integrity: 
sha512-1yPKSPXXZqIbbIbQPjoxO8yL8ASNy3lbRXpEvj+NT7rk6KNkGqarG3BlI6PtNdN2JZnFPwIp0FAVNVGTv01yqQ==} cpu: [x64] os: [linux] - sherif-windows-arm64@1.6.1: - resolution: {integrity: sha512-wAbCiqP//lo7bZUlHmZUV3/sGjnJxo6QB5/fqhz5/GUeWh4CTyvlSacJKZxLnXnzpiUSeFnWutquWnHkRov5Ug==} + sherif-windows-arm64@1.7.0: + resolution: {integrity: sha512-edVI8PScUI42i11IH3SP9CIqLVrHS9CNzXFJHgbbSAZ/EC7B0ixcFYQxD4eiYpz/K2QIVZOPYXTt+FMQWqgyGg==} cpu: [arm64] os: [win32] - sherif-windows-x64@1.6.1: - resolution: {integrity: sha512-2r0qMxZGCMO2aq8Hlq7npxtAsUFVDsEFtUM/6dFo1npa/jHe2mbU7ii/Ymy0bloSa/qw/azrSfRV6GLU7Gjtxg==} + sherif-windows-x64@1.7.0: + resolution: {integrity: sha512-QKzMkpk6S1Tp8Q6L/wChA44ZHyJ1Uah+WuM4X+O6+vSG4cQ/xWGCF+7AqDJpuC0y6D1k7GgC1lUEt0ZwTEuHLA==} cpu: [x64] os: [win32] - sherif@1.6.1: - resolution: {integrity: sha512-ZnwyTnmXoUOPClkOA37JWIyFxCoozMGHmhk/p7XbTREI554XXCnBAn3BMX8UsqkhSzQ9eNQsq4U+jnImEIppsQ==} + sherif@1.7.0: + resolution: {integrity: sha512-kf+WTg/oEpG7O5QX1t67LsY+dXB4hkdqbR/nNNepVPH6OsKuZ4lIR74OESuQkvRGdU2vytrRTmWTXLQrx4Kc/A==} hasBin: true shx@0.4.0: @@ -7762,8 +7768,8 @@ packages: svelte: ^3.55 || ^4.0.0-next.0 || ^4.0 || ^5.0.0-next.0 typescript: ^4.9.4 || ^5.0.0 - svelte@5.41.0: - resolution: {integrity: sha512-mP3vFFv5OUM5JN189+nJVW74kQ1dGqUrXTEzvCEVZqessY0GxZDls1nWVvt4Sxyv2USfQvAZO68VRaeIZvpzKg==} + svelte@5.42.2: + resolution: {integrity: sha512-iSry5jsBHispVczyt9UrBX/1qu3HQ/UyKPAIjqlvlu3o/eUvc+kpyMyRS2O4HLLx4MvLurLGIUOyyP11pyD59g==} engines: {node: '>=18'} symbol-tree@3.2.4: @@ -7778,8 +7784,8 @@ packages: engines: {node: '>=14.0.0'} hasBin: true - tailwindcss@4.1.14: - resolution: {integrity: sha512-b7pCxjGO98LnxVkKjaZSDeNuljC4ueKUddjENJOADtubtdo8llTaJy7HwBMeLNSSo2N5QIAgklslK1+Ir8r6CA==} + tailwindcss@4.1.16: + resolution: {integrity: sha512-pONL5awpaQX4LN5eiv7moSiSPd/DLDzKVRJz8Q9PgzmAdd1R4307GQS2ZpfiN7ZmekdQrfhZZiSE5jkLR4WNaA==} tapable@2.2.3: resolution: {integrity: sha512-ZL6DDuAlRlLGghwcfmSn9sK3Hr6ArtyudlSAiCqQ6IfE+b+HHbydbYDIG15IfS5do+7XQQBdBiubF/cV2dnDzg==} @@ -8133,8 +8139,8 @@ packages: peerDependencies: vite: ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 - vite-plugin-solid@2.11.9: - resolution: {integrity: sha512-bTA6p+bspXZsuulSd2y6aTzegF8xGaJYcq1Uyh/mv+W4DQtzCgL9nN6n2fsTaxp/dMk+ZHHKgGndlNeooqHLKw==} + vite-plugin-solid@2.11.10: + resolution: {integrity: sha512-Yr1dQybmtDtDAHkii6hXuc1oVH9CPcS/Zb2jN/P36qqcrkNnVPsMTzQ06jyzFPFjj3U1IYKMVt/9ZqcwGCEbjw==} peerDependencies: '@testing-library/jest-dom': ^5.16.6 || ^5.17.0 || ^6.* solid-js: ^1.7.2 @@ -8231,8 +8237,8 @@ packages: yaml: optional: true - vite@7.1.5: - resolution: {integrity: sha512-4cKBO9wR75r0BeIWWWId9XK9Lj6La5X846Zw9dFfzMRw38IlTk2iCcUt6hsyiDRcPidc55ZParFYDXi0nXOeLQ==} + vite@7.1.12: + resolution: {integrity: sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -8660,14 +8666,14 @@ snapshots: '@jridgewell/gen-mapping': 0.3.13 '@jridgewell/trace-mapping': 0.3.31 - '@angular-devkit/architect@0.2003.6(chokidar@4.0.3)': + '@angular-devkit/architect@0.2003.7(chokidar@4.0.3)': dependencies: - '@angular-devkit/core': 20.3.6(chokidar@4.0.3) + '@angular-devkit/core': 20.3.7(chokidar@4.0.3) rxjs: 7.8.2 transitivePeerDependencies: - chokidar - '@angular-devkit/core@20.3.6(chokidar@4.0.3)': + '@angular-devkit/core@20.3.7(chokidar@4.0.3)': dependencies: ajv: 8.17.1 ajv-formats: 3.0.1(ajv@8.17.1) @@ -8678,9 +8684,9 @@ snapshots: optionalDependencies: chokidar: 4.0.3 - 
'@angular-devkit/schematics@20.3.6(chokidar@4.0.3)': + '@angular-devkit/schematics@20.3.7(chokidar@4.0.3)': dependencies: - '@angular-devkit/core': 20.3.6(chokidar@4.0.3) + '@angular-devkit/core': 20.3.7(chokidar@4.0.3) jsonc-parser: 3.3.1 magic-string: 0.30.17 ora: 8.2.0 @@ -8688,17 +8694,17 @@ snapshots: transitivePeerDependencies: - chokidar - '@angular/build@20.3.6(@angular/compiler-cli@20.3.6(@angular/compiler@20.3.6)(typescript@5.8.3))(@angular/compiler@20.3.6)(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.0)(karma@6.4.4)(lightningcss@1.30.1)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(yaml@2.8.1)': + '@angular/build@20.3.7(@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3))(@angular/compiler@20.3.7)(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(@types/node@24.7.0)(chokidar@4.0.3)(jiti@2.6.1)(karma@6.4.4)(lightningcss@1.30.2)(postcss@8.5.6)(tailwindcss@3.4.18)(terser@5.44.0)(tslib@2.8.1)(tsx@4.20.6)(typescript@5.8.3)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(yaml@2.8.1)': dependencies: '@ampproject/remapping': 2.3.0 - '@angular-devkit/architect': 0.2003.6(chokidar@4.0.3) - '@angular/compiler': 20.3.6 - '@angular/compiler-cli': 20.3.6(@angular/compiler@20.3.6)(typescript@5.8.3) + '@angular-devkit/architect': 0.2003.7(chokidar@4.0.3) + '@angular/compiler': 20.3.7 + '@angular/compiler-cli': 20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3) '@babel/core': 7.28.3 '@babel/helper-annotate-as-pure': 7.27.3 '@babel/helper-split-export-declaration': 7.24.7 '@inquirer/confirm': 5.1.14(@types/node@24.7.0) - '@vitejs/plugin-basic-ssl': 2.1.0(vite@7.1.5(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@vitejs/plugin-basic-ssl': 2.1.0(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) beasties: 0.3.5 browserslist: 4.25.4 esbuild: 0.25.9 @@ -8718,16 +8724,16 @@ snapshots: tinyglobby: 0.2.14 tslib: 2.8.1 typescript: 5.8.3 - vite: 7.1.5(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) watchpack: 2.4.4 optionalDependencies: - '@angular/core': 20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1) - '@angular/platform-browser': 20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)) + '@angular/core': 20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1) + 
'@angular/platform-browser': 20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)) karma: 6.4.4 lmdb: 3.4.2 postcss: 8.5.6 tailwindcss: 3.4.18 - vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@types/node' - chokidar @@ -8741,15 +8747,15 @@ snapshots: - tsx - yaml - '@angular/cli@20.3.6(@types/node@24.7.0)(chokidar@4.0.3)': + '@angular/cli@20.3.7(@types/node@24.7.0)(chokidar@4.0.3)': dependencies: - '@angular-devkit/architect': 0.2003.6(chokidar@4.0.3) - '@angular-devkit/core': 20.3.6(chokidar@4.0.3) - '@angular-devkit/schematics': 20.3.6(chokidar@4.0.3) + '@angular-devkit/architect': 0.2003.7(chokidar@4.0.3) + '@angular-devkit/core': 20.3.7(chokidar@4.0.3) + '@angular-devkit/schematics': 20.3.7(chokidar@4.0.3) '@inquirer/prompts': 7.8.2(@types/node@24.7.0) '@listr2/prompt-adapter-inquirer': 3.0.1(@inquirer/prompts@7.8.2(@types/node@24.7.0))(@types/node@24.7.0)(listr2@9.0.1) '@modelcontextprotocol/sdk': 1.17.3 - '@schematics/angular': 20.3.6(chokidar@4.0.3) + '@schematics/angular': 20.3.7(chokidar@4.0.3) '@yarnpkg/lockfile': 1.1.0 algoliasearch: 5.35.0 ini: 5.0.0 @@ -8772,15 +8778,15 @@ snapshots: rxjs: 7.8.2 tslib: 2.8.1 - '@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2)': + '@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2)': dependencies: - '@angular/core': 20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1) + '@angular/core': 20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1) rxjs: 7.8.2 tslib: 2.8.1 - '@angular/compiler-cli@20.3.6(@angular/compiler@20.3.6)(typescript@5.8.3)': + '@angular/compiler-cli@20.3.7(@angular/compiler@20.3.7)(typescript@5.8.3)': dependencies: - '@angular/compiler': 20.3.6 + '@angular/compiler': 20.3.7 '@babel/core': 7.28.3 '@jridgewell/sourcemap-codec': 1.5.5 chokidar: 4.0.3 @@ -8798,7 +8804,7 @@ snapshots: dependencies: tslib: 2.8.1 - '@angular/compiler@20.3.6': + '@angular/compiler@20.3.7': dependencies: tslib: 2.8.1 @@ -8808,19 +8814,19 @@ snapshots: tslib: 2.8.1 zone.js: 0.15.1 - '@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)': + '@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)': dependencies: rxjs: 7.8.2 tslib: 2.8.1 optionalDependencies: - '@angular/compiler': 20.3.6 + '@angular/compiler': 20.3.7 zone.js: 0.15.1 - '@angular/forms@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2)': + 
'@angular/forms@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2)': dependencies: - '@angular/common': 20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) - '@angular/core': 20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1) - '@angular/platform-browser': 20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)) + '@angular/common': 20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) + '@angular/core': 20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1) + '@angular/platform-browser': 20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)) rxjs: 7.8.2 tslib: 2.8.1 @@ -8838,17 +8844,17 @@ snapshots: '@angular/core': 19.2.15(rxjs@7.8.2)(zone.js@0.15.1) tslib: 2.8.1 - '@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))': + '@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))': dependencies: - '@angular/common': 20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) - '@angular/core': 20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1) + '@angular/common': 20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) + '@angular/core': 20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1) tslib: 2.8.1 - '@angular/router@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2)': + '@angular/router@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(@angular/platform-browser@20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)))(rxjs@7.8.2)': dependencies: - '@angular/common': 20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) - '@angular/core': 20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1) - '@angular/platform-browser': 
20.3.6(@angular/common@20.3.6(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.6(@angular/compiler@20.3.6)(rxjs@7.8.2)(zone.js@0.15.1)) + '@angular/common': 20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2) + '@angular/core': 20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1) + '@angular/platform-browser': 20.3.7(@angular/common@20.3.7(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1))(rxjs@7.8.2))(@angular/core@20.3.7(@angular/compiler@20.3.7)(rxjs@7.8.2)(zone.js@0.15.1)) rxjs: 7.8.2 tslib: 2.8.1 @@ -9324,7 +9330,7 @@ snapshots: '@drizzle-team/brocli@0.10.2': {} - '@electric-sql/client@1.0.14': + '@electric-sql/client@1.1.0': dependencies: '@microsoft/fetch-event-source': 2.0.1 optionalDependencies: @@ -9578,18 +9584,18 @@ snapshots: '@esbuild/win32-x64@0.25.9': optional: true - '@eslint-community/eslint-utils@4.9.0(eslint@9.38.0(jiti@2.6.0))': + '@eslint-community/eslint-utils@4.9.0(eslint@9.38.0(jiti@2.6.1))': dependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) eslint-visitor-keys: 3.4.3 '@eslint-community/regexpp@4.12.1': {} - '@eslint/compat@1.4.0(eslint@9.38.0(jiti@2.6.0))': + '@eslint/compat@1.4.0(eslint@9.38.0(jiti@2.6.1))': dependencies: '@eslint/core': 0.16.0 optionalDependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) '@eslint/config-array@0.21.1': dependencies: @@ -10656,10 +10662,10 @@ snapshots: '@rolldown/pluginutils@1.0.0-beta.29': {} - '@rolldown/pluginutils@1.0.0-beta.38': {} - '@rolldown/pluginutils@1.0.0-beta.40': {} + '@rolldown/pluginutils@1.0.0-beta.43': {} + '@rollup/pluginutils@5.3.0(rollup@4.52.5)': dependencies: '@types/estree': 1.0.8 @@ -10834,10 +10840,10 @@ snapshots: transitivePeerDependencies: - '@types/node' - '@schematics/angular@20.3.6(chokidar@4.0.3)': + '@schematics/angular@20.3.7(chokidar@4.0.3)': dependencies: - '@angular-devkit/core': 20.3.6(chokidar@4.0.3) - '@angular-devkit/schematics': 20.3.6(chokidar@4.0.3) + '@angular-devkit/core': 20.3.7(chokidar@4.0.3) + '@angular-devkit/schematics': 20.3.7(chokidar@4.0.3) jsonc-parser: 3.3.1 transitivePeerDependencies: - chokidar @@ -11028,10 +11034,10 @@ snapshots: '@standard-schema/spec@1.0.0': {} - '@stylistic/eslint-plugin@4.4.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@stylistic/eslint-plugin@4.4.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/utils': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - eslint: 9.38.0(jiti@2.6.0) + '@typescript-eslint/utils': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + eslint: 9.38.0(jiti@2.6.1) eslint-visitor-keys: 4.2.1 espree: 10.4.0 estraverse: 5.3.0 @@ -11040,11 +11046,11 @@ snapshots: - supports-color - typescript - '@stylistic/eslint-plugin@5.4.0(eslint@9.38.0(jiti@2.6.0))': + '@stylistic/eslint-plugin@5.4.0(eslint@9.38.0(jiti@2.6.1))': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.0)) - '@typescript-eslint/types': 8.46.1 - eslint: 9.38.0(jiti@2.6.0) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.1)) + '@typescript-eslint/types': 8.46.2 + eslint: 9.38.0(jiti@2.6.1) eslint-visitor-keys: 4.2.1 espree: 10.4.0 estraverse: 5.3.0 @@ -11054,35 +11060,35 @@ snapshots: dependencies: acorn: 8.15.0 - '@sveltejs/package@2.5.4(svelte@5.41.0)(typescript@5.9.3)': + '@sveltejs/package@2.5.4(svelte@5.42.2)(typescript@5.9.3)': dependencies: chokidar: 4.0.3 kleur: 4.1.5 sade: 1.8.1 semver: 7.7.3 - 
svelte: 5.41.0 - svelte2tsx: 0.7.42(svelte@5.41.0)(typescript@5.9.3) + svelte: 5.42.2 + svelte2tsx: 0.7.42(svelte@5.42.2)(typescript@5.9.3) transitivePeerDependencies: - typescript - '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte-inspector@5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte': 6.2.1(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) debug: 4.4.3 - svelte: 5.41.0 - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + svelte: 5.42.2 + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.41.0)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@sveltejs/vite-plugin-svelte-inspector': 5.0.1(@sveltejs/vite-plugin-svelte@6.2.1(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(svelte@5.42.2)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) debug: 4.4.3 deepmerge: 4.3.1 magic-string: 0.30.19 - svelte: 5.41.0 - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + svelte: 5.42.2 + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) transitivePeerDependencies: - supports-color @@ -11093,90 +11099,87 @@ snapshots: transitivePeerDependencies: - encoding - '@tailwindcss/node@4.1.14': + '@tailwindcss/node@4.1.16': dependencies: '@jridgewell/remapping': 2.3.5 enhanced-resolve: 5.18.3 - jiti: 2.6.0 - lightningcss: 1.30.1 + jiti: 2.6.1 + lightningcss: 
1.30.2 magic-string: 0.30.19 source-map-js: 1.2.1 - tailwindcss: 4.1.14 + tailwindcss: 4.1.16 - '@tailwindcss/oxide-android-arm64@4.1.14': + '@tailwindcss/oxide-android-arm64@4.1.16': optional: true - '@tailwindcss/oxide-darwin-arm64@4.1.14': + '@tailwindcss/oxide-darwin-arm64@4.1.16': optional: true - '@tailwindcss/oxide-darwin-x64@4.1.14': + '@tailwindcss/oxide-darwin-x64@4.1.16': optional: true - '@tailwindcss/oxide-freebsd-x64@4.1.14': + '@tailwindcss/oxide-freebsd-x64@4.1.16': optional: true - '@tailwindcss/oxide-linux-arm-gnueabihf@4.1.14': + '@tailwindcss/oxide-linux-arm-gnueabihf@4.1.16': optional: true - '@tailwindcss/oxide-linux-arm64-gnu@4.1.14': + '@tailwindcss/oxide-linux-arm64-gnu@4.1.16': optional: true - '@tailwindcss/oxide-linux-arm64-musl@4.1.14': + '@tailwindcss/oxide-linux-arm64-musl@4.1.16': optional: true - '@tailwindcss/oxide-linux-x64-gnu@4.1.14': + '@tailwindcss/oxide-linux-x64-gnu@4.1.16': optional: true - '@tailwindcss/oxide-linux-x64-musl@4.1.14': + '@tailwindcss/oxide-linux-x64-musl@4.1.16': optional: true - '@tailwindcss/oxide-wasm32-wasi@4.1.14': + '@tailwindcss/oxide-wasm32-wasi@4.1.16': optional: true - '@tailwindcss/oxide-win32-arm64-msvc@4.1.14': + '@tailwindcss/oxide-win32-arm64-msvc@4.1.16': optional: true - '@tailwindcss/oxide-win32-x64-msvc@4.1.14': + '@tailwindcss/oxide-win32-x64-msvc@4.1.16': optional: true - '@tailwindcss/oxide@4.1.14': - dependencies: - detect-libc: 2.0.4 - tar: 7.5.1 + '@tailwindcss/oxide@4.1.16': optionalDependencies: - '@tailwindcss/oxide-android-arm64': 4.1.14 - '@tailwindcss/oxide-darwin-arm64': 4.1.14 - '@tailwindcss/oxide-darwin-x64': 4.1.14 - '@tailwindcss/oxide-freebsd-x64': 4.1.14 - '@tailwindcss/oxide-linux-arm-gnueabihf': 4.1.14 - '@tailwindcss/oxide-linux-arm64-gnu': 4.1.14 - '@tailwindcss/oxide-linux-arm64-musl': 4.1.14 - '@tailwindcss/oxide-linux-x64-gnu': 4.1.14 - '@tailwindcss/oxide-linux-x64-musl': 4.1.14 - '@tailwindcss/oxide-wasm32-wasi': 4.1.14 - '@tailwindcss/oxide-win32-arm64-msvc': 4.1.14 - '@tailwindcss/oxide-win32-x64-msvc': 4.1.14 - - '@tailwindcss/vite@4.1.14(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': - dependencies: - '@tailwindcss/node': 4.1.14 - '@tailwindcss/oxide': 4.1.14 - tailwindcss: 4.1.14 - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - - '@tailwindcss/vite@4.1.14(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': - dependencies: - '@tailwindcss/node': 4.1.14 - '@tailwindcss/oxide': 4.1.14 - tailwindcss: 4.1.14 - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - - '@tanstack/config@0.22.0(@types/node@24.7.0)(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': - dependencies: - '@tanstack/eslint-config': 0.3.2(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@tailwindcss/oxide-android-arm64': 4.1.16 + '@tailwindcss/oxide-darwin-arm64': 4.1.16 + '@tailwindcss/oxide-darwin-x64': 4.1.16 + '@tailwindcss/oxide-freebsd-x64': 4.1.16 + '@tailwindcss/oxide-linux-arm-gnueabihf': 4.1.16 + '@tailwindcss/oxide-linux-arm64-gnu': 4.1.16 + 
'@tailwindcss/oxide-linux-arm64-musl': 4.1.16 + '@tailwindcss/oxide-linux-x64-gnu': 4.1.16 + '@tailwindcss/oxide-linux-x64-musl': 4.1.16 + '@tailwindcss/oxide-wasm32-wasi': 4.1.16 + '@tailwindcss/oxide-win32-arm64-msvc': 4.1.16 + '@tailwindcss/oxide-win32-x64-msvc': 4.1.16 + + '@tailwindcss/vite@4.1.16(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + dependencies: + '@tailwindcss/node': 4.1.16 + '@tailwindcss/oxide': 4.1.16 + tailwindcss: 4.1.16 + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + + '@tailwindcss/vite@4.1.16(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + dependencies: + '@tailwindcss/node': 4.1.16 + '@tailwindcss/oxide': 4.1.16 + tailwindcss: 4.1.16 + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + + '@tanstack/config@0.22.0(@types/node@24.7.0)(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + dependencies: + '@tanstack/eslint-config': 0.3.2(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@tanstack/publish-config': 0.2.1 '@tanstack/typedoc-config': 0.3.0(typescript@5.9.3) - '@tanstack/vite-config': 0.4.0(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/vite-config': 0.4.0(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) transitivePeerDependencies: - '@types/node' - '@typescript-eslint/utils' @@ -11187,43 +11190,43 @@ snapshots: - typescript - vite - '@tanstack/directive-functions-plugin@1.133.9(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/directive-functions-plugin@1.133.19(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/code-frame': 7.27.1 '@babel/core': 7.28.4 '@babel/traverse': 7.28.4 '@babel/types': 7.28.4 - '@tanstack/router-utils': 1.133.3 + '@tanstack/router-utils': 1.133.19 babel-dead-code-elimination: 1.0.10 pathe: 2.0.3 tiny-invariant: 1.3.3 - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@tanstack/directive-functions-plugin@1.133.9(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/directive-functions-plugin@1.133.19(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/code-frame': 7.27.1 '@babel/core': 7.28.4 '@babel/traverse': 7.28.4 '@babel/types': 7.28.4 - '@tanstack/router-utils': 1.133.3 + '@tanstack/router-utils': 1.133.19 babel-dead-code-elimination: 1.0.10 
pathe: 2.0.3 tiny-invariant: 1.3.3 - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@tanstack/eslint-config@0.3.2(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@tanstack/eslint-config@0.3.2(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint/js': 9.38.0 - '@stylistic/eslint-plugin': 5.4.0(eslint@9.38.0(jiti@2.6.0)) - eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0)) - eslint-plugin-n: 17.23.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@stylistic/eslint-plugin': 5.4.0(eslint@9.38.0(jiti@2.6.1)) + eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1)) + eslint-plugin-n: 17.23.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) globals: 16.4.0 - typescript-eslint: 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - vue-eslint-parser: 10.2.0(eslint@9.38.0(jiti@2.6.0)) + typescript-eslint: 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + vue-eslint-parser: 10.2.0(eslint@9.38.0(jiti@2.6.1)) transitivePeerDependencies: - '@typescript-eslint/utils' - eslint @@ -11231,7 +11234,7 @@ snapshots: - supports-color - typescript - '@tanstack/history@1.133.3': {} + '@tanstack/history@1.133.28': {} '@tanstack/publish-config@0.2.1': dependencies: @@ -11251,13 +11254,13 @@ snapshots: '@tanstack/query-core': 5.83.0 react: 19.2.0 - '@tanstack/react-router-devtools@1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.15)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.0)(lightningcss@1.30.1)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1)': + '@tanstack/react-router-devtools@1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.28)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.1)(lightningcss@1.30.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1)': dependencies: - '@tanstack/react-router': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/router-devtools-core': 1.133.15(@tanstack/router-core@1.133.15)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1) + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/router-devtools-core': 1.133.28(@tanstack/router-core@1.133.28)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1) react: 19.2.0 react-dom: 19.2.0(react@19.2.0) - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@tanstack/router-core' - '@types/node' @@ 
-11275,60 +11278,60 @@ snapshots: - tsx - yaml - '@tanstack/react-router-with-query@1.130.17(@tanstack/react-query@5.83.0(react@19.2.0))(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.15)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': + '@tanstack/react-router-with-query@1.130.17(@tanstack/react-query@5.83.0(react@19.2.0))(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@tanstack/router-core@1.133.28)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@tanstack/react-query': 5.83.0(react@19.2.0) - '@tanstack/react-router': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/router-core': 1.133.15 + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/router-core': 1.133.28 react: 19.2.0 react-dom: 19.2.0(react@19.2.0) - '@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': + '@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@tanstack/history': 1.133.3 + '@tanstack/history': 1.133.28 '@tanstack/react-store': 0.7.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/router-core': 1.133.15 + '@tanstack/router-core': 1.133.28 isbot: 5.1.30 react: 19.2.0 react-dom: 19.2.0(react@19.2.0) tiny-invariant: 1.3.3 tiny-warning: 1.0.3 - '@tanstack/react-start-client@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': + '@tanstack/react-start-client@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@tanstack/react-router': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/router-core': 1.133.15 - '@tanstack/start-client-core': 1.133.15 + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/router-core': 1.133.28 + '@tanstack/start-client-core': 1.133.28 react: 19.2.0 react-dom: 19.2.0(react@19.2.0) tiny-invariant: 1.3.3 tiny-warning: 1.0.3 - '@tanstack/react-start-server@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': + '@tanstack/react-start-server@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@tanstack/history': 1.133.3 - '@tanstack/react-router': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/router-core': 1.133.15 - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-server-core': 1.133.15 + '@tanstack/history': 1.133.28 + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/router-core': 1.133.28 + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-server-core': 1.133.31 react: 19.2.0 react-dom: 19.2.0(react@19.2.0) transitivePeerDependencies: - crossws - '@tanstack/react-start@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/react-start@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - '@tanstack/react-router': 
1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/react-start-client': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/react-start-server': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - '@tanstack/router-utils': 1.133.3 - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-plugin-core': 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - '@tanstack/start-server-core': 1.133.15 + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/react-start-client': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/react-start-server': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@tanstack/router-utils': 1.133.19 + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-plugin-core': 1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/start-server-core': 1.133.31 pathe: 2.0.3 react: 19.2.0 react-dom: 19.2.0(react@19.2.0) - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@rsbuild/core' - crossws @@ -11343,9 +11346,9 @@ snapshots: react-dom: 19.2.0(react@19.2.0) use-sync-external-store: 1.6.0(react@19.2.0) - '@tanstack/router-core@1.133.15': + '@tanstack/router-core@1.133.28': dependencies: - '@tanstack/history': 1.133.3 + '@tanstack/history': 1.133.28 '@tanstack/store': 0.7.7 cookie-es: 2.0.0 seroval: 1.3.2 @@ -11353,14 +11356,14 @@ snapshots: tiny-invariant: 1.3.3 tiny-warning: 1.0.3 - '@tanstack/router-devtools-core@1.133.15(@tanstack/router-core@1.133.15)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1)': + '@tanstack/router-devtools-core@1.133.28(@tanstack/router-core@1.133.28)(@types/node@24.7.0)(csstype@3.1.3)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(solid-js@1.9.9)(terser@5.44.0)(tiny-invariant@1.3.3)(tsx@4.20.6)(yaml@2.8.1)': dependencies: - '@tanstack/router-core': 1.133.15 + '@tanstack/router-core': 1.133.28 clsx: 2.1.1 goober: 2.1.16(csstype@3.1.3) solid-js: 1.9.9 tiny-invariant: 1.3.3 - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) optionalDependencies: csstype: 3.1.3 transitivePeerDependencies: @@ -11376,11 +11379,11 @@ snapshots: - tsx - yaml - '@tanstack/router-generator@1.133.15': + '@tanstack/router-generator@1.133.29': dependencies: - '@tanstack/router-core': 1.133.15 - '@tanstack/router-utils': 1.133.3 - '@tanstack/virtual-file-routes': 1.133.3 + 
'@tanstack/router-core': 1.133.28 + '@tanstack/router-utils': 1.133.19 + '@tanstack/virtual-file-routes': 1.133.19 prettier: 3.6.2 recast: 0.23.11 source-map: 0.7.6 @@ -11389,7 +11392,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@tanstack/router-plugin@1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/router-plugin@1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/core': 7.28.4 '@babel/plugin-syntax-jsx': 7.27.1(@babel/core@7.28.4) @@ -11397,22 +11400,22 @@ snapshots: '@babel/template': 7.27.2 '@babel/traverse': 7.28.4 '@babel/types': 7.28.4 - '@tanstack/router-core': 1.133.15 - '@tanstack/router-generator': 1.133.15 - '@tanstack/router-utils': 1.133.3 - '@tanstack/virtual-file-routes': 1.133.3 + '@tanstack/router-core': 1.133.28 + '@tanstack/router-generator': 1.133.29 + '@tanstack/router-utils': 1.133.19 + '@tanstack/virtual-file-routes': 1.133.19 babel-dead-code-elimination: 1.0.10 chokidar: 3.6.0 unplugin: 2.3.10 zod: 3.25.76 optionalDependencies: - '@tanstack/react-router': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vite-plugin-solid: 2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite-plugin-solid: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) transitivePeerDependencies: - supports-color - '@tanstack/router-plugin@1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/router-plugin@1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/core': 7.28.4 '@babel/plugin-syntax-jsx': 7.27.1(@babel/core@7.28.4) @@ -11420,22 +11423,22 @@ snapshots: 
'@babel/template': 7.27.2 '@babel/traverse': 7.28.4 '@babel/types': 7.28.4 - '@tanstack/router-core': 1.133.15 - '@tanstack/router-generator': 1.133.15 - '@tanstack/router-utils': 1.133.3 - '@tanstack/virtual-file-routes': 1.133.3 + '@tanstack/router-core': 1.133.28 + '@tanstack/router-generator': 1.133.29 + '@tanstack/router-utils': 1.133.19 + '@tanstack/virtual-file-routes': 1.133.19 babel-dead-code-elimination: 1.0.10 chokidar: 3.6.0 unplugin: 2.3.10 zod: 3.25.76 optionalDependencies: - '@tanstack/react-router': 1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vite-plugin-solid: 2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/react-router': 1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite-plugin-solid: 2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) transitivePeerDependencies: - supports-color - '@tanstack/router-utils@1.133.3': + '@tanstack/router-utils@1.133.19': dependencies: '@babel/core': 7.28.4 '@babel/generator': 7.28.3 @@ -11448,7 +11451,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@tanstack/server-functions-plugin@1.133.11(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/server-functions-plugin@1.133.25(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/code-frame': 7.27.1 '@babel/core': 7.28.4 @@ -11457,14 +11460,14 @@ snapshots: '@babel/template': 7.27.2 '@babel/traverse': 7.28.4 '@babel/types': 7.28.4 - '@tanstack/directive-functions-plugin': 1.133.9(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/directive-functions-plugin': 1.133.19(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) babel-dead-code-elimination: 1.0.10 tiny-invariant: 1.3.3 transitivePeerDependencies: - supports-color - vite - '@tanstack/server-functions-plugin@1.133.11(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/server-functions-plugin@1.133.25(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/code-frame': 7.27.1 '@babel/core': 7.28.4 @@ -11473,58 +11476,58 @@ snapshots: '@babel/template': 7.27.2 '@babel/traverse': 7.28.4 '@babel/types': 7.28.4 - '@tanstack/directive-functions-plugin': 1.133.9(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/directive-functions-plugin': 1.133.19(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) babel-dead-code-elimination: 1.0.10 tiny-invariant: 1.3.3 transitivePeerDependencies: - supports-color - vite - '@tanstack/solid-router@1.133.15(solid-js@1.9.9)': + 
'@tanstack/solid-router@1.133.31(solid-js@1.9.9)': dependencies: '@solid-devtools/logger': 0.9.11(solid-js@1.9.9) '@solid-primitives/refs': 1.1.2(solid-js@1.9.9) '@solidjs/meta': 0.29.4(solid-js@1.9.9) - '@tanstack/history': 1.133.3 - '@tanstack/router-core': 1.133.15 + '@tanstack/history': 1.133.28 + '@tanstack/router-core': 1.133.28 '@tanstack/solid-store': 0.7.0(solid-js@1.9.9) isbot: 5.1.30 solid-js: 1.9.9 tiny-invariant: 1.3.3 tiny-warning: 1.0.3 - '@tanstack/solid-start-client@1.133.15(solid-js@1.9.9)': + '@tanstack/solid-start-client@1.133.31(solid-js@1.9.9)': dependencies: - '@tanstack/router-core': 1.133.15 - '@tanstack/solid-router': 1.133.15(solid-js@1.9.9) - '@tanstack/start-client-core': 1.133.15 + '@tanstack/router-core': 1.133.28 + '@tanstack/solid-router': 1.133.31(solid-js@1.9.9) + '@tanstack/start-client-core': 1.133.28 solid-js: 1.9.9 tiny-invariant: 1.3.3 tiny-warning: 1.0.3 - '@tanstack/solid-start-server@1.133.15(solid-js@1.9.9)': + '@tanstack/solid-start-server@1.133.31(solid-js@1.9.9)': dependencies: '@solidjs/meta': 0.29.4(solid-js@1.9.9) - '@tanstack/history': 1.133.3 - '@tanstack/router-core': 1.133.15 - '@tanstack/solid-router': 1.133.15(solid-js@1.9.9) - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-server-core': 1.133.15 + '@tanstack/history': 1.133.28 + '@tanstack/router-core': 1.133.28 + '@tanstack/solid-router': 1.133.31(solid-js@1.9.9) + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-server-core': 1.133.31 solid-js: 1.9.9 transitivePeerDependencies: - crossws - '@tanstack/solid-start@1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(solid-js@1.9.9)(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/solid-start@1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(solid-js@1.9.9)(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - '@tanstack/solid-router': 1.133.15(solid-js@1.9.9) - '@tanstack/solid-start-client': 1.133.15(solid-js@1.9.9) - '@tanstack/solid-start-server': 1.133.15(solid-js@1.9.9) - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-plugin-core': 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - '@tanstack/start-server-core': 1.133.15 + '@tanstack/solid-router': 1.133.31(solid-js@1.9.9) + '@tanstack/solid-start-client': 1.133.31(solid-js@1.9.9) + '@tanstack/solid-start-server': 1.133.31(solid-js@1.9.9) + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-plugin-core': 
1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/start-server-core': 1.133.31 pathe: 2.0.3 solid-js: 1.9.9 - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@rsbuild/core' - '@tanstack/react-router' @@ -11538,27 +11541,27 @@ snapshots: '@tanstack/store': 0.7.0 solid-js: 1.9.9 - '@tanstack/start-client-core@1.133.15': + '@tanstack/start-client-core@1.133.28': dependencies: - '@tanstack/router-core': 1.133.15 - '@tanstack/start-storage-context': 1.133.15 + '@tanstack/router-core': 1.133.28 + '@tanstack/start-storage-context': 1.133.28 seroval: 1.3.2 tiny-invariant: 1.3.3 tiny-warning: 1.0.3 - '@tanstack/start-plugin-core@1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/start-plugin-core@1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/code-frame': 7.26.2 '@babel/core': 7.28.4 '@babel/types': 7.28.4 '@rolldown/pluginutils': 1.0.0-beta.40 - '@tanstack/router-core': 1.133.15 - '@tanstack/router-generator': 1.133.15 - '@tanstack/router-plugin': 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - '@tanstack/router-utils': 1.133.3 - '@tanstack/server-functions-plugin': 1.133.11(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-server-core': 1.133.15 + '@tanstack/router-core': 1.133.28 + '@tanstack/router-generator': 1.133.29 + '@tanstack/router-plugin': 1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/router-utils': 1.133.19 + '@tanstack/server-functions-plugin': 
1.133.25(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-server-core': 1.133.31 babel-dead-code-elimination: 1.0.10 cheerio: 1.1.2 exsolve: 1.0.7 @@ -11566,8 +11569,8 @@ snapshots: srvx: 0.8.16 tinyglobby: 0.2.15 ufo: 1.6.1 - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) xmlbuilder2: 3.1.1 zod: 3.25.76 transitivePeerDependencies: @@ -11578,19 +11581,19 @@ snapshots: - vite-plugin-solid - webpack - '@tanstack/start-plugin-core@1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/start-plugin-core@1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/code-frame': 7.26.2 '@babel/core': 7.28.4 '@babel/types': 7.28.4 '@rolldown/pluginutils': 1.0.0-beta.40 - '@tanstack/router-core': 1.133.15 - '@tanstack/router-generator': 1.133.15 - '@tanstack/router-plugin': 1.133.15(@tanstack/react-router@1.133.15(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - '@tanstack/router-utils': 1.133.3 - '@tanstack/server-functions-plugin': 1.133.11(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-server-core': 1.133.15 + '@tanstack/router-core': 1.133.28 + '@tanstack/router-generator': 1.133.29 + '@tanstack/router-plugin': 1.133.32(@tanstack/react-router@1.133.32(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)))(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/router-utils': 1.133.19 + '@tanstack/server-functions-plugin': 1.133.25(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-server-core': 1.133.31 
babel-dead-code-elimination: 1.0.10 cheerio: 1.1.2 exsolve: 1.0.7 @@ -11598,8 +11601,8 @@ snapshots: srvx: 0.8.16 tinyglobby: 0.2.15 ufo: 1.6.1 - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) xmlbuilder2: 3.1.1 zod: 3.25.76 transitivePeerDependencies: @@ -11610,21 +11613,21 @@ snapshots: - vite-plugin-solid - webpack - '@tanstack/start-server-core@1.133.15': + '@tanstack/start-server-core@1.133.31': dependencies: - '@tanstack/history': 1.133.3 - '@tanstack/router-core': 1.133.15 - '@tanstack/start-client-core': 1.133.15 - '@tanstack/start-storage-context': 1.133.15 + '@tanstack/history': 1.133.28 + '@tanstack/router-core': 1.133.28 + '@tanstack/start-client-core': 1.133.28 + '@tanstack/start-storage-context': 1.133.28 h3-v2: h3@2.0.0-beta.4 seroval: 1.3.2 tiny-invariant: 1.3.3 transitivePeerDependencies: - crossws - '@tanstack/start-storage-context@1.133.15': + '@tanstack/start-storage-context@1.133.28': dependencies: - '@tanstack/router-core': 1.133.15 + '@tanstack/router-core': 1.133.28 '@tanstack/store@0.7.0': {} @@ -11642,14 +11645,14 @@ snapshots: transitivePeerDependencies: - typescript - '@tanstack/virtual-file-routes@1.133.3': {} + '@tanstack/virtual-file-routes@1.133.19': {} - '@tanstack/vite-config@0.4.0(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@tanstack/vite-config@0.4.0(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: rollup-plugin-preserve-directives: 0.4.0(rollup@4.52.5) - vite-plugin-dts: 4.2.3(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - vite-plugin-externalize-deps: 0.10.0(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) - vite-tsconfig-paths: 5.1.4(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite-plugin-dts: 4.2.3(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite-plugin-externalize-deps: 0.10.0(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite-tsconfig-paths: 5.1.4(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) transitivePeerDependencies: - '@types/node' - rollup @@ -11687,12 +11690,12 @@ snapshots: '@types/react': 19.2.2 '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@trpc/client@11.6.0(@trpc/server@11.6.0(typescript@5.9.3))(typescript@5.9.3)': + '@trpc/client@11.7.0(@trpc/server@11.7.0(typescript@5.9.3))(typescript@5.9.3)': dependencies: - '@trpc/server': 11.6.0(typescript@5.9.3) 
+ '@trpc/server': 11.7.0(typescript@5.9.3) typescript: 5.9.3 - '@trpc/server@11.6.0(typescript@5.9.3)': + '@trpc/server@11.7.0(typescript@5.9.3)': dependencies: typescript: 5.9.3 @@ -11780,7 +11783,7 @@ snapshots: '@types/range-parser': 1.2.7 '@types/send': 0.17.5 - '@types/express@4.17.23': + '@types/express@4.17.24': dependencies: '@types/body-parser': 1.19.6 '@types/express-serve-static-core': 4.19.6 @@ -11866,15 +11869,15 @@ snapshots: dependencies: '@types/node': 24.7.0 - '@typescript-eslint/eslint-plugin@8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@typescript-eslint/parser': 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/scope-manager': 8.44.1 - '@typescript-eslint/type-utils': 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - '@typescript-eslint/utils': 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@typescript-eslint/type-utils': 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/visitor-keys': 8.44.1 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) graphemer: 1.4.0 ignore: 7.0.5 natural-compare: 1.4.0 @@ -11883,15 +11886,15 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/eslint-plugin@8.46.1(@typescript-eslint/parser@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.46.2(@typescript-eslint/parser@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.46.1 - '@typescript-eslint/type-utils': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - '@typescript-eslint/utils': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.46.1 - eslint: 9.38.0(jiti@2.6.0) + '@typescript-eslint/parser': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.46.2 + '@typescript-eslint/type-utils': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.46.2 + eslint: 9.38.0(jiti@2.6.1) graphemer: 1.4.0 ignore: 7.0.5 natural-compare: 1.4.0 @@ -11900,43 +11903,43 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/parser@8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@typescript-eslint/scope-manager': 8.44.1 '@typescript-eslint/types': 8.44.1 '@typescript-eslint/typescript-estree': 8.44.1(typescript@5.9.3) '@typescript-eslint/visitor-keys': 8.44.1 debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + 
'@typescript-eslint/parser@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.46.1 - '@typescript-eslint/types': 8.46.1 - '@typescript-eslint/typescript-estree': 8.46.1(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.46.1 + '@typescript-eslint/scope-manager': 8.46.2 + '@typescript-eslint/types': 8.46.2 + '@typescript-eslint/typescript-estree': 8.46.2(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.46.2 debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color '@typescript-eslint/project-service@8.44.1(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.46.1(typescript@5.9.3) - '@typescript-eslint/types': 8.46.1 + '@typescript-eslint/tsconfig-utils': 8.46.2(typescript@5.9.3) + '@typescript-eslint/types': 8.46.2 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.46.1(typescript@5.9.3)': + '@typescript-eslint/project-service@8.46.2(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.46.1(typescript@5.9.3) - '@typescript-eslint/types': 8.46.1 + '@typescript-eslint/tsconfig-utils': 8.46.2(typescript@5.9.3) + '@typescript-eslint/types': 8.46.2 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: @@ -11947,38 +11950,38 @@ snapshots: '@typescript-eslint/types': 8.44.1 '@typescript-eslint/visitor-keys': 8.44.1 - '@typescript-eslint/scope-manager@8.46.1': + '@typescript-eslint/scope-manager@8.46.2': dependencies: - '@typescript-eslint/types': 8.46.1 - '@typescript-eslint/visitor-keys': 8.46.1 + '@typescript-eslint/types': 8.46.2 + '@typescript-eslint/visitor-keys': 8.46.2 '@typescript-eslint/tsconfig-utils@8.44.1(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/tsconfig-utils@8.46.1(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.46.2(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@typescript-eslint/types': 8.44.1 '@typescript-eslint/typescript-estree': 8.44.1(typescript@5.9.3) - '@typescript-eslint/utils': 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@typescript-eslint/utils': 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/type-utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.46.1 - '@typescript-eslint/typescript-estree': 8.46.1(typescript@5.9.3) - '@typescript-eslint/utils': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@typescript-eslint/types': 8.46.2 + '@typescript-eslint/typescript-estree': 8.46.2(typescript@5.9.3) + '@typescript-eslint/utils': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -11986,7 +11989,7 @@ snapshots: '@typescript-eslint/types@8.44.1': {} - '@typescript-eslint/types@8.46.1': {} + '@typescript-eslint/types@8.46.2': {} 
'@typescript-eslint/typescript-estree@8.44.1(typescript@5.9.3)': dependencies: @@ -12004,12 +12007,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@8.46.1(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.46.2(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.46.1(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.46.1(typescript@5.9.3) - '@typescript-eslint/types': 8.46.1 - '@typescript-eslint/visitor-keys': 8.46.1 + '@typescript-eslint/project-service': 8.46.2(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.46.2(typescript@5.9.3) + '@typescript-eslint/types': 8.46.2 + '@typescript-eslint/visitor-keys': 8.46.2 debug: 4.4.3 fast-glob: 3.3.3 is-glob: 4.0.3 @@ -12020,24 +12023,24 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/utils@8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.0)) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.1)) '@typescript-eslint/scope-manager': 8.44.1 '@typescript-eslint/types': 8.44.1 '@typescript-eslint/typescript-estree': 8.44.1(typescript@5.9.3) - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3)': + '@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.0)) - '@typescript-eslint/scope-manager': 8.46.1 - '@typescript-eslint/types': 8.46.1 - '@typescript-eslint/typescript-estree': 8.46.1(typescript@5.9.3) - eslint: 9.38.0(jiti@2.6.0) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.1)) + '@typescript-eslint/scope-manager': 8.46.2 + '@typescript-eslint/types': 8.46.2 + '@typescript-eslint/typescript-estree': 8.46.2(typescript@5.9.3) + eslint: 9.38.0(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color @@ -12047,9 +12050,9 @@ snapshots: '@typescript-eslint/types': 8.44.1 eslint-visitor-keys: 4.2.1 - '@typescript-eslint/visitor-keys@8.46.1': + '@typescript-eslint/visitor-keys@8.46.2': dependencies: - '@typescript-eslint/types': 8.46.1 + '@typescript-eslint/types': 8.46.2 eslint-visitor-keys: 4.2.1 '@ungap/raw-json@0.4.4': {} @@ -12113,41 +12116,41 @@ snapshots: '@unrs/resolver-binding-win32-x64-msvc@1.11.1': optional: true - '@vitejs/plugin-basic-ssl@2.1.0(vite@7.1.5(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@vitejs/plugin-basic-ssl@2.1.0(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - vite: 7.1.5(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - '@vitejs/plugin-react@5.0.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@vitejs/plugin-react@5.1.0(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/core': 7.28.4 '@babel/plugin-transform-react-jsx-self': 
7.27.1(@babel/core@7.28.4) '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.28.4) - '@rolldown/pluginutils': 1.0.0-beta.38 + '@rolldown/pluginutils': 1.0.0-beta.43 '@types/babel__core': 7.20.5 - react-refresh: 0.17.0 - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + react-refresh: 0.18.0 + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@vitejs/plugin-react@5.0.4(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@vitejs/plugin-react@5.1.0(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@babel/core': 7.28.4 '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.4) '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.28.4) - '@rolldown/pluginutils': 1.0.0-beta.38 + '@rolldown/pluginutils': 1.0.0-beta.43 '@types/babel__core': 7.20.5 - react-refresh: 0.17.0 - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + react-refresh: 0.18.0 + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - '@vitejs/plugin-vue@6.0.1(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3))': + '@vitejs/plugin-vue@6.0.1(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))(vue@3.5.22(typescript@5.9.3))': dependencies: '@rolldown/pluginutils': 1.0.0-beta.29 - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) vue: 3.5.22(typescript@5.9.3) - '@vitest/coverage-istanbul@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@vitest/coverage-istanbul@3.2.4(vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@istanbuljs/schema': 0.1.3 debug: 4.4.3 @@ -12159,7 +12162,7 @@ snapshots: magicast: 0.3.5 test-exclude: 7.0.1 tinyrainbow: 2.0.0 - vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitest: 3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color @@ -12171,13 +12174,13 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': + '@vitest/mocker@3.2.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.19 optionalDependencies: - vite: 
6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) '@vitest/pretty-format@3.2.4': dependencies: @@ -13108,7 +13111,7 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7): + drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7): optionalDependencies: '@types/pg': 8.15.5 gel: 2.1.1 @@ -13116,9 +13119,14 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 - drizzle-zod@0.8.3(drizzle-orm@0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): + drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): dependencies: - drizzle-orm: 0.44.6(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) + drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) + zod: 3.25.76 + + drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): + dependencies: + drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) zod: 4.1.11 dunder-proto@1.0.1: @@ -13406,14 +13414,14 @@ snapshots: escape-string-regexp@4.0.0: {} - eslint-compat-utils@0.5.1(eslint@9.38.0(jiti@2.6.0)): + eslint-compat-utils@0.5.1(eslint@9.38.0(jiti@2.6.1)): dependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) semver: 7.7.3 - eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.0)): + eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.1)): dependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) eslint-import-context@0.1.9(unrs-resolver@1.11.1): dependencies: @@ -13422,10 +13430,10 @@ snapshots: optionalDependencies: unrs-resolver: 1.11.1 - eslint-import-resolver-typescript@4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0)))(eslint@9.38.0(jiti@2.6.0)): + eslint-import-resolver-typescript@4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1)))(eslint@9.38.0(jiti@2.6.1)): dependencies: debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) eslint-import-context: 0.1.9(unrs-resolver@1.11.1) get-tsconfig: 4.10.1 is-bun-module: 2.0.0 @@ -13433,23 +13441,23 @@ snapshots: tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0)) + eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1)) transitivePeerDependencies: - supports-color - eslint-plugin-es-x@7.8.0(eslint@9.38.0(jiti@2.6.0)): + eslint-plugin-es-x@7.8.0(eslint@9.38.0(jiti@2.6.1)): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.0)) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.1)) '@eslint-community/regexpp': 4.12.1 - eslint: 9.38.0(jiti@2.6.0) - eslint-compat-utils: 0.5.1(eslint@9.38.0(jiti@2.6.0)) + eslint: 9.38.0(jiti@2.6.1) + eslint-compat-utils: 0.5.1(eslint@9.38.0(jiti@2.6.1)) - 
eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0)): + eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1)): dependencies: - '@typescript-eslint/types': 8.46.1 + '@typescript-eslint/types': 8.46.2 comment-parser: 1.4.1 debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) eslint-import-context: 0.1.9(unrs-resolver@1.11.1) is-glob: 4.0.3 minimatch: 10.0.3 @@ -13457,16 +13465,16 @@ snapshots: stable-hash-x: 0.2.0 unrs-resolver: 1.11.1 optionalDependencies: - '@typescript-eslint/utils': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@typescript-eslint/utils': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) transitivePeerDependencies: - supports-color - eslint-plugin-n@17.23.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3): + eslint-plugin-n@17.23.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.0)) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.1)) enhanced-resolve: 5.18.3 - eslint: 9.38.0(jiti@2.6.0) - eslint-plugin-es-x: 7.8.0(eslint@9.38.0(jiti@2.6.0)) + eslint: 9.38.0(jiti@2.6.1) + eslint-plugin-es-x: 7.8.0(eslint@9.38.0(jiti@2.6.1)) get-tsconfig: 4.10.1 globals: 15.15.0 globrex: 0.1.2 @@ -13476,24 +13484,24 @@ snapshots: transitivePeerDependencies: - typescript - eslint-plugin-prettier@5.5.4(eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.0)))(eslint@9.38.0(jiti@2.6.0))(prettier@3.6.2): + eslint-plugin-prettier@5.5.4(eslint-config-prettier@10.1.8(eslint@9.38.0(jiti@2.6.1)))(eslint@9.38.0(jiti@2.6.1))(prettier@3.6.2): dependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) prettier: 3.6.2 prettier-linter-helpers: 1.0.0 synckit: 0.11.11 optionalDependencies: - eslint-config-prettier: 10.1.8(eslint@9.38.0(jiti@2.6.0)) + eslint-config-prettier: 10.1.8(eslint@9.38.0(jiti@2.6.1)) - eslint-plugin-react-hooks@5.2.0(eslint@9.38.0(jiti@2.6.0)): + eslint-plugin-react-hooks@5.2.0(eslint@9.38.0(jiti@2.6.1)): dependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) - eslint-plugin-react-refresh@0.4.24(eslint@9.38.0(jiti@2.6.0)): + eslint-plugin-react-refresh@0.4.24(eslint@9.38.0(jiti@2.6.1)): dependencies: - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) - eslint-plugin-react@7.37.5(eslint@9.38.0(jiti@2.6.0)): + eslint-plugin-react@7.37.5(eslint@9.38.0(jiti@2.6.1)): dependencies: array-includes: 3.1.9 array.prototype.findlast: 1.2.5 @@ -13501,7 +13509,7 @@ snapshots: array.prototype.tosorted: 1.1.4 doctrine: 2.1.0 es-iterator-helpers: 1.2.1 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) estraverse: 5.3.0 hasown: 2.0.2 jsx-ast-utils: 3.3.5 @@ -13515,10 +13523,10 @@ snapshots: string.prototype.matchall: 4.0.12 string.prototype.repeat: 1.0.0 - eslint-plugin-solid@0.14.5(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3): + eslint-plugin-solid@0.14.5(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3): dependencies: - '@typescript-eslint/utils': 8.46.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - eslint: 9.38.0(jiti@2.6.0) + '@typescript-eslint/utils': 8.46.2(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + eslint: 9.38.0(jiti@2.6.1) estraverse: 5.3.0 is-html: 2.0.0 kebab-case: 1.0.2 @@ -13537,9 +13545,9 @@ snapshots: eslint-visitor-keys@4.2.1: {} - eslint@9.38.0(jiti@2.6.0): + eslint@9.38.0(jiti@2.6.1): dependencies: - '@eslint-community/eslint-utils': 
4.9.0(eslint@9.38.0(jiti@2.6.0)) + '@eslint-community/eslint-utils': 4.9.0(eslint@9.38.0(jiti@2.6.1)) '@eslint-community/regexpp': 4.12.1 '@eslint/config-array': 0.21.1 '@eslint/config-helpers': 0.4.1 @@ -13574,7 +13582,7 @@ snapshots: natural-compare: 1.4.0 optionator: 0.9.4 optionalDependencies: - jiti: 2.6.0 + jiti: 2.6.1 transitivePeerDependencies: - supports-color @@ -14478,7 +14486,7 @@ snapshots: jiti@1.21.7: {} - jiti@2.6.0: {} + jiti@2.6.1: {} jju@1.4.0: {} @@ -14634,13 +14642,13 @@ snapshots: kleur@4.1.5: {} - knip@5.66.1(@types/node@24.7.0)(typescript@5.9.3): + knip@5.66.3(@types/node@24.7.0)(typescript@5.9.3): dependencies: '@nodelib/fs.walk': 1.2.8 '@types/node': 24.7.0 fast-glob: 3.3.3 formatly: 0.3.0 - jiti: 2.6.0 + jiti: 2.6.1 js-yaml: 4.1.0 minimist: 1.2.8 oxc-resolver: 11.8.4 @@ -14662,50 +14670,54 @@ snapshots: prelude-ls: 1.2.1 type-check: 0.4.0 - lightningcss-darwin-arm64@1.30.1: + lightningcss-android-arm64@1.30.2: + optional: true + + lightningcss-darwin-arm64@1.30.2: optional: true - lightningcss-darwin-x64@1.30.1: + lightningcss-darwin-x64@1.30.2: optional: true - lightningcss-freebsd-x64@1.30.1: + lightningcss-freebsd-x64@1.30.2: optional: true - lightningcss-linux-arm-gnueabihf@1.30.1: + lightningcss-linux-arm-gnueabihf@1.30.2: optional: true - lightningcss-linux-arm64-gnu@1.30.1: + lightningcss-linux-arm64-gnu@1.30.2: optional: true - lightningcss-linux-arm64-musl@1.30.1: + lightningcss-linux-arm64-musl@1.30.2: optional: true - lightningcss-linux-x64-gnu@1.30.1: + lightningcss-linux-x64-gnu@1.30.2: optional: true - lightningcss-linux-x64-musl@1.30.1: + lightningcss-linux-x64-musl@1.30.2: optional: true - lightningcss-win32-arm64-msvc@1.30.1: + lightningcss-win32-arm64-msvc@1.30.2: optional: true - lightningcss-win32-x64-msvc@1.30.1: + lightningcss-win32-x64-msvc@1.30.2: optional: true - lightningcss@1.30.1: + lightningcss@1.30.2: dependencies: detect-libc: 2.0.4 optionalDependencies: - lightningcss-darwin-arm64: 1.30.1 - lightningcss-darwin-x64: 1.30.1 - lightningcss-freebsd-x64: 1.30.1 - lightningcss-linux-arm-gnueabihf: 1.30.1 - lightningcss-linux-arm64-gnu: 1.30.1 - lightningcss-linux-arm64-musl: 1.30.1 - lightningcss-linux-x64-gnu: 1.30.1 - lightningcss-linux-x64-musl: 1.30.1 - lightningcss-win32-arm64-msvc: 1.30.1 - lightningcss-win32-x64-msvc: 1.30.1 + lightningcss-android-arm64: 1.30.2 + lightningcss-darwin-arm64: 1.30.2 + lightningcss-darwin-x64: 1.30.2 + lightningcss-freebsd-x64: 1.30.2 + lightningcss-linux-arm-gnueabihf: 1.30.2 + lightningcss-linux-arm64-gnu: 1.30.2 + lightningcss-linux-arm64-musl: 1.30.2 + lightningcss-linux-x64-gnu: 1.30.2 + lightningcss-linux-x64-musl: 1.30.2 + lightningcss-win32-arm64-msvc: 1.30.2 + lightningcss-win32-x64-msvc: 1.30.2 lilconfig@3.1.3: {} @@ -15029,7 +15041,7 @@ snapshots: '@types/whatwg-url': 11.0.5 whatwg-url: 14.2.0 - mongodb@6.18.0(socks@2.8.7): + mongodb@6.20.0(socks@2.8.7): dependencies: '@mongodb-js/saslprep': 1.3.0 bson: 6.10.4 @@ -15614,7 +15626,7 @@ snapshots: forwarded: 0.2.0 ipaddr.js: 1.9.1 - publint@0.3.14: + publint@0.3.15: dependencies: '@publint/pack': 0.1.2 package-manager-detector: 1.3.0 @@ -15681,7 +15693,7 @@ snapshots: react-is@17.0.2: {} - react-refresh@0.17.0: {} + react-refresh@0.18.0: {} react@19.2.0: {} @@ -15872,7 +15884,7 @@ snapshots: dependencies: queue-microtask: 1.2.3 - rxdb@16.19.1(rxjs@7.8.2)(socks@2.8.7): + rxdb@16.20.0(rxjs@7.8.2)(socks@2.8.7): dependencies: '@babel/runtime': 7.28.4 '@types/clone': 2.1.4 @@ -15898,7 +15910,7 @@ snapshots: js-base64: 3.7.8 
jsonschema-key-compression: 1.7.0 mingo: 6.5.6 - mongodb: 6.18.0(socks@2.8.7) + mongodb: 6.20.0(socks@2.8.7) nats: 2.29.3 oblivious-set: 1.4.0 reconnecting-websocket: 4.4.0 @@ -16082,32 +16094,32 @@ snapshots: interpret: 1.4.0 rechoir: 0.6.2 - sherif-darwin-arm64@1.6.1: + sherif-darwin-arm64@1.7.0: optional: true - sherif-darwin-x64@1.6.1: + sherif-darwin-x64@1.7.0: optional: true - sherif-linux-arm64@1.6.1: + sherif-linux-arm64@1.7.0: optional: true - sherif-linux-x64@1.6.1: + sherif-linux-x64@1.7.0: optional: true - sherif-windows-arm64@1.6.1: + sherif-windows-arm64@1.7.0: optional: true - sherif-windows-x64@1.6.1: + sherif-windows-x64@1.7.0: optional: true - sherif@1.6.1: + sherif@1.7.0: optionalDependencies: - sherif-darwin-arm64: 1.6.1 - sherif-darwin-x64: 1.6.1 - sherif-linux-arm64: 1.6.1 - sherif-linux-x64: 1.6.1 - sherif-windows-arm64: 1.6.1 - sherif-windows-x64: 1.6.1 + sherif-darwin-arm64: 1.7.0 + sherif-darwin-x64: 1.7.0 + sherif-linux-arm64: 1.7.0 + sherif-linux-x64: 1.7.0 + sherif-windows-arm64: 1.7.0 + sherif-windows-x64: 1.7.0 shx@0.4.0: dependencies: @@ -16444,26 +16456,26 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - svelte-check@4.3.3(picomatch@4.0.3)(svelte@5.41.0)(typescript@5.9.3): + svelte-check@4.3.3(picomatch@4.0.3)(svelte@5.42.2)(typescript@5.9.3): dependencies: '@jridgewell/trace-mapping': 0.3.31 chokidar: 4.0.3 fdir: 6.5.0(picomatch@4.0.3) picocolors: 1.1.1 sade: 1.8.1 - svelte: 5.41.0 + svelte: 5.42.2 typescript: 5.9.3 transitivePeerDependencies: - picomatch - svelte2tsx@0.7.42(svelte@5.41.0)(typescript@5.9.3): + svelte2tsx@0.7.42(svelte@5.42.2)(typescript@5.9.3): dependencies: dedent-js: 1.0.1 pascal-case: 3.1.2 - svelte: 5.41.0 + svelte: 5.42.2 typescript: 5.9.3 - svelte@5.41.0: + svelte@5.42.2: dependencies: '@jridgewell/remapping': 2.3.5 '@jridgewell/sourcemap-codec': 1.5.5 @@ -16513,7 +16525,7 @@ snapshots: transitivePeerDependencies: - ts-node - tailwindcss@4.1.14: {} + tailwindcss@4.1.16: {} tapable@2.2.3: {} @@ -16730,13 +16742,13 @@ snapshots: typescript: 5.9.3 yaml: 2.8.1 - typescript-eslint@8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3): + typescript-eslint@8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - '@typescript-eslint/parser': 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/typescript-estree': 8.44.1(typescript@5.9.3) - '@typescript-eslint/utils': 8.44.1(eslint@9.38.0(jiti@2.6.0))(typescript@5.9.3) - eslint: 9.38.0(jiti@2.6.0) + '@typescript-eslint/utils': 8.44.1(eslint@9.38.0(jiti@2.6.1))(typescript@5.9.3) + eslint: 9.38.0(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color @@ -16856,13 +16868,13 @@ snapshots: vary@1.1.2: {} - vite-node@3.2.4(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vite-node@3.2.4(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: cac: 6.7.14 debug: 4.4.3 es-module-lexer: 1.7.0 pathe: 2.0.3 - vite: 
6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@types/node' - jiti @@ -16877,7 +16889,7 @@ snapshots: - tsx - yaml - vite-plugin-dts@4.2.3(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-plugin-dts@4.2.3(@types/node@24.7.0)(rollup@4.52.5)(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: '@microsoft/api-extractor': 7.47.7(@types/node@24.7.0) '@rollup/pluginutils': 5.3.0(rollup@4.52.5) @@ -16890,17 +16902,17 @@ snapshots: magic-string: 0.30.19 typescript: 5.9.3 optionalDependencies: - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - '@types/node' - rollup - supports-color - vite-plugin-externalize-deps@0.10.0(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-plugin-externalize-deps@0.10.0(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: '@babel/core': 7.28.4 '@types/babel__core': 7.20.5 @@ -16908,14 +16920,14 @@ snapshots: merge-anything: 5.1.7 solid-js: 1.9.9 solid-refresh: 0.6.3(solid-js@1.9.9) - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) optionalDependencies: '@testing-library/jest-dom': 6.9.1 transitivePeerDependencies: - supports-color - vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: '@babel/core': 7.28.4 '@types/babel__core': 7.20.5 @@ -16923,15 +16935,15 @@ snapshots: merge-anything: 5.1.7 solid-js: 1.9.9 solid-refresh: 0.6.3(solid-js@1.9.9) 
- vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) optionalDependencies: '@testing-library/jest-dom': 6.9.1 transitivePeerDependencies: - supports-color optional: true - vite-plugin-solid@2.11.9(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-plugin-solid@2.11.10(@testing-library/jest-dom@6.9.1)(solid-js@1.9.9)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: '@babel/core': 7.28.4 '@types/babel__core': 7.20.5 @@ -16939,47 +16951,47 @@ snapshots: merge-anything: 5.1.7 solid-js: 1.9.9 solid-refresh: 0.6.3(solid-js@1.9.9) - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu: 1.1.1(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vitefu: 1.1.1(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) optionalDependencies: '@testing-library/jest-dom': 6.9.1 transitivePeerDependencies: - supports-color - vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: debug: 4.4.3 globrex: 0.1.2 tsconfck: 3.1.6(typescript@5.9.3) optionalDependencies: - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - typescript - vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: debug: 4.4.3 globrex: 0.1.2 tsconfck: 3.1.6(typescript@5.9.3) optionalDependencies: - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - typescript - vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + 
vite-tsconfig-paths@5.1.4(typescript@5.9.3)(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): dependencies: debug: 4.4.3 globrex: 0.1.2 tsconfck: 3.1.6(typescript@5.9.3) optionalDependencies: - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - typescript - vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.11 fdir: 6.5.0(picomatch@4.0.3) @@ -16990,14 +17002,14 @@ snapshots: optionalDependencies: '@types/node': 22.18.1 fsevents: 2.3.3 - jiti: 2.6.0 - lightningcss: 1.30.1 + jiti: 2.6.1 + lightningcss: 1.30.2 sass: 1.90.0 terser: 5.44.0 tsx: 4.20.6 yaml: 2.8.1 - vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.11 fdir: 6.5.0(picomatch@4.0.3) @@ -17008,14 +17020,14 @@ snapshots: optionalDependencies: '@types/node': 24.7.0 fsevents: 2.3.3 - jiti: 2.6.0 - lightningcss: 1.30.1 + jiti: 2.6.1 + lightningcss: 1.30.2 sass: 1.90.0 terser: 5.44.0 tsx: 4.20.6 yaml: 2.8.1 - vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vite@7.1.11(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.11 fdir: 6.5.0(picomatch@4.0.3) @@ -17026,14 +17038,14 @@ snapshots: optionalDependencies: '@types/node': 24.7.0 fsevents: 2.3.3 - jiti: 2.6.0 - lightningcss: 1.30.1 + jiti: 2.6.1 + lightningcss: 1.30.2 sass: 1.90.0 terser: 5.44.0 tsx: 4.20.6 yaml: 2.8.1 - vite@7.1.5(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.11 fdir: 6.5.0(picomatch@4.0.3) @@ -17044,30 +17056,30 @@ snapshots: optionalDependencies: '@types/node': 24.7.0 fsevents: 2.3.3 - jiti: 2.6.0 - lightningcss: 1.30.1 + jiti: 2.6.1 + lightningcss: 1.30.2 sass: 1.90.0 terser: 5.44.0 tsx: 4.20.6 yaml: 2.8.1 - vitefu@1.1.1(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vitefu@1.1.1(vite@6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): optionalDependencies: - vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@22.18.1)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu@1.1.1(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vitefu@1.1.1(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): optionalDependencies: - vite: 
6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitefu@1.1.1(vite@7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): + vitefu@1.1.1(vite@7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)): optionalDependencies: - vite: 7.1.11(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.1.12(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.0)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.7.0)(jiti@2.6.1)(jsdom@27.0.1(postcss@8.5.6))(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1): dependencies: '@types/chai': 5.2.2 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) + '@vitest/mocker': 3.2.4(vite@6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -17085,8 +17097,8 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) - vite-node: 3.2.4(@types/node@24.7.0)(jiti@2.6.0)(lightningcss@1.30.1)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite: 6.4.1(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) + vite-node: 3.2.4(@types/node@24.7.0)(jiti@2.6.1)(lightningcss@1.30.2)(sass@1.90.0)(terser@5.44.0)(tsx@4.20.6)(yaml@2.8.1) why-is-node-running: 2.3.0 optionalDependencies: '@types/debug': 4.1.12 @@ -17110,10 +17122,10 @@ snapshots: vscode-uri@3.1.0: {} - vue-eslint-parser@10.2.0(eslint@9.38.0(jiti@2.6.0)): + vue-eslint-parser@10.2.0(eslint@9.38.0(jiti@2.6.1)): dependencies: debug: 4.4.3 - eslint: 9.38.0(jiti@2.6.0) + eslint: 9.38.0(jiti@2.6.1) eslint-scope: 8.4.0 eslint-visitor-keys: 4.2.1 espree: 10.4.0 From c2a5c28919733ee33cbd57903d3ed81656a0d842 Mon Sep 17 00:00:00 2001 From: Lucas Weng <30640930+lucasweng@users.noreply.github.com> Date: Mon, 27 Oct 2025 23:30:47 +0800 Subject: [PATCH 38/56] feat: add exact refetch targeting and improve utils.refetch() behavior (#552) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: implement exact targeting for refetching queries to prevent unintended cascading effects * feat: add refetchType option for more granular refetching control * chore: add changeset * refactor: make utils.refetch() bypass enabled: false and remove refetchType Changes: - Use queryObserver.refetch() for all refetch calls (both utils and internal handlers) - Bypasses enabled: false to support manual fetch patterns (matches TanStack Query hook behavior) - Fixes clearError() to work even when enabled: false - Return QueryObserverResult instead of void for better DX - Remove refetchType option - not needed with exact targeting via observer - Add tests for clearError() exact targeting and 
throwOnError behavior - Update docs to clarify refetch semantics With exact targeting via queryObserver, refetchType filtering doesn't add value. Users always want their collection data refetched, whether from utils.refetch() or internal mutation handlers. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: clearError should return Promise not QueryObserverResult * fix: type error in query.test --------- Co-authored-by: Kyle Mathews Co-authored-by: Claude --- .changeset/soft-doodles-cover.md | 7 + docs/collections/query-collection.md | 131 ++++---- packages/query-db-collection/src/query.ts | 50 ++- .../query-db-collection/tests/query.test.ts | 301 +++++++++++++++++- 4 files changed, 404 insertions(+), 85 deletions(-) create mode 100644 .changeset/soft-doodles-cover.md diff --git a/.changeset/soft-doodles-cover.md b/.changeset/soft-doodles-cover.md new file mode 100644 index 000000000..102b18448 --- /dev/null +++ b/.changeset/soft-doodles-cover.md @@ -0,0 +1,7 @@ +--- +"@tanstack/query-db-collection": patch +--- + +**Behavior change**: `utils.refetch()` now uses exact query key targeting (previously used prefix matching). This prevents unintended cascading refetches of related queries. For example, refetching `['todos', 'project-1']` will no longer trigger refetches of `['todos']` or `['todos', 'project-2']`. + +Additionally, `utils.refetch()` now bypasses `enabled: false` to support manual/imperative refetch patterns (matching TanStack Query hook behavior) and returns `QueryObserverResult` instead of `void` for better DX. diff --git a/docs/collections/query-collection.md b/docs/collections/query-collection.md index cd4e7eedc..91a0f7dea 100644 --- a/docs/collections/query-collection.md +++ b/docs/collections/query-collection.md @@ -9,6 +9,7 @@ Query collections provide seamless integration between TanStack DB and TanStack ## Overview The `@tanstack/query-db-collection` package allows you to create collections that: + - Automatically sync with remote data via TanStack Query - Support optimistic updates with automatic rollback on errors - Handle persistence through customizable mutation handlers @@ -23,17 +24,17 @@ npm install @tanstack/query-db-collection @tanstack/query-core @tanstack/db ## Basic Usage ```typescript -import { QueryClient } from '@tanstack/query-core' -import { createCollection } from '@tanstack/db' -import { queryCollectionOptions } from '@tanstack/query-db-collection' +import { QueryClient } from "@tanstack/query-core" +import { createCollection } from "@tanstack/db" +import { queryCollectionOptions } from "@tanstack/query-db-collection" const queryClient = new QueryClient() const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: async () => { - const response = await fetch('/api/todos') + const response = await fetch("/api/todos") return response.json() }, queryClient, @@ -55,7 +56,7 @@ The `queryCollectionOptions` function accepts the following options: ### Query Options -- `select`: Function that lets extract array items when they’re wrapped with metadata +- `select`: Function that lets extract array items when they're wrapped with metadata - `enabled`: Whether the query should automatically run (default: `true`) - `refetchInterval`: Refetch interval in milliseconds - `retry`: Retry configuration for failed queries @@ -83,30 +84,30 @@ You can define handlers that are called when mutations occur. 
These handlers can ```typescript const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: fetchTodos, queryClient, getKey: (item) => item.id, - + onInsert: async ({ transaction }) => { - const newItems = transaction.mutations.map(m => m.modified) + const newItems = transaction.mutations.map((m) => m.modified) await api.createTodos(newItems) // Returning nothing or { refetch: true } will trigger a refetch // Return { refetch: false } to skip automatic refetch }, - + onUpdate: async ({ transaction }) => { - const updates = transaction.mutations.map(m => ({ + const updates = transaction.mutations.map((m) => ({ id: m.key, - changes: m.changes + changes: m.changes, })) await api.updateTodos(updates) }, - + onDelete: async ({ transaction }) => { - const ids = transaction.mutations.map(m => m.key) + const ids = transaction.mutations.map((m) => m.key) await api.deleteTodos(ids) - } + }, }) ) ``` @@ -119,14 +120,15 @@ You can control this behavior by returning an object with a `refetch` property: ```typescript onInsert: async ({ transaction }) => { - await api.createTodos(transaction.mutations.map(m => m.modified)) - + await api.createTodos(transaction.mutations.map((m) => m.modified)) + // Skip the automatic refetch return { refetch: false } } ``` This is useful when: + - You're confident the server state matches what you sent - You want to avoid unnecessary network requests - You're handling state updates through other mechanisms (like WebSockets) @@ -135,7 +137,10 @@ This is useful when: The collection provides these utility methods via `collection.utils`: -- `refetch()`: Manually trigger a refetch of the query +- `refetch(opts?)`: Manually trigger a refetch of the query + - `opts.throwOnError`: Whether to throw an error if the refetch fails (default: `false`) + - Bypasses `enabled: false` to support imperative/manual refetching patterns (similar to hook `refetch()` behavior) + - Returns `QueryObserverResult` for inspecting the result ## Direct Writes @@ -144,10 +149,12 @@ Direct writes are intended for scenarios where the normal query/mutation flow do ### Understanding the Data Stores Query Collections maintain two data stores: + 1. **Synced Data Store** - The authoritative state synchronized with the server via `queryFn` 2. 
**Optimistic Mutations Store** - Temporary changes that are applied optimistically before server confirmation Normal collection operations (insert, update, delete) create optimistic mutations that are: + - Applied immediately to the UI - Sent to the server via persistence handlers - Rolled back automatically if the server request fails @@ -158,6 +165,7 @@ Direct writes bypass this system entirely and write directly to the synced data ### When to Use Direct Writes Direct writes should be used when: + - You need to sync real-time updates from WebSockets or server-sent events - You're dealing with large datasets where refetching everything is too expensive - You receive incremental updates or server-computed field updates @@ -167,19 +175,28 @@ Direct writes should be used when: ```typescript // Insert a new item directly to the synced data store -todosCollection.utils.writeInsert({ id: '1', text: 'Buy milk', completed: false }) +todosCollection.utils.writeInsert({ + id: "1", + text: "Buy milk", + completed: false, +}) // Update an existing item in the synced data store -todosCollection.utils.writeUpdate({ id: '1', completed: true }) +todosCollection.utils.writeUpdate({ id: "1", completed: true }) // Delete an item from the synced data store -todosCollection.utils.writeDelete('1') +todosCollection.utils.writeDelete("1") // Upsert (insert or update) in the synced data store -todosCollection.utils.writeUpsert({ id: '1', text: 'Buy milk', completed: false }) +todosCollection.utils.writeUpsert({ + id: "1", + text: "Buy milk", + completed: false, +}) ``` These operations: + - Write directly to the synced data store - Do NOT create optimistic mutations - Do NOT trigger automatic query refetches @@ -192,10 +209,10 @@ The `writeBatch` method allows you to perform multiple operations atomically. 
An ```typescript todosCollection.utils.writeBatch(() => { - todosCollection.utils.writeInsert({ id: '1', text: 'Buy milk' }) - todosCollection.utils.writeInsert({ id: '2', text: 'Walk dog' }) - todosCollection.utils.writeUpdate({ id: '3', completed: true }) - todosCollection.utils.writeDelete('4') + todosCollection.utils.writeInsert({ id: "1", text: "Buy milk" }) + todosCollection.utils.writeInsert({ id: "2", text: "Walk dog" }) + todosCollection.utils.writeUpdate({ id: "3", completed: true }) + todosCollection.utils.writeDelete("4") }) ``` @@ -203,17 +220,17 @@ todosCollection.utils.writeBatch(() => { ```typescript // Handle real-time updates from WebSocket without triggering full refetches -ws.on('todos:update', (changes) => { +ws.on("todos:update", (changes) => { todosCollection.utils.writeBatch(() => { - changes.forEach(change => { + changes.forEach((change) => { switch (change.type) { - case 'insert': + case "insert": todosCollection.utils.writeInsert(change.data) break - case 'update': + case "update": todosCollection.utils.writeUpdate(change.data) break - case 'delete': + case "delete": todosCollection.utils.writeDelete(change.id) break } @@ -229,13 +246,13 @@ When the server returns computed fields (like server-generated IDs or timestamps ```typescript const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: fetchTodos, queryClient, getKey: (item) => item.id, onInsert: async ({ transaction }) => { - const newItems = transaction.mutations.map(m => m.modified) + const newItems = transaction.mutations.map((m) => m.modified) // Send to server and get back items with server-computed fields const serverItems = await api.createTodos(newItems) @@ -243,7 +260,7 @@ const todosCollection = createCollection( // Sync server-computed fields (like server-generated IDs, timestamps, etc.) 
// to the collection's synced data store todosCollection.utils.writeBatch(() => { - serverItems.forEach(serverItem => { + serverItems.forEach((serverItem) => { todosCollection.utils.writeInsert(serverItem) }) }) @@ -254,26 +271,26 @@ const todosCollection = createCollection( }, onUpdate: async ({ transaction }) => { - const updates = transaction.mutations.map(m => ({ + const updates = transaction.mutations.map((m) => ({ id: m.key, - changes: m.changes + changes: m.changes, })) const serverItems = await api.updateTodos(updates) // Sync server-computed fields from the update response todosCollection.utils.writeBatch(() => { - serverItems.forEach(serverItem => { + serverItems.forEach((serverItem) => { todosCollection.utils.writeUpdate(serverItem) }) }) return { refetch: false } - } + }, }) ) // Usage is just like a regular collection -todosCollection.insert({ text: 'Buy milk', completed: false }) +todosCollection.insert({ text: "Buy milk", completed: false }) ``` ### Example: Large Dataset Pagination @@ -282,10 +299,10 @@ todosCollection.insert({ text: 'Buy milk', completed: false }) // Load additional pages without refetching existing data const loadMoreTodos = async (page) => { const newTodos = await api.getTodos({ page, limit: 50 }) - + // Add new items without affecting existing ones todosCollection.utils.writeBatch(() => { - newTodos.forEach(todo => { + newTodos.forEach((todo) => { todosCollection.utils.writeInsert(todo) }) }) @@ -318,31 +335,33 @@ Since the query collection expects `queryFn` to return the complete state, you c ```typescript const todosCollection = createCollection( queryCollectionOptions({ - queryKey: ['todos'], + queryKey: ["todos"], queryFn: async ({ queryKey }) => { // Get existing data from cache const existingData = queryClient.getQueryData(queryKey) || [] - + // Fetch only new/updated items (e.g., changes since last sync) - const lastSyncTime = localStorage.getItem('todos-last-sync') - const newData = await fetch(`/api/todos?since=${lastSyncTime}`).then(r => r.json()) - + const lastSyncTime = localStorage.getItem("todos-last-sync") + const newData = await fetch(`/api/todos?since=${lastSyncTime}`).then( + (r) => r.json() + ) + // Merge new data with existing data - const existingMap = new Map(existingData.map(item => [item.id, item])) - + const existingMap = new Map(existingData.map((item) => [item.id, item])) + // Apply updates and additions - newData.forEach(item => { + newData.forEach((item) => { existingMap.set(item.id, item) }) - + // Handle deletions if your API provides them if (newData.deletions) { - newData.deletions.forEach(id => existingMap.delete(id)) + newData.deletions.forEach((id) => existingMap.delete(id)) } - + // Update sync time - localStorage.setItem('todos-last-sync', new Date().toISOString()) - + localStorage.setItem("todos-last-sync", new Date().toISOString()) + // Return the complete merged state return Array.from(existingMap.values()) }, @@ -353,6 +372,7 @@ const todosCollection = createCollection( ``` This pattern allows you to: + - Fetch only incremental changes from your API - Merge those changes with existing data - Return the complete state that the collection expects @@ -363,6 +383,7 @@ This pattern allows you to: Direct writes update the collection immediately and also update the TanStack Query cache. However, they do not prevent the normal query sync behavior. If your `queryFn` returns data that conflicts with your direct writes, the query data will take precedence. To handle this properly: + 1. 
Use `{ refetch: false }` in your persistence handlers when using direct writes 2. Set appropriate `staleTime` to prevent unnecessary refetches 3. Design your `queryFn` to be aware of incremental updates (e.g., only fetch new data) @@ -376,4 +397,4 @@ All direct write methods are available on `collection.utils`: - `writeDelete(keys)`: Delete one or more items directly - `writeUpsert(data)`: Insert or update one or more items directly - `writeBatch(callback)`: Perform multiple operations atomically -- `refetch()`: Manually trigger a refetch of the query +- `refetch(opts?)`: Manually trigger a refetch of the query diff --git a/packages/query-db-collection/src/query.ts b/packages/query-db-collection/src/query.ts index 9512bf47c..bbfd6db56 100644 --- a/packages/query-db-collection/src/query.ts +++ b/packages/query-db-collection/src/query.ts @@ -11,6 +11,7 @@ import type { QueryFunctionContext, QueryKey, QueryObserverOptions, + QueryObserverResult, } from "@tanstack/query-core" import type { BaseCollectionConfig, @@ -131,8 +132,11 @@ export interface QueryCollectionConfig< /** * Type for the refetch utility function + * Returns the QueryObserverResult from TanStack Query */ -export type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise +export type RefetchFn = (opts?: { + throwOnError?: boolean +}) => Promise | void> /** * Utility methods available on Query Collections for direct writes and manual operations. @@ -420,6 +424,8 @@ export function queryCollectionOptions( let errorCount = 0 /** The timestamp for when the query most recently returned the status as "error" */ let lastErrorUpdatedAt = 0 + /** Reference to the QueryObserver for imperative refetch */ + let queryObserver: QueryObserver, any, Array, Array, any> const internalSync: SyncConfig[`sync`] = (params) => { const { begin, write, commit, markReady, collection } = params @@ -452,6 +458,9 @@ export function queryCollectionOptions( any >(queryClient, observerOptions) + // Store reference for imperative refetch + queryObserver = localObserver + let isSubscribed = false let actualUnsubscribeFn: (() => void) | null = null @@ -595,17 +604,32 @@ export function queryCollectionOptions( /** * Refetch the query data - * @returns Promise that resolves when the refetch is complete + * + * Uses queryObserver.refetch() because: + * - Bypasses `enabled: false` to support manual/imperative refetch patterns (e.g., button-triggered fetch) + * - Ensures clearError() works even when enabled: false + * - Always refetches THIS specific collection (exact targeting via observer) + * - Respects retry, retryDelay, and other observer options + * + * This matches TanStack Query's hook behavior where refetch() bypasses enabled: false. + * See: https://tanstack.com/query/latest/docs/framework/react/guides/disabling-queries + * + * Used by both: + * - utils.refetch() - for explicit user-triggered refetches + * - Internal handlers (onInsert/onUpdate/onDelete) - after mutations to get fresh data + * + * @returns Promise that resolves when the refetch is complete, with QueryObserverResult */ - const refetch: RefetchFn = (opts) => { - return queryClient.refetchQueries( - { - queryKey: queryKey, - }, - { - throwOnError: opts?.throwOnError, - } - ) + const refetch: RefetchFn = async (opts) => { + // Observer is created when sync starts. If never synced, nothing to refetch. 
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!queryObserver) { + return + } + // Return the QueryObserverResult for users to inspect + return queryObserver.refetch({ + throwOnError: opts?.throwOnError, + }) } // Create write context for manual write operations @@ -699,11 +723,11 @@ export function queryCollectionOptions( lastError: () => lastError, isError: () => !!lastError, errorCount: () => errorCount, - clearError: () => { + clearError: async () => { lastError = undefined errorCount = 0 lastErrorUpdatedAt = 0 - return refetch({ throwOnError: true }) + await refetch({ throwOnError: true }) }, }, } diff --git a/packages/query-db-collection/tests/query.test.ts b/packages/query-db-collection/tests/query.test.ts index 5eb888485..b3a6dd712 100644 --- a/packages/query-db-collection/tests/query.test.ts +++ b/packages/query-db-collection/tests/query.test.ts @@ -669,52 +669,78 @@ describe(`QueryCollection`, () => { const onInsertDefault = vi.fn().mockResolvedValue(undefined) // Default behavior should refetch const onInsertFalse = vi.fn().mockResolvedValue({ refetch: false }) // No refetch - // Create a spy on the refetch function itself - const refetchSpy = vi.fn().mockResolvedValue(undefined) - // Create configs with the handlers + const queryFnDefault = vi + .fn() + .mockResolvedValue([{ id: `1`, name: `Item 1` }]) + const queryFnFalse = vi + .fn() + .mockResolvedValue([{ id: `1`, name: `Item 1` }]) + const configDefault: QueryCollectionConfig = { id: `test-default`, queryClient, queryKey: [`refetchTest`, `default`], - queryFn: vi.fn().mockResolvedValue([{ id: `1`, name: `Item 1` }]), + queryFn: queryFnDefault, getKey, onInsert: onInsertDefault, + startSync: true, } const configFalse: QueryCollectionConfig = { id: `test-false`, queryClient, queryKey: [`refetchTest`, `false`], - queryFn: vi.fn().mockResolvedValue([{ id: `1`, name: `Item 1` }]), + queryFn: queryFnFalse, getKey, onInsert: onInsertFalse, + startSync: true, } - // Mock the queryClient.refetchQueries method which is called by collection.utils.refetch() - vi.spyOn(queryClient, `refetchQueries`).mockImplementation(refetchSpy) - // Test case 1: Default behavior (undefined return) should trigger refetch const optionsDefault = queryCollectionOptions(configDefault) + const collectionDefault = createCollection(optionsDefault) + + // Wait for initial sync + await vi.waitFor(() => { + expect(collectionDefault.status).toBe(`ready`) + }) + + // Clear initial call + queryFnDefault.mockClear() + await optionsDefault.onInsert!(insertMockParams) - // Verify handler was called and refetch was triggered + // Verify handler was called and refetch was triggered (queryFn called again) expect(onInsertDefault).toHaveBeenCalledWith(insertMockParams) - expect(refetchSpy).toHaveBeenCalledTimes(1) - - // Reset mocks - refetchSpy.mockClear() + await vi.waitFor(() => { + expect(queryFnDefault).toHaveBeenCalledTimes(1) + }) // Test case 2: Explicit { refetch: false } should not trigger refetch const optionsFalse = queryCollectionOptions(configFalse) + const collectionFalse = createCollection(optionsFalse) + + // Wait for initial sync + await vi.waitFor(() => { + expect(collectionFalse.status).toBe(`ready`) + }) + + // Clear initial call + queryFnFalse.mockClear() + await optionsFalse.onInsert!(insertMockParams) - // Verify handler was called but refetch was NOT triggered + // Verify handler was called but refetch was NOT triggered (queryFn not called) expect(onInsertFalse).toHaveBeenCalledWith(insertMockParams) - 
expect(refetchSpy).not.toHaveBeenCalled() + // Wait a bit to ensure no refetch happens + await new Promise((resolve) => setTimeout(resolve, 50)) + expect(queryFnFalse).not.toHaveBeenCalled() - // Restore original function - vi.restoreAllMocks() + await Promise.all([ + collectionDefault.cleanup(), + collectionFalse.cleanup(), + ]) }) }) @@ -1958,6 +1984,247 @@ describe(`QueryCollection`, () => { }) }) + it(`should use exact targeting when refetching to avoid unintended cascading of related queries`, async () => { + // Create multiple collections with related but distinct query keys + const queryKey = [`todos`] + const queryKey1 = [`todos`, `project-1`] + const queryKey2 = [`todos`, `project-2`] + + const mockItems = [{ id: `1`, name: `Item 1` }] + const queryFn = vi.fn().mockResolvedValue(mockItems) + const queryFn1 = vi.fn().mockResolvedValue(mockItems) + const queryFn2 = vi.fn().mockResolvedValue(mockItems) + + const config: QueryCollectionConfig = { + id: `all-todos`, + queryClient, + queryKey: queryKey, + queryFn: queryFn, + getKey, + startSync: true, + } + const config1: QueryCollectionConfig = { + id: `project-1-todos`, + queryClient, + queryKey: queryKey1, + queryFn: queryFn1, + getKey, + startSync: true, + } + const config2: QueryCollectionConfig = { + id: `project-2-todos`, + queryClient, + queryKey: queryKey2, + queryFn: queryFn2, + getKey, + startSync: true, + } + + const options = queryCollectionOptions(config) + const options1 = queryCollectionOptions(config1) + const options2 = queryCollectionOptions(config2) + + const collection = createCollection(options) + const collection1 = createCollection(options1) + const collection2 = createCollection(options2) + + // Wait for initial queries to complete + await vi.waitFor(() => { + expect(queryFn).toHaveBeenCalledTimes(1) + expect(queryFn1).toHaveBeenCalledTimes(1) + expect(queryFn2).toHaveBeenCalledTimes(1) + expect(collection.status).toBe(`ready`) + }) + + // Reset call counts to test refetch behavior + queryFn.mockClear() + queryFn1.mockClear() + queryFn2.mockClear() + + // Refetch the target collection with key ['todos', 'project-1'] + await collection1.utils.refetch() + + // Verify that only the target query was refetched + await vi.waitFor(() => { + expect(queryFn1).toHaveBeenCalledTimes(1) + expect(queryFn).not.toHaveBeenCalled() + expect(queryFn2).not.toHaveBeenCalled() + }) + + // Cleanup + await Promise.all([ + collection.cleanup(), + collection1.cleanup(), + collection2.cleanup(), + ]) + }) + + it(`should use exact targeting when clearError() refetches to avoid unintended cascading`, async () => { + const queryKey1 = [`todos`, `project-1`] + const queryKey2 = [`todos`, `project-2`] + + const testError = new Error(`Test error`) + const mockItems = [{ id: `1`, name: `Item 1` }] + const queryFn1 = vi + .fn() + .mockRejectedValueOnce(testError) + .mockResolvedValue(mockItems) + const queryFn2 = vi.fn().mockResolvedValue(mockItems) + + const config1: QueryCollectionConfig = { + id: `project-1-todos-clear-error`, + queryClient, + queryKey: queryKey1, + queryFn: queryFn1, + getKey, + startSync: true, + retry: false, + } + const config2: QueryCollectionConfig = { + id: `project-2-todos-clear-error`, + queryClient, + queryKey: queryKey2, + queryFn: queryFn2, + getKey, + startSync: true, + retry: false, + } + + const options1 = queryCollectionOptions(config1) + const options2 = queryCollectionOptions(config2) + + const collection1 = createCollection(options1) + const collection2 = createCollection(options2) + + await vi.waitFor(() 
=> { + expect(collection1.utils.isError()).toBe(true) + expect(collection2.status).toBe(`ready`) + }) + + queryFn1.mockClear() + queryFn2.mockClear() + + await collection1.utils.clearError() + + await vi.waitFor(() => { + expect(queryFn1).toHaveBeenCalledTimes(1) + expect(queryFn2).not.toHaveBeenCalled() + }) + + await Promise.all([collection1.cleanup(), collection2.cleanup()]) + }) + + it(`should propagate errors when throwOnError is true in refetch`, async () => { + const testError = new Error(`Refetch error`) + const queryKey = [`throw-on-error-test`] + const queryFn = vi.fn().mockRejectedValue(testError) + + await queryClient.prefetchQuery({ queryKey, queryFn }) + + const collection = createCollection( + queryCollectionOptions({ + id: `throw-on-error-test`, + queryClient, + queryKey, + queryFn, + getKey, + retry: false, + startSync: true, + }) + ) + + await vi.waitFor(() => { + expect(collection.utils.isError()).toBe(true) + }) + + await expect( + collection.utils.refetch({ throwOnError: true }) + ).rejects.toThrow(testError) + + // Should not throw when throwOnError is false + await collection.utils.refetch({ throwOnError: false }) + + await collection.cleanup() + }) + + describe(`refetch() behavior`, () => { + it(`should refetch when collection is syncing (startSync: true)`, async () => { + const queryKey = [`refetch-test-syncing`] + const queryFn = vi.fn().mockResolvedValue([{ id: `1`, name: `A` }]) + + const collection = createCollection( + queryCollectionOptions({ + id: `refetch-test-syncing`, + queryClient, + queryKey, + queryFn, + getKey, + startSync: true, + }) + ) + + await vi.waitFor(() => { + expect(collection.status).toBe(`ready`) + }) + + queryFn.mockClear() + + await collection.utils.refetch() + expect(queryFn).toHaveBeenCalledTimes(1) + + await collection.cleanup() + }) + + it(`should refetch even when enabled: false (imperative refetch pattern)`, async () => { + const mockItems: Array = [{ id: `1`, name: `Item 1` }] + const queryKey = [`manual-fetch-test`] + const queryFn = vi.fn().mockResolvedValue(mockItems) + + const collection = createCollection( + queryCollectionOptions({ + id: `manual-fetch-test`, + queryClient, + queryKey, + queryFn, + getKey, + enabled: false, + startSync: true, + }) + ) + + // Query should not auto-fetch due to enabled: false + expect(queryFn).not.toHaveBeenCalled() + + // But manual refetch should work + await collection.utils.refetch() + expect(queryFn).toHaveBeenCalledTimes(1) + + await collection.cleanup() + }) + + it(`should be no-op when sync has not started (no observer created)`, async () => { + const queryKey = [`refetch-test-no-sync`] + const queryFn = vi.fn().mockResolvedValue([{ id: `1`, name: `A` }]) + + const collection = createCollection( + queryCollectionOptions({ + id: `refetch-test-no-sync`, + queryClient, + queryKey, + queryFn, + getKey, + startSync: false, + }) + ) + + // Refetch should be no-op because observer doesn't exist yet + await collection.utils.refetch() + expect(queryFn).not.toHaveBeenCalled() + + await collection.cleanup() + }) + }) + describe(`Error Handling`, () => { // Helper to create test collection with common configuration const createErrorHandlingTestCollection = ( From 2d4d5e121a9c5f25622f461648ebfb641c46b553 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 17:36:37 -0600 Subject: [PATCH 39/56] ci: Version Packages (#726) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- 
.changeset/soft-doodles-cover.md | 7 ------- examples/react/projects/package.json | 2 +- examples/react/todo/CHANGELOG.md | 7 +++++++ examples/react/todo/package.json | 2 +- packages/query-db-collection/CHANGELOG.md | 8 ++++++++ packages/query-db-collection/package.json | 2 +- pnpm-lock.yaml | 9 ++------- 7 files changed, 20 insertions(+), 17 deletions(-) delete mode 100644 .changeset/soft-doodles-cover.md diff --git a/.changeset/soft-doodles-cover.md b/.changeset/soft-doodles-cover.md deleted file mode 100644 index 102b18448..000000000 --- a/.changeset/soft-doodles-cover.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -"@tanstack/query-db-collection": patch ---- - -**Behavior change**: `utils.refetch()` now uses exact query key targeting (previously used prefix matching). This prevents unintended cascading refetches of related queries. For example, refetching `['todos', 'project-1']` will no longer trigger refetches of `['todos']` or `['todos', 'project-2']`. - -Additionally, `utils.refetch()` now bypasses `enabled: false` to support manual/imperative refetch patterns (matching TanStack Query hook behavior) and returns `QueryObserverResult` instead of `void` for better DX. diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index 0b8a83c61..533f25b75 100644 --- a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,7 +17,7 @@ "dependencies": { "@tailwindcss/vite": "^4.1.16", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.36", + "@tanstack/query-db-collection": "^0.2.37", "@tanstack/react-db": "^0.1.36", "@tanstack/react-router": "^1.133.32", "@tanstack/react-router-devtools": "^1.133.32", diff --git a/examples/react/todo/CHANGELOG.md b/examples/react/todo/CHANGELOG.md index 3bf337be2..28ea29fc5 100644 --- a/examples/react/todo/CHANGELOG.md +++ b/examples/react/todo/CHANGELOG.md @@ -1,5 +1,12 @@ # examples/react/todo +## 0.1.16 + +### Patch Changes + +- Updated dependencies [[`c2a5c28`](https://github.com/TanStack/db/commit/c2a5c28919733ee33cbd57903d3ed81656a0d842)]: + - @tanstack/query-db-collection@0.2.37 + ## 0.1.15 ### Patch Changes diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index d80cbc492..509d7fade 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-example-react-todo", "private": true, - "version": "0.1.15", + "version": "0.1.16", "dependencies": { "@tanstack/electric-db-collection": "workspace:^", "@tanstack/query-core": "^5.90.5", diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index ca9f6e096..fd502dbf9 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,13 @@ # @tanstack/query-db-collection +## 0.2.37 + +### Patch Changes + +- **Behavior change**: `utils.refetch()` now uses exact query key targeting (previously used prefix matching). This prevents unintended cascading refetches of related queries. For example, refetching `['todos', 'project-1']` will no longer trigger refetches of `['todos']` or `['todos', 'project-2']`. ([#552](https://github.com/TanStack/db/pull/552)) + + Additionally, `utils.refetch()` now bypasses `enabled: false` to support manual/imperative refetch patterns (matching TanStack Query hook behavior) and returns `QueryObserverResult` instead of `void` for better DX. 
+ ## 0.2.36 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index 972b5a0dd..e8cf54355 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/query-db-collection", "description": "TanStack Query collection for TanStack DB", - "version": "0.2.36", + "version": "0.2.37", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c00207ff1..9c929f92c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -196,7 +196,7 @@ importers: specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.36 + specifier: ^0.2.37 version: link:../../../packages/query-db-collection '@tanstack/react-db': specifier: ^0.1.36 @@ -478,7 +478,7 @@ importers: version: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) + version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) express: specifier: ^4.21.2 version: 4.21.2 @@ -13119,11 +13119,6 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 - drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): - dependencies: - drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) - zod: 3.25.76 - drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): dependencies: drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) From fbfa75a74badc8788aed1046915ad4c10f3da09a Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 28 Oct 2025 14:07:14 +0200 Subject: [PATCH 40/56] Support better schema type conversions --- docs/collections/powersync-collection.md | 253 +++++++++++++- .../src/PowerSyncTransactor.ts | 47 +-- .../src/definitions.ts | 237 +++++++++++-- .../powersync-db-collection/src/helpers.ts | 36 +- .../powersync-db-collection/src/powersync.ts | 232 +++++++++++-- .../src/serlization.ts | 101 ++++++ .../tests/collection-schema.test.ts | 310 ++++++++++++++++++ .../tests/powersync.test.ts | 131 +------- 8 files changed, 1123 insertions(+), 224 deletions(-) create mode 100644 packages/powersync-db-collection/src/serlization.ts create mode 100644 packages/powersync-db-collection/tests/collection-schema.test.ts diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 5cea78c2f..9fb302507 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -38,9 +38,9 @@ import { Schema, Table, column } from "@powersync/web" const APP_SCHEMA = new Schema({ documents: new Table({ name: column.text, - content: column.text, + author: column.text, created_at: column.text, - updated_at: column.text, + archived: column.integer, }), }) @@ -81,12 +81,14 @@ db.connect(new Connector()) ### 4. Create a TanStack DB Collection -There are two ways to create a collection: using type inference or using schema validation. +There are two main ways to create a collection: using type inference or using schema validation. Type inference will infer collection types from the underlying PowerSync SQLite tables. 
Schema validation can be used for additional input/output validations and type transforms. #### Option 1: Using Table Type Inference The collection types are automatically inferred from the PowerSync schema table definition. The table is used to construct a default standard schema validator which is used internally to validate collection operations. +Collection mutations accept SQLite types and queries report data with SQLite types. + ```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" @@ -99,19 +101,137 @@ const documentsCollection = createCollection( ) ``` -#### Option 2: Using Advanced Schema Validation +#### Option 2: SQLite Types with Schema Validation + +The standard PowerSync SQLite types map to these TypeScript types: + +| PowerSync Column Type | TypeScript Type | Description | +| --------------------- | ---------------- | -------------------------------------------------------------------- | +| `column.text` | `string \| null` | Text values, commonly used for strings, JSON, dates (as ISO strings) | +| `column.integer` | `number \| null` | Integer values, also used for booleans (0/1) | +| `column.real` | `number \| null` | Floating point numbers | -Additional validations can be performed by supplying a compatible validation schema (such as a Zod schema). The output typing of the validator is constrained to match the typing of the SQLite table. The input typing can be arbitrary. +Note: All PowerSync column types are nullable by default, as SQLite allows null values in any column. Your schema should always handle null values appropriately by using `.nullable()` in your Zod schemas and handling null cases in your transformations. + +Additional validations for collection mutations can be performed with a custom schema. The Schema below asserts that +the `name`, `author` and `created_at` fields are required as input. `name` also has an additional string length check. + +Note: The input and output types specified in this example still satisfy the underlying SQLite types. An additional `deserializationSchema` is required if the typing differs. See the examples below for more details. ```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" import { z } from "zod" -// The output of this schema must match the SQLite schema +// Schema validates SQLite types but adds constraints const schema = z.object({ id: z.string(), name: z.string().min(3, { message: "Should be at least 3 characters" }), + author: z.string(), + created_at: z.string(), // SQLite TEXT for dates + archived: z.number(), +}) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) +) + +/** Note: The types for input and output are defined as this */ +// Used for mutations like `insert` or `update` +type DocumentCollectionInput = { + id: string + name: string + author: string + created_at: string // SQLite TEXT + archived: number // SQLite integer +} +// The type of query/data results +type DocumentCollectionOutput = DocumentCollectionInput +``` + +#### Option 3: Transform SQLite Input Types to Rich Output Types + +You can transform SQLite types to richer types (like Date objects) while keeping SQLite-compatible input types: + +Note: The Transformed types are provided by TanStackDB to the PowerSync SQLite persister. 
These types need to be serialized in +order to be persisted to SQLite. Most types are converted by default. For custom types, override the serialization by providing a +`serializer` param. + +```ts +const schema = z.object({ + id: z.string(), + name: z.string().nullable(), + created_at: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), // Transform SQLite TEXT to Date + archived: z + .number() + .nullable() + .transform((val) => (val != null ? val > 0 : null)), // Transform SQLite INTEGER to boolean +}) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + // Optional: custom column serialization + serializer: { + // Dates are serialized by default, this is just an example + created_at: (value) => (value ? value.toISOString() : null), + }, + }) +) + +/** Note: The types for input and output are defined as this */ +// Used for mutations like `insert` or `update` +type DocumentCollectionInput = { + id: string + name: string | null + author: string | null + created_at: string | null // SQLite TEXT + archived: number | null +} +// The type of query/data results +type DocumentCollectionOutput = { + id: string + name: string | null + author: string | null + created_at: Date | null // JS Date instance + archived: boolean | null // JS boolean +} +``` + +#### Option 4: Custom Input/Output Types with Deserialization + +The input and output types can be completely decoupled from the internal SQLite types. This can be used to accept rich values for input mutations. +We require an additional `deserializationSchema` in order to validate and transform incoming synced (SQLite) updates. This schema should convert the incoming SQLite update to the output type. + +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `deserializationSchema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. + +```ts +// Our input/output types use Date and boolean +const schema = z.object({ + id: z.string(), + name: z.string(), + author: z.string(), + created_at: z.date(), // Accept Date objects as input +}) + +// Schema to transform from SQLite types to our output types +const deserializationSchema = z.object({ + id: z.string(), + name: z.string(), + author: z.string(), + created_at: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), // SQLite TEXT to Date }) const documentsCollection = createCollection( @@ -119,6 +239,10 @@ const documentsCollection = createCollection( database: db, table: APP_SCHEMA.props.documents, schema, + deserializationSchema, + onDeserializationError: (error) => { + // Present fatal error + }, }) ) ``` @@ -138,6 +262,102 @@ When connected to a PowerSync backend, changes are automatically synchronized in - Queue management for offline changes - Automatic retries on connection loss +### Working with Rich JavaScript Types + +PowerSync collections support rich JavaScript types like `Date`, `Boolean`, and custom objects while maintaining SQLite compatibility. 
The collection handles serialization and deserialization automatically: + +```typescript +import { z } from "zod" +import { Schema, Table, column } from "@powersync/web" +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" + +// Define PowerSync SQLite schema +const APP_SCHEMA = new Schema({ + tasks: new Table({ + title: column.text, + due_date: column.text, // Stored as ISO string in SQLite + completed: column.integer, // Stored as 0/1 in SQLite + metadata: column.text, // Stored as JSON string in SQLite + }), +}) + +// Define rich types schema +const taskSchema = z.object({ + id: z.string(), + title: z.string().nullable(), + due_date: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), // Convert to Date + completed: z + .number() + .nullable() + .transform((val) => (val != null ? val > 0 : null)), // Convert to boolean + metadata: z + .string() + .nullable() + .transform((val) => (val ? JSON.parse(val) : null)), // Parse JSON +}) + +// Create collection with rich types +const tasksCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.tasks, + schema: taskSchema, + }) +) + +// Work with rich types in your code +await tasksCollection.insert({ + id: crypto.randomUUID(), + title: "Review PR", + due_date: "2025-10-30T10:00:00Z", // String input is automatically converted to Date + completed: 0, // Number input is automatically converted to boolean + metadata: JSON.stringify({ priority: "high" }), +}) + +// Query returns rich types +const task = tasksCollection.get("task-1") +console.log(task.due_date instanceof Date) // true +console.log(typeof task.completed) // "boolean" +console.log(task.metadata.priority) // "high" +``` + +### Type Safety with Rich Types + +The collection maintains type safety throughout: + +```typescript +type TaskInput = { + id: string + title: string | null + due_date: string | null // Accept ISO string for mutations + completed: number | null // Accept 0/1 for mutations + metadata: string | null // Accept JSON string for mutations +} + +type TaskOutput = { + id: string + title: string | null + due_date: Date | null // Get Date object in queries + completed: boolean | null // Get boolean in queries + metadata: { + priority: string + [key: string]: any + } | null +} + +// TypeScript enforces correct types: +tasksCollection.insert({ + due_date: new Date(), // Error: Type 'Date' is not assignable to type 'string' +}) + +const task = tasksCollection.get("task-1") +task.due_date.getTime() // OK - TypeScript knows this is a Date +``` + ### Optimistic Updates Updates to the collection are applied optimistically to the local state first, then synchronized with PowerSync and the backend. If an error occurs during sync, the changes are automatically rolled back. 
@@ -147,10 +367,23 @@ Updates to the collection are applied optimistically to the local state first, t The `powerSyncCollectionOptions` function accepts the following options: ```ts -interface PowerSyncCollectionConfig { - database: PowerSyncDatabase // PowerSync database instance - table: Table // PowerSync schema table definition - schema?: StandardSchemaV1 // Optional schema for additional validation (e.g., Zod schema) +interface PowerSyncCollectionConfig { + // Required options + database: PowerSyncDatabase + table: Table + + // Schema validation and type transformation + schema?: StandardSchemaV1 + deserializationSchema?: StandardSchemaV1 // Required for custom input types + onDeserializationError?: (error: StandardSchemaV1.FailureResult) => void // Required for custom input types + + // Optional Custom serialization + serializer?: { + [Key in keyof TOutput]?: (value: TOutput[Key]) => SQLiteCompatibleType + } + + // Performance tuning + syncBatchSize?: number // Control batch size for initial sync, defaults to 1000 } ``` diff --git a/packages/powersync-db-collection/src/PowerSyncTransactor.ts b/packages/powersync-db-collection/src/PowerSyncTransactor.ts index 262ad6e4d..ceed7ed20 100644 --- a/packages/powersync-db-collection/src/PowerSyncTransactor.ts +++ b/packages/powersync-db-collection/src/PowerSyncTransactor.ts @@ -48,7 +48,7 @@ export type TransactorOptions = { * @param transaction - The transaction containing mutations to apply * @returns A promise that resolves when the mutations have been persisted to PowerSync */ -export class PowerSyncTransactor> { +export class PowerSyncTransactor { database: AbstractPowerSyncDatabase pendingOperationStore: PendingOperationStore @@ -60,7 +60,7 @@ export class PowerSyncTransactor> { /** * Persists a {@link Transaction} to the PowerSync SQLite database. */ - async applyTransaction(transaction: Transaction) { + async applyTransaction(transaction: Transaction) { const { mutations } = transaction if (mutations.length == 0) { @@ -147,7 +147,7 @@ export class PowerSyncTransactor> { } protected async handleInsert( - mutation: PendingMutation, + mutation: PendingMutation, context: LockContext, waitForCompletion: boolean = false ): Promise { @@ -157,10 +157,9 @@ export class PowerSyncTransactor> { mutation, context, waitForCompletion, - async (tableName, mutation) => { - const keys = Object.keys(mutation.modified).map( - (key) => sanitizeSQL`${key}` - ) + async (tableName, mutation, serializeValue) => { + const values = serializeValue(mutation.modified) + const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`) await context.execute( ` @@ -169,14 +168,14 @@ export class PowerSyncTransactor> { VALUES (${keys.map((_) => `?`).join(`, `)}) `, - Object.values(mutation.modified) + Object.values(values) ) } ) } protected async handleUpdate( - mutation: PendingMutation, + mutation: PendingMutation, context: LockContext, waitForCompletion: boolean = false ): Promise { @@ -186,27 +185,24 @@ export class PowerSyncTransactor> { mutation, context, waitForCompletion, - async (tableName, mutation) => { - const keys = Object.keys(mutation.modified).map( - (key) => sanitizeSQL`${key}` - ) + async (tableName, mutation, serializeValue) => { + const values = serializeValue(mutation.modified) + const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`) + await context.execute( ` UPDATE ${tableName} SET ${keys.map((key) => `${key} = ?`).join(`, `)} WHERE id = ? 
`, - [ - ...Object.values(mutation.modified), - asPowerSyncRecord(mutation.modified).id, - ] + [...Object.values(values), asPowerSyncRecord(mutation.modified).id] ) } ) } protected async handleDelete( - mutation: PendingMutation, + mutation: PendingMutation, context: LockContext, waitForCompletion: boolean = false ): Promise { @@ -234,10 +230,14 @@ export class PowerSyncTransactor> { * - Returning the last pending diff operation if required */ protected async handleOperationWithCompletion( - mutation: PendingMutation, + mutation: PendingMutation, context: LockContext, waitForCompletion: boolean, - handler: (tableName: string, mutation: PendingMutation) => Promise + handler: ( + tableName: string, + mutation: PendingMutation, + serializeValue: (value: any) => Record + ) => Promise ): Promise { if ( typeof (mutation.collection.config as any).utils?.getMeta != `function` @@ -246,11 +246,12 @@ export class PowerSyncTransactor> { The provided mutation might not have originated from PowerSync.`) } - const { tableName, trackedTableName } = ( - mutation.collection.config as unknown as EnhancedPowerSyncCollectionConfig + const { tableName, trackedTableName, serializeValue } = ( + mutation.collection + .config as unknown as EnhancedPowerSyncCollectionConfig ).utils.getMeta() - await handler(sanitizeSQL`${tableName}`, mutation) + await handler(sanitizeSQL`${tableName}`, mutation, serializeValue) if (!waitForCompletion) { return null diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 5c3b63d33..9d46818c4 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -1,7 +1,192 @@ import type { AbstractPowerSyncDatabase, Table } from "@powersync/common" import type { StandardSchemaV1 } from "@standard-schema/spec" -import type { BaseCollectionConfig, CollectionConfig } from "@tanstack/db" -import type { ExtractedTable } from "./helpers" +import type { + BaseCollectionConfig, + CollectionConfig, + InferSchemaOutput, +} from "@tanstack/db" +import type { + AnyTableColumnType, + ExtractedTable, + OptionalExtractedTable, + PowerSyncRecord, +} from "./helpers" + +/** + * Small helper which determines the output type if: + * - Standard SQLite types are to be used OR + * - If the provided schema should be used. + */ +export type InferPowerSyncOutputType< + TTable extends Table = Table, + TSchema extends StandardSchemaV1 = never, +> = TSchema extends never ? ExtractedTable : InferSchemaOutput + +/** + * A mapping type for custom serialization of object properties to SQLite-compatible values. + * + * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms + * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization. + * + * ## Generics + * - `TOutput`: The input object type, representing the row data to be serialized. + * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema. + * + * ## Usage + * Use this type to define a map of serialization functions for specific keys when you need custom handling + * (e.g., converting complex objects, formatting dates, or handling enums). + * + * Example: + * ```ts + * const serializer: CustomSQLiteSerializer = { + * createdAt: (date) => date.toISOString(), + * status: (status) => status ? 
1 : 0, + * meta: (meta) => JSON.stringify(meta), + * }; + * ``` + * + * ## Behavior + * - Each key maps to a function that receives the value and returns the SQLite-compatible value. + * - Used by `serializeForSQLite` to override default serialization for specific columns. + */ +export type CustomSQLiteSerializer< + TOutput extends Record, + TSQLite extends Record, +> = Partial<{ + [Key in keyof TOutput]: ( + value: TOutput[Key] + ) => Key extends keyof TSQLite ? TSQLite[Key] : never +}> + +export type SerializerConfig< + TOutput extends Record, + TSQLite extends Record, +> = { + /** + * Optional partial serializer object for customizing how individual columns are serialized for SQLite. + * + * This should be a partial map of column keys to serialization functions, following the + * {@link CustomSQLiteSerializer} type. Each function receives the column value and returns a value + * compatible with SQLite storage. + * + * If not provided for a column, the default behavior is used: + * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified. + * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0. + * + * Use this option to override serialization for specific columns, such as formatting dates, handling enums, + * or serializing complex objects. + * + * Example: + * ```typescript + * serializer: { + * createdAt: (date) => date.getTime(), // Store as timestamp + * meta: (meta) => JSON.stringify(meta), // Custom object serialization + * } + * ``` + */ + serializer?: CustomSQLiteSerializer +} + +/** + * Config for when TInput and TOutput are both the SQLite types. + */ +export type ConfigWithSQLiteTypes = {} + +/** + * Config where TInput is the SQLite types while TOutput can be defined by TSchema. + * We can use the same schema to validate TInput and incoming SQLite changes. + */ +export type ConfigWithSQLiteInputType< + TTable extends Table, + TSchema extends StandardSchemaV1< + // TInput is the SQLite types. + OptionalExtractedTable, + AnyTableColumnType + >, +> = SerializerConfig< + StandardSchemaV1.InferOutput, + ExtractedTable +> & { + schema: TSchema +} + +/** + * Config where TInput and TOutput have arbitrarily typed values. + * The keys of the types need to equal the SQLite types. + * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput. + */ +export type ConfigWithArbitraryCollectionTypes< + TTable extends Table, + TSchema extends StandardSchemaV1< + // The input and output must have the same keys, the value types can be arbitrary + AnyTableColumnType, + AnyTableColumnType + >, +> = SerializerConfig< + StandardSchemaV1.InferOutput, + ExtractedTable +> & { + schema: TSchema + /** + * Schema for deserializing and validating input data from the sync stream. + * + * This schema defines how to transform and validate data coming from SQLite types (as stored in the database) + * into the desired output types (`TOutput`) expected by your application or validation logic. + * + * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules + * for each column. This is especially useful when your application expects richer types (e.g., Date, enums, objects) + * than what SQLite natively supports. + * + * Use this to ensure that incoming data from the sync stream is properly converted and validated before use. 
+ * + * Example: + * ```typescript + * deserializationSchema: z.object({ + * createdAt: z.preprocess((val) => new Date(val as string), z.date()), + * meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... })), + * }) + * ``` + * + * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage + * and your application's expected types. + */ + deserializationSchema: StandardSchemaV1< + ExtractedTable, + StandardSchemaV1.InferOutput + > + + /** + * Application logic should ensure that incoming synced data is always valid. + * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error. + * Use this callback to react to deserialization errors. + */ + onDeserializationError: (error: StandardSchemaV1.FailureResult) => void +} +export type BasePowerSyncCollectionConfig< + TTable extends Table = Table, + TSchema extends StandardSchemaV1 = never, +> = Omit< + BaseCollectionConfig, string, TSchema>, + `onInsert` | `onUpdate` | `onDelete` | `getKey` +> & { + /** The PowerSync schema Table definition */ + table: TTable + /** The PowerSync database instance */ + database: AbstractPowerSyncDatabase + /** + * The maximum number of documents to read from the SQLite table + * in a single batch during the initial sync between PowerSync and the + * in-memory TanStack DB collection. + * + * @remarks + * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified. + * - Larger values reduce the number of round trips to the storage + * engine but increase memory usage per batch. + * - Smaller values may lower memory usage and allow earlier + * streaming of initial results, at the cost of more query calls. + */ + syncBatchSize?: number +} /** * Configuration interface for PowerSync collection options. @@ -36,34 +221,18 @@ import type { ExtractedTable } from "./helpers" */ export type PowerSyncCollectionConfig< TTable extends Table = Table, - TSchema extends StandardSchemaV1 = never, -> = Omit< - BaseCollectionConfig, string, TSchema>, - `onInsert` | `onUpdate` | `onDelete` | `getKey` -> & { - /** The PowerSync schema Table definition */ - table: TTable - /** The PowerSync database instance */ - database: AbstractPowerSyncDatabase - /** - * The maximum number of documents to read from the SQLite table - * in a single batch during the initial sync between PowerSync and the - * in-memory TanStack DB collection. - * - * @remarks - * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified. - * - Larger values reduce the number of round trips to the storage - * engine but increase memory usage per batch. - * - Smaller values may lower memory usage and allow earlier - * streaming of initial results, at the cost of more query calls. - */ - syncBatchSize?: number -} + TSchema extends StandardSchemaV1 = never, +> = BasePowerSyncCollectionConfig & + ( + | ConfigWithSQLiteTypes + | ConfigWithSQLiteInputType + | ConfigWithArbitraryCollectionTypes + ) /** * Metadata for the PowerSync Collection. */ -export type PowerSyncCollectionMeta = { +export type PowerSyncCollectionMeta = { /** * The SQLite table representing the collection. */ @@ -72,25 +241,31 @@ export type PowerSyncCollectionMeta = { * The internal table used to track diffs for the collection. */ trackedTableName: string + + /** + * Serializes a collection value to the SQLite type + */ + serializeValue: (value: any) => ExtractedTable } /** * A CollectionConfig which includes utilities for PowerSync. 
*/ export type EnhancedPowerSyncCollectionConfig< - TTable extends Table = Table, + TTable extends Table, + OutputType extends Record = Record, TSchema extends StandardSchemaV1 = never, -> = CollectionConfig, string, TSchema> & { +> = CollectionConfig & { id?: string - utils: PowerSyncCollectionUtils + utils: PowerSyncCollectionUtils schema?: TSchema } /** * Collection-level utilities for PowerSync. */ -export type PowerSyncCollectionUtils = { - getMeta: () => PowerSyncCollectionMeta +export type PowerSyncCollectionUtils = { + getMeta: () => PowerSyncCollectionMeta } /** diff --git a/packages/powersync-db-collection/src/helpers.ts b/packages/powersync-db-collection/src/helpers.ts index 7d23dc171..b2c35364e 100644 --- a/packages/powersync-db-collection/src/helpers.ts +++ b/packages/powersync-db-collection/src/helpers.ts @@ -1,5 +1,9 @@ import { DiffTriggerOperation } from "@powersync/common" -import type { ExtractColumnValueType, Table } from "@powersync/common" +import type { + BaseColumnType, + ExtractColumnValueType, + Table, +} from "@powersync/common" /** * All PowerSync table records include a UUID `id` column. @@ -21,6 +25,14 @@ type OptionalIfUndefined = { [K in keyof T as undefined extends T[K] ? never : K]: T[K] } +/** + * Provides the base column types for a table. This excludes the `id` column. + */ +export type ExtractedTableColumns = { + [K in keyof TTable[`columnMap`]]: ExtractColumnValueType< + TTable[`columnMap`][K] + > +} /** * Utility type that extracts the typed structure of a table based on its column definitions. * Maps each column to its corresponding TypeScript type using ExtractColumnValueType. @@ -33,10 +45,15 @@ type OptionalIfUndefined = { * age: column.integer * }) * type TableType = ExtractedTable - * // Results in: { name: string | null, age: number | null } + * // Results in: { id: string, name: string | null, age: number | null } * ``` */ -export type ExtractedTable = OptionalIfUndefined<{ +export type ExtractedTable = + ExtractedTableColumns & { + id: string + } + +export type OptionalExtractedTable = OptionalIfUndefined<{ [K in keyof TTable[`columnMap`]]: WithUndefinedIfNull< ExtractColumnValueType > @@ -44,6 +61,14 @@ export type ExtractedTable = OptionalIfUndefined<{ id: string } +/** + * Maps the schema of TTable to a type which + * requires the keys be equal, but the values can have any value type. 
+ */ +export type AnyTableColumnType = { + [K in keyof TTable[`columnMap`]]: any +} & { id: string } + export function asPowerSyncRecord(record: any): PowerSyncRecord { if (typeof record.id !== `string`) { throw new Error(`Record must have a string id field`) @@ -51,6 +76,11 @@ export function asPowerSyncRecord(record: any): PowerSyncRecord { return record as PowerSyncRecord } +// Helper type to ensure the keys of TOutput match the Table columns +export type MapBaseColumnType = { + [Key in keyof TOutput]: BaseColumnType +} + /** * Maps {@link DiffTriggerOperation} to TanstackDB operations */ diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 882deb1b1..065433347 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -4,20 +4,29 @@ import { asPowerSyncRecord, mapOperation } from "./helpers" import { PendingOperationStore } from "./PendingOperationStore" import { PowerSyncTransactor } from "./PowerSyncTransactor" import { convertTableToSchema } from "./schema" -import type { Table, TriggerDiffRecord } from "@powersync/common" -import type { StandardSchemaV1 } from "@standard-schema/spec" +import { serializeForSQLite } from "./serlization" +import type { PendingOperation } from "./PendingOperationStore" import type { - CollectionConfig, - InferSchemaOutput, - SyncConfig, -} from "@tanstack/db" + AnyTableColumnType, + ExtractedTable, + ExtractedTableColumns, + MapBaseColumnType, + OptionalExtractedTable, +} from "./helpers" import type { + BasePowerSyncCollectionConfig, + ConfigWithArbitraryCollectionTypes, + ConfigWithSQLiteInputType, + ConfigWithSQLiteTypes, + CustomSQLiteSerializer, EnhancedPowerSyncCollectionConfig, + InferPowerSyncOutputType, PowerSyncCollectionConfig, PowerSyncCollectionUtils, } from "./definitions" -import type { ExtractedTable } from "./helpers" -import type { PendingOperation } from "./PendingOperationStore" +import type { SyncConfig } from "@tanstack/db" +import type { StandardSchemaV1 } from "@standard-schema/spec" +import type { Table, TriggerDiffRecord } from "@powersync/common" /** * Creates PowerSync collection options for use with a standard Collection. @@ -28,8 +37,11 @@ import type { PendingOperation } from "./PendingOperationStore" * @returns Collection options with utilities */ +// Overload 1: No schema is provided + /** * Creates a PowerSync collection configuration with basic default validation. + * Input and Output types are the SQLite column types. * * @example * ```typescript @@ -57,15 +69,26 @@ import type { PendingOperation } from "./PendingOperationStore" * ``` */ export function powerSyncCollectionOptions( - config: PowerSyncCollectionConfig -): CollectionConfig, string, never> & { - utils: PowerSyncCollectionUtils -} + config: BasePowerSyncCollectionConfig & ConfigWithSQLiteTypes +): EnhancedPowerSyncCollectionConfig< + TTable, + OptionalExtractedTable, + never +> + +// Overload 2: Schema is provided and the TInput matches SQLite types. -// Overload for when schema is provided /** * Creates a PowerSync collection configuration with schema validation. * + * The input types satisfy the SQLite column types. + * + * The output types are defined by the provided schema. This schema can enforce additional + * validation or type transforms. + * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard + * serialization implementation to serialize column values. 
Custom or advanced types require providing additional + * serializer specifications. Partial column overrides can be supplied to `serializer`. + * * @example * ```typescript * import { z } from "zod" @@ -74,6 +97,8 @@ export function powerSyncCollectionOptions( * const APP_SCHEMA = new Schema({ * documents: new Table({ * name: column.text, + * // Dates are stored as ISO date strings in SQLite + * created_at: column.text * }), * }) * @@ -81,37 +106,120 @@ export function powerSyncCollectionOptions( * // is constrained to the SQLite schema of APP_SCHEMA * const schema = z.object({ * id: z.string(), + * // Notice that `name` is not nullable (is required) here and it has additional validation * name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(), + * // The input type is still the SQLite string type. While collections will output smart Date instances. + * created_at: z.string().transform(val => new Date(val)) * }) * * const collection = createCollection( * powerSyncCollectionOptions({ * database: db, * table: APP_SCHEMA.props.documents, - * schema + * schema, + * serializer: { + * // The default is toISOString, this is just to demonstrate custom overrides + * created_at: (outputValue) => outputValue.toISOString(), + * }, * }) * ) * ``` */ export function powerSyncCollectionOptions< TTable extends Table, - TSchema extends StandardSchemaV1>, + TSchema extends StandardSchemaV1< + // TInput is the SQLite types. We can use the supplied schema to validate sync input + OptionalExtractedTable, + AnyTableColumnType + >, >( - config: PowerSyncCollectionConfig -): CollectionConfig, string, TSchema> & { - utils: PowerSyncCollectionUtils + config: BasePowerSyncCollectionConfig & + ConfigWithSQLiteInputType +): EnhancedPowerSyncCollectionConfig< + TTable, + InferPowerSyncOutputType, + TSchema +> & { schema: TSchema } +// Overload 3: Schema is provided with arbitrary TInput and TOutput /** - * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. + * Creates a PowerSync collection configuration with schema validation. + * + * The input types are not linked to the internal SQLite table types. This can + * give greater flexibility, e.g. by accepting rich types as input for `insert` or `update` operations. + * An additional `deserializationSchema` is required in order to process incoming SQLite updates to the output type. + * + * The output types are defined by the provided schema. This schema can enforce additional + * validation or type transforms. + * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard + * serialization implementation to serialize column values. Custom or advanced types require providing additional + * serializer specifications. Partial column overrides can be supplied to `serializer`. + * + * @example + * ```typescript + * import { z } from "zod" + * + * // The PowerSync SQLite schema + * const APP_SCHEMA = new Schema({ + * documents: new Table({ + * name: column.text, + * // Booleans are represented as integers in SQLite + * is_active: column.integer + * }), + * }) + * + * // Advanced Zod validations. + * // We accept boolean values as input for operations and expose Booleans in query results + * const schema = z.object({ + * id: z.string(), + * isActive: z.boolean(), // TInput and TOutput are boolean + * }) + * + * // The deserializationSchema converts the SQLite synced INTEGER (0/1) values to booleans. 
+ * const deserializationSchema = z.object({ + * id: z.string(), + * isActive: z.number().nullable().transform((val) => val == null ? true : val > 0), + * }) + * + * const collection = createCollection( + * powerSyncCollectionOptions({ + * database: db, + * table: APP_SCHEMA.props.documents, + * schema, + * deserializationSchema, + * }) + * ) + * ``` */ export function powerSyncCollectionOptions< - TTable extends Table = Table, - TSchema extends StandardSchemaV1 = never, + TTable extends Table, + TSchema extends StandardSchemaV1< + // The input and output must have the same keys, the value types can be arbitrary + AnyTableColumnType, + AnyTableColumnType + >, >( - config: PowerSyncCollectionConfig -): EnhancedPowerSyncCollectionConfig { + config: BasePowerSyncCollectionConfig & + ConfigWithArbitraryCollectionTypes +): EnhancedPowerSyncCollectionConfig< + TTable, + InferPowerSyncOutputType, + TSchema +> & { + utils: PowerSyncCollectionUtils + schema: TSchema +} + +/** + * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations. + */ + +export function powerSyncCollectionOptions< + TTable extends Table, + TSchema extends StandardSchemaV1 = never, +>(config: PowerSyncCollectionConfig) { const { database, table, @@ -120,9 +228,45 @@ export function powerSyncCollectionOptions< ...restConfig } = config - type RecordType = ExtractedTable + const deserializationSchema = + `deserializationSchema` in config ? config.deserializationSchema : null + const serializer = `serializer` in config ? config.serializer : undefined + const onDeserializationError = + `onDeserializationError` in config + ? config.onDeserializationError + : undefined + + // The SQLite table type + type TableType = ExtractedTable + + // The collection output type + type OutputType = InferPowerSyncOutputType + const { viewName } = table + /** + * Deserializes data from the incoming sync stream + */ + const deserializeSyncRow = (value: TableType): OutputType => { + if (deserializationSchema) { + const validation = deserializationSchema[`~standard`].validate(value) + if (`value` in validation) { + return validation.value + } else if (`issues` in validation) { + const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}` + database.logger.error(issueMessage) + onDeserializationError!(validation) + throw new Error(issueMessage) + } else { + const unknownErrorMessage = `Unknown deserialization error for ${viewName}` + database.logger.error(unknownErrorMessage) + onDeserializationError!({ issues: [{ message: unknownErrorMessage }] }) + throw new Error(unknownErrorMessage) + } + } + return value as OutputType + } + // We can do basic runtime validations for columns if not explicit schema has been provided const schema = inputSchema ?? (convertTableToSchema(table) as TSchema) /** @@ -143,7 +287,7 @@ export function powerSyncCollectionOptions< .toString(16) .padStart(8, `0`)}` - const transactor = new PowerSyncTransactor({ + const transactor = new PowerSyncTransactor({ database, }) @@ -152,7 +296,7 @@ export function powerSyncCollectionOptions< * Notice that this describes the Sync between the local SQLite table * and the in-memory tanstack-db collection. 
*/ - const sync: SyncConfig = { + const sync: SyncConfig = { sync: (params) => { const { begin, write, commit, markReady } = params const abortController = new AbortController() @@ -175,14 +319,17 @@ export function powerSyncCollectionOptions< for (const op of operations) { const { id, operation, timestamp, value } = op - const parsedValue = { + const parsedValue = deserializeSyncRow({ id, ...JSON.parse(value), - } + }) const parsedPreviousValue = op.operation == DiffTriggerOperation.UPDATE - ? { id, ...JSON.parse(op.previous_value) } - : null + ? deserializeSyncRow({ + id, + ...JSON.parse(op.previous_value), + }) + : undefined write({ type: mapOperation(operation), value: parsedValue, @@ -231,7 +378,7 @@ export function powerSyncCollectionOptions< let cursor = 0 while (currentBatchCount == syncBatchSize) { begin() - const batchItems = await context.getAll( + const batchItems = await context.getAll( sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? OFFSET ?`, [syncBatchSize, cursor] ) @@ -240,7 +387,7 @@ export function powerSyncCollectionOptions< for (const row of batchItems) { write({ type: `insert`, - value: row, + value: deserializeSyncRow(row), }) } commit() @@ -285,9 +432,13 @@ export function powerSyncCollectionOptions< getSyncMetadata: undefined, } - const getKey = (record: RecordType) => asPowerSyncRecord(record).id + const getKey = (record: OutputType) => asPowerSyncRecord(record).id - const outputConfig: EnhancedPowerSyncCollectionConfig = { + const outputConfig: EnhancedPowerSyncCollectionConfig< + TTable, + OutputType, + TSchema + > = { ...restConfig, schema, getKey, @@ -310,6 +461,19 @@ export function powerSyncCollectionOptions< getMeta: () => ({ tableName: viewName, trackedTableName, + serializeValue: (value) => + serializeForSQLite( + value, + // This is required by the input generic + table as Table< + MapBaseColumnType> + >, + // Coerce serializer to the shape that corresponds to the Table constructed from OutputType + serializer as CustomSQLiteSerializer< + OutputType, + ExtractedTableColumns>> + > + ), }), }, } diff --git a/packages/powersync-db-collection/src/serlization.ts b/packages/powersync-db-collection/src/serlization.ts new file mode 100644 index 000000000..13201ffad --- /dev/null +++ b/packages/powersync-db-collection/src/serlization.ts @@ -0,0 +1,101 @@ +import { ColumnType } from "@powersync/common" +import type { Table } from "@powersync/common" +import type { CustomSQLiteSerializer } from "./definitions" +import type { + ExtractedTable, + ExtractedTableColumns, + MapBaseColumnType, +} from "./helpers" + +/** + * Serializes an object for persistence to a SQLite table, mapping its values to appropriate SQLite types. + * + * This function takes an object representing a row, a table schema, and an optional custom serializer map. + * It returns a new object with values transformed to be compatible with SQLite column types. + * + * ## Generics + * - `TOutput`: The shape of the input object, typically matching the row data. + * - `TTable`: The table schema, which must match the keys of `TOutput`. + * + * ## Parameters + * - `value`: The object to serialize (row data). + * - `tableSchema`: The schema describing the SQLite table columns and types. + * - `customSerializer`: An optional map of custom serialization functions for specific keys. + * + * ## Behavior + * - For each key in `value`, finds the corresponding column in `tableSchema`. + * - If a custom serializer is provided for a key, it is used to transform the value. 
+ * - Otherwise, values are mapped according to the column type: + * - `TEXT`: Strings are passed through; Dates are converted to ISO strings; other types are JSON-stringified. + * - `INTEGER`/`REAL`: Numbers are passed through; booleans are mapped to 1/0; other types are coerced to numbers. + * - Throws if a column type is unknown or a value cannot be converted. + * + * ## Returns + * - An object with the same keys as `value`, with values transformed for SQLite compatibility. + * + * ## Errors + * - Throws if a key in `value` does not exist in the schema. + * - Throws if a value cannot be converted to the required SQLite type. + */ +export function serializeForSQLite< + TOutput extends Record, + // The keys should match + TTable extends Table> = Table< + MapBaseColumnType + >, +>( + value: TOutput, + tableSchema: TTable, + customSerializer: Partial< + CustomSQLiteSerializer> + > = {} +): ExtractedTable { + return Object.fromEntries( + Object.entries(value).map(([key, value]) => { + // First get the output schema type + const outputType = + key == `id` + ? ColumnType.TEXT + : tableSchema.columns.find((column) => column.name == key)?.type + if (!outputType) { + throw new Error(`Could not find schema for ${key} column.`) + } + + if (value == null) { + return [key, value] + } + + const customTransform = customSerializer[key] + if (customTransform) { + return [key, customTransform(value as TOutput[string])] + } + + // Map to the output + switch (outputType) { + case ColumnType.TEXT: + if (typeof value == `string`) { + return [key, value] + } else if (value instanceof Date) { + return [key, value.toISOString()] + } else { + return [key, JSON.stringify(value)] + } + case ColumnType.INTEGER: + case ColumnType.REAL: + if (typeof value == `number`) { + return [key, value] + } else if (typeof value == `boolean`) { + return [key, value ? 
1 : 0] + } else { + const numberValue = Number(value) + if (isNaN(numberValue)) { + throw new Error( + `Could not convert ${key}=${value} to a number for SQLite` + ) + } + return [key, numberValue] + } + } + }) + ) +} diff --git a/packages/powersync-db-collection/tests/collection-schema.test.ts b/packages/powersync-db-collection/tests/collection-schema.test.ts new file mode 100644 index 000000000..634690d65 --- /dev/null +++ b/packages/powersync-db-collection/tests/collection-schema.test.ts @@ -0,0 +1,310 @@ +import { randomUUID } from "node:crypto" +import { tmpdir } from "node:os" +import { PowerSyncDatabase, Schema, Table, column } from "@powersync/node" +import { SchemaValidationError, createCollection } from "@tanstack/db" +import { describe, expect, it, onTestFinished } from "vitest" +import { z } from "zod" +import { powerSyncCollectionOptions } from "../src" + +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + author: column.text, + created_at: column.text, // Will be mapped to Date + archived: column.integer, // Will be mapped to Boolean + }), +}) + +describe(`PowerSync Schema Integration`, () => { + async function createDatabase() { + const db = new PowerSyncDatabase({ + database: { + dbFilename: `test.sqlite`, + dbLocation: tmpdir(), + implementation: { type: `node:sqlite` }, + }, + schema: APP_SCHEMA, + }) + onTestFinished(async () => { + await db.disconnectAndClear() + await db.close() + }) + // Initial clear in case a test might have failed + await db.disconnectAndClear() + return db + } + + function createDocumentsCollection(db: PowerSyncDatabase) { + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + // We get typing and a default validator from this + table: APP_SCHEMA.props.documents, + }) + ) + onTestFinished(() => collection.cleanup()) + return collection + } + + describe(`schema`, () => { + /** + * When using the SQLite types for TInput and TOutput, we provide a basic schema validator. + */ + it(`should use basic runtime validations from automatic SQLite schema`, async () => { + const db = await createDatabase() + + // the collection should infer types and validate with the schema + const collection = createDocumentsCollection(db) + await collection.stateWhenReady() + + collection.insert({ + id: randomUUID(), + name: `aname`, + }) + + collection.insert({ + id: randomUUID(), + name: null, + }) + + expect(collection.size).eq(2) + + // should validate inputs + try { + collection.insert({} as any) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + expect(ex.message).contains(`id field must be a string`) + } + } + }) + + /** + * The TInput value can enforce additional validations. + * This example uses the SQLite types as TInput and TOutput. + */ + it(`should allow for advanced input validations`, async () => { + const db = await createDatabase() + + const errorMessage = `Name must be at least 3 characters` + /** + * This has additional validations on TInput. + * These validations will be applied for mutations: e.g. 
`insert`, `update`/ + * Validations include: + * - `name`, `author` and `created_at` are required + * - `name` must be at least 3 characters long + */ + const schema = z.object({ + id: z.string(), + name: z.string().min(3, { message: errorMessage }), + archived: z.number(), + author: z.string(), + created_at: z.string(), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) + ) + onTestFinished(() => collection.cleanup()) + + try { + collection.insert({ + id: randomUUID(), + name: `2`, + author: `name`, + created_at: new Date().toISOString(), + archived: 0, + }) + expect.fail(`Should throw a validation error`) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + expect(ex.message).contains(errorMessage) + } + } + + expect(collection.size).eq(0) + + // should validate inputs + try { + collection.insert({} as any) + } catch (ex) { + expect(ex instanceof SchemaValidationError).true + if (ex instanceof SchemaValidationError) { + expect(ex.message).contains(`Required - path: id`) + } + } + }) + + /** + * In this example the TInput and TOutput types are different. + * In this example we use the SQLite types as the input. We don't need an additional deserialization schema. + */ + it(`should allow custom/transformed input types - Input is SQLite`, async () => { + const db = await createDatabase() + + /** + * The input for `created_at` is string, while it's presented as a `Date` in TOutput + */ + const schema = z.object({ + id: z.string(), + name: z.string().nullable(), + archived: z.number().nullable(), + author: z.string().nullable(), + created_at: z + .string() + .nullable() + .transform((val) => val && new Date(val)), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + }) + ) + onTestFinished(() => collection.cleanup()) + + const testDate = new Date() + const id = randomUUID() + collection.insert({ + id, + name: `document`, + author: `nanme`, + created_at: testDate.toISOString(), + archived: 0, + }) + + const item = collection.get(id) + + expect(item?.created_at instanceof Date).true + expect(item?.created_at?.toLocaleString()).eq(testDate.toLocaleString()) + }) + + /** + * In this example the TInput and TOutput types are different. + * In this example we use custom types for TInput. This requires an additional schema for validating + * incoming items from the sync stream (which is typed as SQLite) + */ + it(`should allow custom/transformed input types - Input is different from SQLite`, async () => { + const db = await createDatabase() + + /** + * The input for `created_at` is unix epoch, while it's presented as a `Date` in TOutput + */ + const schema = z.object({ + id: z.string(), + name: z.string().nullable(), + // We want to use booleans as the input here + archived: z.boolean().nullable(), + author: z.string().nullable(), + created_at: z.date().nullable(), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + /** + * This needs to convert the SQLite types (which are different from TInput in this case) + * to TOutput + */ + deserializationSchema: z.object({ + id: z.string(), + name: z.string().nullable(), + archived: z + .number() + .nullable() + .transform((val) => (val ? 
val != 0 : null)), + author: z.string().nullable(), + // In this case val is an ISO date string from SQLite + created_at: z + .string() + .nullable() + .transform((val) => (val ? new Date(val) : null)), + }), + onDeserializationError: () => {}, + }) + ) + onTestFinished(() => collection.cleanup()) + + const testDate = new Date() + const id = randomUUID() + collection.insert({ + id, + name: `document`, + author: `nanme`, + created_at: new Date(), + archived: false, + }) + + const item = collection.get(id) + + expect(item?.created_at instanceof Date).true + expect(item?.created_at?.toLocaleString()).eq(testDate.toLocaleString()) + }) + + /** + * In this example we contain a TOutput field type which requires custom serialization for SQLite + */ + it(`should allow for custom serializers`, async () => { + const db = await createDatabase() + + class MyDataClass { + constructor(public options: { value: string }) {} + } + + /** + * Here name is stored as a Buffer. We can't serialize this to SQLite automatically. + * We need to provide a serializer. + */ + const schema = z.object({ + id: z.string(), + name: z + .string() + .nullable() + .transform((value) => (value ? new MyDataClass({ value }) : null)), + archived: z.number().nullable(), + author: z.string().nullable(), + created_at: z.string().nullable(), + }) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + // This is used to serialize to SQLite + serializer: { + name: (holder) => holder?.options.value || null, + }, + deserializationSchema: schema, + onDeserializationError: () => {}, + }) + ) + onTestFinished(() => collection.cleanup()) + + const id = randomUUID() + + const result = collection.insert({ + id, + name: `document`, + author: `name`, + created_at: new Date().toISOString(), + archived: 0, + }) + + const item = collection.get(id) + await result.isPersisted.promise + expect(item?.name instanceof MyDataClass).true + expect(item?.name?.options.value).eq(`document`) + }) + }) +}) diff --git a/packages/powersync-db-collection/tests/powersync.test.ts b/packages/powersync-db-collection/tests/powersync.test.ts index 8a7a06c6b..40abc2413 100644 --- a/packages/powersync-db-collection/tests/powersync.test.ts +++ b/packages/powersync-db-collection/tests/powersync.test.ts @@ -8,14 +8,12 @@ import { column, } from "@powersync/node" import { - SchemaValidationError, createCollection, createTransaction, eq, liveQueryCollectionOptions, } from "@tanstack/db" import { describe, expect, it, onTestFinished, vi } from "vitest" -import { z } from "zod" import { powerSyncCollectionOptions } from "../src" import { PowerSyncTransactor } from "../src/PowerSyncTransactor" import type { AbstractPowerSyncDatabase } from "@powersync/node" @@ -23,21 +21,20 @@ import type { AbstractPowerSyncDatabase } from "@powersync/node" const APP_SCHEMA = new Schema({ users: new Table({ name: column.text, + active: column.integer, // Will be mapped to Boolean + }), + documents: new Table({ + name: column.text, + author: column.text, + created_at: column.text, // Will be mapped to Date }), - documents: new Table( - { - name: column.text, - author: column.text, - }, - { viewName: `documents` } - ), }) describe(`PowerSync Integration`, () => { async function createDatabase() { const db = new PowerSyncDatabase({ database: { - dbFilename: `test.sqlite`, + dbFilename: `test-${randomUUID()}.sqlite`, dbLocation: tmpdir(), implementation: { type: `node:sqlite` }, }, @@ -74,119 +71,6 @@ 
describe(`PowerSync Integration`, () => { `) } - describe(`schema`, () => { - it(`should use basic runtime validations from automatic schema`, async () => { - const db = await createDatabase() - - // the collection should infer types and validate with the schema - const collection = createDocumentsCollection(db) - - collection.insert({ - id: randomUUID(), - name: `aname`, - }) - - collection.insert({ - id: randomUUID(), - name: null, - }) - - expect(collection.size).eq(2) - - // should validate inputs - try { - collection.insert({} as any) - } catch (ex) { - expect(ex instanceof SchemaValidationError).true - if (ex instanceof SchemaValidationError) { - expect(ex.message).contains(`id field must be a string`) - } - } - }) - - it(`should allow for advanced validations`, async () => { - const db = await createDatabase() - - const errorMessage = `Name must be at least 3 characters` - const schema = z.object({ - id: z.string(), - name: z - .string() - .min(3, { message: errorMessage }) - .nullable() - .optional(), - }) - - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - table: APP_SCHEMA.props.documents, - schema, - }) - ) - onTestFinished(() => collection.cleanup()) - - try { - collection.insert({ - id: randomUUID(), - name: `2`, - }) - expect.fail(`Should throw a validation error`) - } catch (ex) { - expect(ex instanceof SchemaValidationError).true - if (ex instanceof SchemaValidationError) { - expect(ex.message).contains(errorMessage) - } - } - - collection.insert({ - id: randomUUID(), - name: null, - }) - - expect(collection.size).eq(1) - - // should validate inputs - try { - collection.insert({} as any) - } catch (ex) { - expect(ex instanceof SchemaValidationError).true - if (ex instanceof SchemaValidationError) { - expect(ex.message).contains(`Required - path: id`) - } - } - }) - - it(`should allow custom input types`, async () => { - const db = await createDatabase() - - // The input can be arbitrarily typed, as long as it converts to SQLite - const schema = z.object({ - id: z.string(), - name: z.number().transform((val) => `Number: ${val}`), - }) - - const collection = createCollection( - powerSyncCollectionOptions({ - database: db, - table: APP_SCHEMA.props.documents, - schema, - }) - ) - onTestFinished(() => collection.cleanup()) - - const id = randomUUID() - collection.insert({ - id, - name: 42, - }) - - const item = collection.get(id) - - expect(item?.name).eq(`Number: 42`) - }) - }) - describe(`sync`, () => { it(`should initialize and fetch initial data`, async () => { const db = await createDatabase() @@ -456,6 +340,7 @@ describe(`PowerSync Integration`, () => { vi.spyOn(options.utils, `getMeta`).mockImplementation(() => ({ tableName: `fakeTable`, trackedTableName: `error`, + serializeValue: () => ({}) as any, })) // Create two collections for the same table const collection = createCollection(options) From da9ec603cfa8182e19a03bc0fa81c34e9d045325 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 28 Oct 2025 14:32:48 +0200 Subject: [PATCH 41/56] docuement deserialization errors --- docs/collections/powersync-collection.md | 30 +++++++++-- .../src/definitions.ts | 14 ++--- .../powersync-db-collection/src/powersync.ts | 30 +++++------ .../tests/collection-schema.test.ts | 54 ++++++++++++++++++- 4 files changed, 101 insertions(+), 27 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 9fb302507..0bd1744e1 100644 --- a/docs/collections/powersync-collection.md +++ 
b/docs/collections/powersync-collection.md @@ -99,9 +99,19 @@ const documentsCollection = createCollection( table: APP_SCHEMA.props.documents, }) ) -``` -#### Option 2: SQLite Types with Schema Validation +/** Note: The types for input and output are defined as this */ +// Used for mutations like `insert` or `update` +type DocumentCollectionInput = { + id: string + name: string | null + author: string | null + created_at: string | null // SQLite TEXT + archived: number | null // SQLite integer +} +// The type of query/data results +type DocumentCollectionOutput = DocumentCollectionInput +``` The standard PowerSync SQLite types map to these TypeScript types: @@ -111,13 +121,17 @@ The standard PowerSync SQLite types map to these TypeScript types: | `column.integer` | `number \| null` | Integer values, also used for booleans (0/1) | | `column.real` | `number \| null` | Floating point numbers | -Note: All PowerSync column types are nullable by default, as SQLite allows null values in any column. Your schema should always handle null values appropriately by using `.nullable()` in your Zod schemas and handling null cases in your transformations. +Note: All PowerSync column types are nullable by default. + +#### Option 2: SQLite Types with Schema Validation Additional validations for collection mutations can be performed with a custom schema. The Schema below asserts that the `name`, `author` and `created_at` fields are required as input. `name` also has an additional string length check. Note: The input and output types specified in this example still satisfy the underlying SQLite types. An additional `deserializationSchema` is required if the typing differs. See the examples below for more details. +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `deserializationSchema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. + ```ts import { createCollection } from "@tanstack/react-db" import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" @@ -137,6 +151,9 @@ const documentsCollection = createCollection( database: db, table: APP_SCHEMA.props.documents, schema, + onDeserializationError: (error) => { + // Present fatal error + }, }) ) @@ -161,6 +178,10 @@ Note: The Transformed types are provided by TanStackDB to the PowerSync SQLite p order to be persisted to SQLite. Most types are converted by default. For custom types, override the serialization by providing a `serializer` param. +The example below uses `nullable` columns, this is not a requirement. + +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `deserializationSchema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. 
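+Since a deserialization failure means the in-memory collection can no longer mirror the synced SQLite data, a handler will typically log the reported issues and put the app into a fatal/resync state. A minimal sketch is shown below; the `needsResync` flag and the recovery comment are illustrative placeholders, not part of this package, and the exact recovery strategy (for example clearing local data and resyncing) is application specific:
+
+```ts
+import type { StandardSchemaV1 } from "@standard-schema/spec"
+
+// Hypothetical app-level flag used here only to illustrate a recovery hook.
+let needsResync = false
+
+const onDeserializationError = (error: StandardSchemaV1.FailureResult) => {
+  // Each issue describes a column of the incoming synced row that failed validation.
+  for (const issue of error.issues) {
+    console.error(`Invalid synced row: ${issue.path} - ${issue.message}`)
+  }
+  // Application-specific recovery goes here (e.g. surface a fatal error, clear local data, resync).
+  needsResync = true
+}
+```
+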
+ ```ts const schema = z.object({ id: z.string(), @@ -180,6 +201,9 @@ const documentsCollection = createCollection( database: db, table: APP_SCHEMA.props.documents, schema, + onDeserializationError: (error) => { + // Present fatal error + }, // Optional: custom column serialization serializer: { // Dates are serialized by default, this is just an example diff --git a/packages/powersync-db-collection/src/definitions.ts b/packages/powersync-db-collection/src/definitions.ts index 9d46818c4..d27b47311 100644 --- a/packages/powersync-db-collection/src/definitions.ts +++ b/packages/powersync-db-collection/src/definitions.ts @@ -85,6 +85,13 @@ export type SerializerConfig< * ``` */ serializer?: CustomSQLiteSerializer + + /** + * Application logic should ensure that incoming synced data is always valid. + * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error. + * Use this callback to react to deserialization errors. + */ + onDeserializationError: (error: StandardSchemaV1.FailureResult) => void } /** @@ -154,13 +161,6 @@ export type ConfigWithArbitraryCollectionTypes< ExtractedTable, StandardSchemaV1.InferOutput > - - /** - * Application logic should ensure that incoming synced data is always valid. - * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error. - * Use this callback to react to deserialization errors. - */ - onDeserializationError: (error: StandardSchemaV1.FailureResult) => void } export type BasePowerSyncCollectionConfig< TTable extends Table = Table, diff --git a/packages/powersync-db-collection/src/powersync.ts b/packages/powersync-db-collection/src/powersync.ts index 065433347..f3e6d8643 100644 --- a/packages/powersync-db-collection/src/powersync.ts +++ b/packages/powersync-db-collection/src/powersync.ts @@ -248,23 +248,21 @@ export function powerSyncCollectionOptions< * Deserializes data from the incoming sync stream */ const deserializeSyncRow = (value: TableType): OutputType => { - if (deserializationSchema) { - const validation = deserializationSchema[`~standard`].validate(value) - if (`value` in validation) { - return validation.value - } else if (`issues` in validation) { - const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}` - database.logger.error(issueMessage) - onDeserializationError!(validation) - throw new Error(issueMessage) - } else { - const unknownErrorMessage = `Unknown deserialization error for ${viewName}` - database.logger.error(unknownErrorMessage) - onDeserializationError!({ issues: [{ message: unknownErrorMessage }] }) - throw new Error(unknownErrorMessage) - } + const validationSchema = deserializationSchema || schema + const validation = validationSchema[`~standard`].validate(value) + if (`value` in validation) { + return validation.value + } else if (`issues` in validation) { + const issueMessage = `Failed to validate incoming data for ${viewName}. 
Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}` + database.logger.error(issueMessage) + onDeserializationError!(validation) + throw new Error(issueMessage) + } else { + const unknownErrorMessage = `Unknown deserialization error for ${viewName}` + database.logger.error(unknownErrorMessage) + onDeserializationError!({ issues: [{ message: unknownErrorMessage }] }) + throw new Error(unknownErrorMessage) } - return value as OutputType } // We can do basic runtime validations for columns if not explicit schema has been provided diff --git a/packages/powersync-db-collection/tests/collection-schema.test.ts b/packages/powersync-db-collection/tests/collection-schema.test.ts index 634690d65..195ffacf7 100644 --- a/packages/powersync-db-collection/tests/collection-schema.test.ts +++ b/packages/powersync-db-collection/tests/collection-schema.test.ts @@ -2,9 +2,10 @@ import { randomUUID } from "node:crypto" import { tmpdir } from "node:os" import { PowerSyncDatabase, Schema, Table, column } from "@powersync/node" import { SchemaValidationError, createCollection } from "@tanstack/db" -import { describe, expect, it, onTestFinished } from "vitest" +import { describe, expect, it, onTestFinished, vi } from "vitest" import { z } from "zod" import { powerSyncCollectionOptions } from "../src" +import type { StandardSchemaV1 } from "@standard-schema/spec" const APP_SCHEMA = new Schema({ documents: new Table({ @@ -108,6 +109,7 @@ describe(`PowerSync Schema Integration`, () => { database: db, table: APP_SCHEMA.props.documents, schema, + onDeserializationError: () => {}, }) ) onTestFinished(() => collection.cleanup()) @@ -167,6 +169,7 @@ describe(`PowerSync Schema Integration`, () => { database: db, table: APP_SCHEMA.props.documents, schema, + onDeserializationError: () => {}, }) ) onTestFinished(() => collection.cleanup()) @@ -306,5 +309,54 @@ describe(`PowerSync Schema Integration`, () => { expect(item?.name instanceof MyDataClass).true expect(item?.name?.options.value).eq(`document`) }) + + /** + * We sync data which cannot be validated by the schema. This is a fatal error. + */ + it(`should catch deserialization errors`, async () => { + const db = await createDatabase() + + /** + * Here name is stored as a Buffer. We can't serialize this to SQLite automatically. + * We need to provide a serializer. 
+ */ + const schema = z.object({ + id: z.string(), + name: z.string(), + archived: z.number(), + author: z.string(), + created_at: z.string(), + }) + + const onError = vi.fn((() => {}) as ( + error: StandardSchemaV1.FailureResult + ) => void) + + const collection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + schema, + onDeserializationError: onError, + }) + ) + onTestFinished(() => collection.cleanup()) + + await collection.stateWhenReady() + + // The columns are not nullable in the schema + // Write invalid data to SQLite, this simulates a sync + await db.execute(`INSERT INTO documents(id) VALUES(uuid())`) + + await vi.waitFor( + () => { + const issues = onError.mock.lastCall?.[0]?.issues + expect(issues).toBeDefined() + // Each column which should have been defined + expect(issues?.length).eq(4) + }, + { timeout: 1000 } + ) + }) }) }) From c4398993f4e822be28d877705e2ddb982372c906 Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 28 Oct 2025 14:35:10 +0200 Subject: [PATCH 42/56] Fix typo in READMe --- docs/collections/powersync-collection.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 0bd1744e1..8acd28648 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -130,7 +130,7 @@ the `name`, `author` and `created_at` fields are required as input. `name` also Note: The input and output types specified in this example still satisfy the underlying SQLite types. An additional `deserializationSchema` is required if the typing differs. See the examples below for more details. -The application logic (including the backend) should enforce that all incoming synced data passes validation with the `deserializationSchema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `schema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. ```ts import { createCollection } from "@tanstack/react-db" @@ -180,7 +180,7 @@ order to be persisted to SQLite. Most types are converted by default. For custom The example below uses `nullable` columns, this is not a requirement. -The application logic (including the backend) should enforce that all incoming synced data passes validation with the `deserializationSchema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. +The application logic (including the backend) should enforce that all incoming synced data passes validation with the `schema`. Failing to validate data will result in inconsistency of the collection data. This is a fatal error! An `onDeserializationError` handler must be provided to react to this case. 
```ts const schema = z.object({ From db3eae5070da67f4513167371e4aa65471f33c3a Mon Sep 17 00:00:00 2001 From: stevensJourney Date: Tue, 28 Oct 2025 14:39:53 +0200 Subject: [PATCH 43/56] Add type to README example --- docs/collections/powersync-collection.md | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/docs/collections/powersync-collection.md b/docs/collections/powersync-collection.md index 8acd28648..afc665836 100644 --- a/docs/collections/powersync-collection.md +++ b/docs/collections/powersync-collection.md @@ -245,6 +245,7 @@ const schema = z.object({ name: z.string(), author: z.string(), created_at: z.date(), // Accept Date objects as input + archived: z.boolean(), // Accept Booleans as input }) // Schema to transform from SQLite types to our output types @@ -254,8 +255,10 @@ const deserializationSchema = z.object({ author: z.string(), created_at: z .string() - .nullable() - .transform((val) => (val ? new Date(val) : null)), // SQLite TEXT to Date + .transform((val) => (new Date(val))), // SQLite TEXT to Date + archived: z + .number() + .transform((val) => (val > 0), // SQLite INTEGER to Boolean }) const documentsCollection = createCollection( @@ -269,6 +272,18 @@ const documentsCollection = createCollection( }, }) ) + +/** Note: The types for input and output are defined as this */ +// Used for mutations like `insert` or `update` +type DocumentCollectionInput = { + id: string + name: string + author: string + created_at: Date + archived: boolean +} +// The type of query/data results +type DocumentCollectionOutput = DocumentCollectionInput ``` ## Features From 673824791bcfae04acf42fc35e5d6d8755adceb2 Mon Sep 17 00:00:00 2001 From: Alberto Harka <44508455+sadkebab@users.noreply.github.com> Date: Thu, 30 Oct 2025 14:19:13 +0100 Subject: [PATCH 44/56] Feat: Add support for custom parsers/serializers in LocalStorage collections (#730) * feat(local-storage): add support for custom parsers/serializers * added changeset * feat(local-storage): using parser instead of JSON in loadFromStorage * feat(local-storage): changed argument order in loadFromStorage to respect previous one * feat(local-storage): exporting parser type --- .changeset/cruel-signs-work.md | 5 +++ packages/db/package.json | 1 + packages/db/src/local-storage.ts | 58 +++++++++++++++++-------- packages/db/tests/local-storage.test.ts | 35 +++++++++++++++ pnpm-lock.yaml | 32 +++++++++++++- 5 files changed, 113 insertions(+), 18 deletions(-) create mode 100644 .changeset/cruel-signs-work.md diff --git a/.changeset/cruel-signs-work.md b/.changeset/cruel-signs-work.md new file mode 100644 index 000000000..7ee0aaf8b --- /dev/null +++ b/.changeset/cruel-signs-work.md @@ -0,0 +1,5 @@ +--- +"@tanstack/db": patch +--- + +Added support for custom parsers/serializers like superjson in LocalStorage collections diff --git a/packages/db/package.json b/packages/db/package.json index 9cfb30767..050a97e7b 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -9,6 +9,7 @@ "devDependencies": { "@vitest/coverage-istanbul": "^3.2.4", "arktype": "^2.1.23", + "superjson": "^2.2.5", "temporal-polyfill": "^0.3.0" }, "exports": { diff --git a/packages/db/src/local-storage.ts b/packages/db/src/local-storage.ts index df3b1b2e5..c744cae5c 100644 --- a/packages/db/src/local-storage.ts +++ b/packages/db/src/local-storage.ts @@ -44,6 +44,11 @@ interface StoredItem { data: T } +export interface Parser { + parse: (data: string) => unknown + stringify: (data: unknown) => string +} + /** * Configuration 
interface for localStorage collection options * @template T - The type of items in the collection @@ -71,6 +76,12 @@ export interface LocalStorageCollectionConfig< * Can be any object that implements addEventListener/removeEventListener for storage events */ storageEventApi?: StorageEventApi + + /** + * Parser to use for serializing and deserializing data to and from storage + * Defaults to JSON + */ + parser?: Parser } /** @@ -113,13 +124,18 @@ export interface LocalStorageCollectionUtils extends UtilsRecord { /** * Validates that a value can be JSON serialized + * @param parser - The parser to use for serialization * @param value - The value to validate for JSON serialization * @param operation - The operation type being performed (for error messages) * @throws Error if the value cannot be JSON serialized */ -function validateJsonSerializable(value: any, operation: string): void { +function validateJsonSerializable( + parser: Parser, + value: any, + operation: string +): void { try { - JSON.stringify(value) + parser.stringify(value) } catch (error) { throw new SerializationError( operation, @@ -314,6 +330,9 @@ export function localStorageCollectionOptions( (typeof window !== `undefined` ? window : null) || createNoOpStorageEventApi() + // Default to JSON parser if no parser is provided + const parser = config.parser || JSON + // Track the last known state to detect changes const lastKnownData = new Map>() @@ -322,6 +341,7 @@ export function localStorageCollectionOptions( config.storageKey, storage, storageEventApi, + parser, config.getKey, lastKnownData ) @@ -349,7 +369,7 @@ export function localStorageCollectionOptions( dataMap.forEach((storedItem, key) => { objectData[String(key)] = storedItem }) - const serialized = JSON.stringify(objectData) + const serialized = parser.stringify(objectData) storage.setItem(config.storageKey, serialized) } catch (error) { console.error( @@ -383,7 +403,7 @@ export function localStorageCollectionOptions( const wrappedOnInsert = async (params: InsertMutationFnParams) => { // Validate that all values in the transaction can be JSON serialized params.transaction.mutations.forEach((mutation) => { - validateJsonSerializable(mutation.modified, `insert`) + validateJsonSerializable(parser, mutation.modified, `insert`) }) // Call the user handler BEFORE persisting changes (if provided) @@ -394,7 +414,7 @@ export function localStorageCollectionOptions( // Always persist to storage // Load current data from storage - const currentData = loadFromStorage(config.storageKey, storage) + const currentData = loadFromStorage(config.storageKey, storage, parser) // Add new items with version keys params.transaction.mutations.forEach((mutation) => { @@ -418,7 +438,7 @@ export function localStorageCollectionOptions( const wrappedOnUpdate = async (params: UpdateMutationFnParams) => { // Validate that all values in the transaction can be JSON serialized params.transaction.mutations.forEach((mutation) => { - validateJsonSerializable(mutation.modified, `update`) + validateJsonSerializable(parser, mutation.modified, `update`) }) // Call the user handler BEFORE persisting changes (if provided) @@ -429,7 +449,7 @@ export function localStorageCollectionOptions( // Always persist to storage // Load current data from storage - const currentData = loadFromStorage(config.storageKey, storage) + const currentData = loadFromStorage(config.storageKey, storage, parser) // Update items with new version keys params.transaction.mutations.forEach((mutation) => { @@ -459,7 +479,7 @@ export function 
localStorageCollectionOptions( // Always persist to storage // Load current data from storage - const currentData = loadFromStorage(config.storageKey, storage) + const currentData = loadFromStorage(config.storageKey, storage, parser) // Remove items params.transaction.mutations.forEach((mutation) => { @@ -518,10 +538,10 @@ export function localStorageCollectionOptions( switch (mutation.type) { case `insert`: case `update`: - validateJsonSerializable(mutation.modified, mutation.type) + validateJsonSerializable(parser, mutation.modified, mutation.type) break case `delete`: - validateJsonSerializable(mutation.original, mutation.type) + validateJsonSerializable(parser, mutation.original, mutation.type) break } } @@ -529,7 +549,8 @@ export function localStorageCollectionOptions( // Load current data from storage const currentData = loadFromStorage>( config.storageKey, - storage + storage, + parser ) // Apply each mutation @@ -579,13 +600,15 @@ export function localStorageCollectionOptions( /** * Load data from storage and return as a Map + * @param parser - The parser to use for deserializing the data * @param storageKey - The key used to store data in the storage API * @param storage - The storage API to load from (localStorage, sessionStorage, etc.) * @returns Map of stored items with version tracking, or empty Map if loading fails */ function loadFromStorage( storageKey: string, - storage: StorageApi + storage: StorageApi, + parser: Parser ): Map> { try { const rawData = storage.getItem(storageKey) @@ -593,7 +616,7 @@ function loadFromStorage( return new Map() } - const parsed = JSON.parse(rawData) + const parsed = parser.parse(rawData) const dataMap = new Map>() // Handle object format where keys map to StoredItem values @@ -644,6 +667,7 @@ function createLocalStorageSync( storageKey: string, storage: StorageApi, storageEventApi: StorageEventApi, + parser: Parser, _getKey: (item: T) => string | number, lastKnownData: Map> ): SyncConfig & { @@ -704,7 +728,7 @@ function createLocalStorageSync( const { begin, write, commit } = syncParams // Load the new data - const newData = loadFromStorage(storageKey, storage) + const newData = loadFromStorage(storageKey, storage, parser) // Find the specific changes const changes = findChanges(lastKnownData, newData) @@ -713,7 +737,7 @@ function createLocalStorageSync( begin() changes.forEach(({ type, value }) => { if (value) { - validateJsonSerializable(value, type) + validateJsonSerializable(parser, value, type) write({ type, value }) } }) @@ -739,11 +763,11 @@ function createLocalStorageSync( collection = params.collection // Initial load - const initialData = loadFromStorage(storageKey, storage) + const initialData = loadFromStorage(storageKey, storage, parser) if (initialData.size > 0) { begin() initialData.forEach((storedItem) => { - validateJsonSerializable(storedItem.data, `load`) + validateJsonSerializable(parser, storedItem.data, `load`) write({ type: `insert`, value: storedItem.data }) }) commit() diff --git a/packages/db/tests/local-storage.test.ts b/packages/db/tests/local-storage.test.ts index b991622dc..b68c46831 100644 --- a/packages/db/tests/local-storage.test.ts +++ b/packages/db/tests/local-storage.test.ts @@ -1,4 +1,5 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest" +import superjson from "superjson" import { createCollection } from "../src/index" import { localStorageCollectionOptions } from "../src/local-storage" import { createTransaction } from "../src/transactions" @@ -175,6 +176,40 @@ 
describe(`localStorage collection`, () => { // Restore window globalThis.window = originalWindow }) + + it(`should support custom parsers like superjson`, async () => { + const collection = createCollection( + localStorageCollectionOptions({ + storageKey: `todos`, + storage: mockStorage, + storageEventApi: mockStorageEventApi, + getKey: (item) => item.id, + parser: superjson, + }) + ) + + const todo: Todo = { + id: `1`, + title: `superjson`, + completed: false, + createdAt: new Date(), + } + + const insertTx = collection.insert(todo) + + await insertTx.isPersisted.promise + + const storedData = mockStorage.getItem(`todos`) + expect(storedData).toBeDefined() + + const parsed = superjson.parse>( + storedData! + ) + + expect(parsed[`1`]?.data.title).toBe(`superjson`) + expect(parsed[`1`]?.data.completed).toBe(false) + expect(parsed[`1`]?.data.createdAt).toBeInstanceOf(Date) + }) }) describe(`data persistence`, () => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9c929f92c..647f0c249 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -478,7 +478,7 @@ importers: version: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) + version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) express: specifier: ^4.21.2 version: 4.21.2 @@ -602,6 +602,9 @@ importers: arktype: specifier: ^2.1.23 version: 2.1.23 + superjson: + specifier: ^2.2.5 + version: 2.2.5 temporal-polyfill: specifier: ^0.3.0 version: 0.3.0 @@ -4618,6 +4621,10 @@ packages: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} engines: {node: '>= 0.6'} + copy-anything@4.0.5: + resolution: {integrity: sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==} + engines: {node: '>=18'} + cors@2.8.5: resolution: {integrity: sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==} engines: {node: '>= 0.10'} @@ -5945,6 +5952,10 @@ packages: resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} engines: {node: '>=12.13'} + is-what@5.5.0: + resolution: {integrity: sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==} + engines: {node: '>=18'} + is-windows@1.0.2: resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} engines: {node: '>=0.10.0'} @@ -7742,6 +7753,10 @@ packages: engines: {node: '>=16 || 14 >=14.17'} hasBin: true + superjson@2.2.5: + resolution: {integrity: sha512-zWPTX96LVsA/eVYnqOM2+ofcdPqdS1dAF1LN4TS2/MWuUpfitd9ctTa87wt4xrYnZnkLtS69xpBdSxVBP5Rm6w==} + engines: {node: '>=16'} + supports-color@7.2.0: resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} @@ -12903,6 +12918,10 @@ snapshots: cookie@0.7.2: {} + copy-anything@4.0.5: + dependencies: + is-what: 5.5.0 + cors@2.8.5: dependencies: object-assign: 4.1.1 @@ -13119,6 +13138,11 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 + drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): + dependencies: + drizzle-orm: 
0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) + zod: 3.25.76 + drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): dependencies: drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) @@ -14395,6 +14419,8 @@ snapshots: is-what@4.1.16: {} + is-what@5.5.0: {} + is-windows@1.0.2: {} isarray@2.0.5: {} @@ -16441,6 +16467,10 @@ snapshots: pirates: 4.0.7 ts-interface-checker: 0.1.13 + superjson@2.2.5: + dependencies: + copy-anything: 4.0.5 + supports-color@7.2.0: dependencies: has-flag: 4.0.0 From 7b9c681d45f08a71c3fb165b6e85acf31af0fcd6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 30 Oct 2025 11:34:39 -0600 Subject: [PATCH 45/56] ci: Version Packages (#731) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- .changeset/cruel-signs-work.md | 5 ----- examples/angular/todos/CHANGELOG.md | 8 ++++++++ examples/angular/todos/package.json | 2 +- examples/react/projects/package.json | 4 ++-- examples/react/todo/CHANGELOG.md | 10 ++++++++++ examples/react/todo/package.json | 2 +- packages/angular-db/CHANGELOG.md | 7 +++++++ packages/angular-db/package.json | 2 +- packages/db/CHANGELOG.md | 6 ++++++ packages/db/package.json | 2 +- packages/electric-db-collection/CHANGELOG.md | 7 +++++++ packages/electric-db-collection/package.json | 2 +- packages/query-db-collection/CHANGELOG.md | 7 +++++++ packages/query-db-collection/package.json | 2 +- packages/react-db/CHANGELOG.md | 7 +++++++ packages/react-db/package.json | 2 +- packages/rxdb-db-collection/CHANGELOG.md | 7 +++++++ packages/rxdb-db-collection/package.json | 2 +- packages/solid-db/CHANGELOG.md | 7 +++++++ packages/solid-db/package.json | 2 +- packages/svelte-db/CHANGELOG.md | 7 +++++++ packages/svelte-db/package.json | 2 +- packages/trailbase-db-collection/CHANGELOG.md | 7 +++++++ packages/trailbase-db-collection/package.json | 2 +- packages/vue-db/CHANGELOG.md | 7 +++++++ packages/vue-db/package.json | 2 +- pnpm-lock.yaml | 11 +++-------- 27 files changed, 104 insertions(+), 27 deletions(-) delete mode 100644 .changeset/cruel-signs-work.md diff --git a/.changeset/cruel-signs-work.md b/.changeset/cruel-signs-work.md deleted file mode 100644 index 7ee0aaf8b..000000000 --- a/.changeset/cruel-signs-work.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@tanstack/db": patch ---- - -Added support for custom parsers/serializers like superjson in LocalStorage collections diff --git a/examples/angular/todos/CHANGELOG.md b/examples/angular/todos/CHANGELOG.md index 7278a465c..656567218 100644 --- a/examples/angular/todos/CHANGELOG.md +++ b/examples/angular/todos/CHANGELOG.md @@ -1,5 +1,13 @@ # todos +## 0.0.16 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + - @tanstack/angular-db@0.1.20 + ## 0.0.15 ### Patch Changes diff --git a/examples/angular/todos/package.json b/examples/angular/todos/package.json index e500a400a..cb1778a61 100644 --- a/examples/angular/todos/package.json +++ b/examples/angular/todos/package.json @@ -1,6 +1,6 @@ { "name": "todos", - "version": "0.0.15", + "version": "0.0.16", "scripts": { "ng": "ng", "start": "ng serve", diff --git a/examples/react/projects/package.json b/examples/react/projects/package.json index 533f25b75..f3817334a 100644 --- 
a/examples/react/projects/package.json +++ b/examples/react/projects/package.json @@ -17,8 +17,8 @@ "dependencies": { "@tailwindcss/vite": "^4.1.16", "@tanstack/query-core": "^5.90.5", - "@tanstack/query-db-collection": "^0.2.37", - "@tanstack/react-db": "^0.1.36", + "@tanstack/query-db-collection": "^0.2.38", + "@tanstack/react-db": "^0.1.37", "@tanstack/react-router": "^1.133.32", "@tanstack/react-router-devtools": "^1.133.32", "@tanstack/react-router-with-query": "^1.130.17", diff --git a/examples/react/todo/CHANGELOG.md b/examples/react/todo/CHANGELOG.md index 28ea29fc5..5793a3fb2 100644 --- a/examples/react/todo/CHANGELOG.md +++ b/examples/react/todo/CHANGELOG.md @@ -1,5 +1,15 @@ # examples/react/todo +## 0.1.17 + +### Patch Changes + +- Updated dependencies []: + - @tanstack/electric-db-collection@0.1.39 + - @tanstack/query-db-collection@0.2.38 + - @tanstack/react-db@0.1.37 + - @tanstack/trailbase-db-collection@0.1.37 + ## 0.1.16 ### Patch Changes diff --git a/examples/react/todo/package.json b/examples/react/todo/package.json index 509d7fade..ca23f81f6 100644 --- a/examples/react/todo/package.json +++ b/examples/react/todo/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db-example-react-todo", "private": true, - "version": "0.1.16", + "version": "0.1.17", "dependencies": { "@tanstack/electric-db-collection": "workspace:^", "@tanstack/query-core": "^5.90.5", diff --git a/packages/angular-db/CHANGELOG.md b/packages/angular-db/CHANGELOG.md index fb7488154..59d9dd26e 100644 --- a/packages/angular-db/CHANGELOG.md +++ b/packages/angular-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/angular-db +## 0.1.20 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.19 ### Patch Changes diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index bdb37d443..7c299837c 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/angular-db", "description": "Angular integration for @tanstack/db", - "version": "0.1.19", + "version": "0.1.20", "author": "Ethan McDaniel", "license": "MIT", "repository": { diff --git a/packages/db/CHANGELOG.md b/packages/db/CHANGELOG.md index 921139ba0..afcfa6ad3 100644 --- a/packages/db/CHANGELOG.md +++ b/packages/db/CHANGELOG.md @@ -1,5 +1,11 @@ # @tanstack/db +## 0.4.15 + +### Patch Changes + +- Added support for custom parsers/serializers like superjson in LocalStorage collections ([#730](https://github.com/TanStack/db/pull/730)) + ## 0.4.14 ### Patch Changes diff --git a/packages/db/package.json b/packages/db/package.json index 050a97e7b..64e8fdf69 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/db", "description": "A reactive client store for building super fast apps on sync", - "version": "0.4.14", + "version": "0.4.15", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db-ivm": "workspace:*" diff --git a/packages/electric-db-collection/CHANGELOG.md b/packages/electric-db-collection/CHANGELOG.md index 8c099cfca..10765ba5b 100644 --- a/packages/electric-db-collection/CHANGELOG.md +++ b/packages/electric-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/electric-db-collection +## 0.1.39 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.38 ### Patch Changes diff 
--git a/packages/electric-db-collection/package.json b/packages/electric-db-collection/package.json index 7e9405d61..429e96c43 100644 --- a/packages/electric-db-collection/package.json +++ b/packages/electric-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/electric-db-collection", "description": "ElectricSQL collection for TanStack DB", - "version": "0.1.38", + "version": "0.1.39", "dependencies": { "@electric-sql/client": "^1.1.0", "@standard-schema/spec": "^1.0.0", diff --git a/packages/query-db-collection/CHANGELOG.md b/packages/query-db-collection/CHANGELOG.md index fd502dbf9..f344a121a 100644 --- a/packages/query-db-collection/CHANGELOG.md +++ b/packages/query-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/query-db-collection +## 0.2.38 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.2.37 ### Patch Changes diff --git a/packages/query-db-collection/package.json b/packages/query-db-collection/package.json index e8cf54355..e8186f3ef 100644 --- a/packages/query-db-collection/package.json +++ b/packages/query-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/query-db-collection", "description": "TanStack Query collection for TanStack DB", - "version": "0.2.37", + "version": "0.2.38", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*" diff --git a/packages/react-db/CHANGELOG.md b/packages/react-db/CHANGELOG.md index 8c668a4a0..32d754c9a 100644 --- a/packages/react-db/CHANGELOG.md +++ b/packages/react-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.37 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.36 ### Patch Changes diff --git a/packages/react-db/package.json b/packages/react-db/package.json index 06accb5b4..ccb0f6dbc 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/react-db", "description": "React integration for @tanstack/db", - "version": "0.1.36", + "version": "0.1.37", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/rxdb-db-collection/CHANGELOG.md b/packages/rxdb-db-collection/CHANGELOG.md index 957d87cfb..d3bf1cb75 100644 --- a/packages/rxdb-db-collection/CHANGELOG.md +++ b/packages/rxdb-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/rxdb-db-collection +## 0.1.26 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.25 ### Patch Changes diff --git a/packages/rxdb-db-collection/package.json b/packages/rxdb-db-collection/package.json index 87b9e31e7..6ab2a84b5 100644 --- a/packages/rxdb-db-collection/package.json +++ b/packages/rxdb-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/rxdb-db-collection", "description": "RxDB collection for TanStack DB", - "version": "0.1.25", + "version": "0.1.26", "dependencies": { "rxdb": "16.20.0", "@standard-schema/spec": "^1.0.0", diff --git a/packages/solid-db/CHANGELOG.md b/packages/solid-db/CHANGELOG.md index e3c0b7f7c..f5ea64245 100644 --- a/packages/solid-db/CHANGELOG.md +++ b/packages/solid-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/react-db +## 0.1.37 + +### Patch Changes + +- Updated dependencies 
[[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.36 ### Patch Changes diff --git a/packages/solid-db/package.json b/packages/solid-db/package.json index 5959bd75e..cd7f62d5b 100644 --- a/packages/solid-db/package.json +++ b/packages/solid-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/solid-db", "description": "Solid integration for @tanstack/db", - "version": "0.1.36", + "version": "0.1.37", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/packages/svelte-db/CHANGELOG.md b/packages/svelte-db/CHANGELOG.md index e789ee0ea..a85110675 100644 --- a/packages/svelte-db/CHANGELOG.md +++ b/packages/svelte-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/svelte-db +## 0.1.37 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.36 ### Patch Changes diff --git a/packages/svelte-db/package.json b/packages/svelte-db/package.json index 7f5c51cdc..8266f5da2 100644 --- a/packages/svelte-db/package.json +++ b/packages/svelte-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/svelte-db", "description": "Svelte integration for @tanstack/db", - "version": "0.1.36", + "version": "0.1.37", "dependencies": { "@tanstack/db": "workspace:*" }, diff --git a/packages/trailbase-db-collection/CHANGELOG.md b/packages/trailbase-db-collection/CHANGELOG.md index 598b8d35e..59e42febb 100644 --- a/packages/trailbase-db-collection/CHANGELOG.md +++ b/packages/trailbase-db-collection/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/trailbase-db-collection +## 0.1.37 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.1.36 ### Patch Changes diff --git a/packages/trailbase-db-collection/package.json b/packages/trailbase-db-collection/package.json index 7521bdf94..cd5303767 100644 --- a/packages/trailbase-db-collection/package.json +++ b/packages/trailbase-db-collection/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/trailbase-db-collection", "description": "TrailBase collection for TanStack DB", - "version": "0.1.36", + "version": "0.1.37", "dependencies": { "@standard-schema/spec": "^1.0.0", "@tanstack/db": "workspace:*", diff --git a/packages/vue-db/CHANGELOG.md b/packages/vue-db/CHANGELOG.md index 43d15c282..a04f7c96f 100644 --- a/packages/vue-db/CHANGELOG.md +++ b/packages/vue-db/CHANGELOG.md @@ -1,5 +1,12 @@ # @tanstack/vue-db +## 0.0.70 + +### Patch Changes + +- Updated dependencies [[`6738247`](https://github.com/TanStack/db/commit/673824791bcfae04acf42fc35e5d6d8755adceb2)]: + - @tanstack/db@0.4.15 + ## 0.0.69 ### Patch Changes diff --git a/packages/vue-db/package.json b/packages/vue-db/package.json index 0a04934ff..1b3526682 100644 --- a/packages/vue-db/package.json +++ b/packages/vue-db/package.json @@ -1,7 +1,7 @@ { "name": "@tanstack/vue-db", "description": "Vue integration for @tanstack/db", - "version": "0.0.69", + "version": "0.0.70", "author": "Kyle Mathews", "license": "MIT", "repository": { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 647f0c249..9b42c57b0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -196,10 +196,10 @@ importers: specifier: ^5.90.5 version: 5.90.5 '@tanstack/query-db-collection': - specifier: ^0.2.37 + specifier: ^0.2.38 version: link:../../../packages/query-db-collection '@tanstack/react-db': - specifier: ^0.1.36 + specifier: ^0.1.37 version: 
link:../../../packages/react-db '@tanstack/react-router': specifier: ^1.133.32 @@ -478,7 +478,7 @@ importers: version: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) drizzle-zod: specifier: ^0.8.3 - version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76) + version: 0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11) express: specifier: ^4.21.2 version: 4.21.2 @@ -13138,11 +13138,6 @@ snapshots: pg: 8.16.3 postgres: 3.4.7 - drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@3.25.76): - dependencies: - drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) - zod: 3.25.76 - drizzle-zod@0.8.3(drizzle-orm@0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7))(zod@4.1.11): dependencies: drizzle-orm: 0.44.7(@types/pg@8.15.5)(gel@2.1.1)(kysely@0.28.5)(pg@8.16.3)(postgres@3.4.7) From 7e9a1d87f8a40696ac0ec49f7c83cf6710be32d8 Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 31 Oct 2025 04:14:16 -0600 Subject: [PATCH 46/56] Fix flaky test (#735) fix(react-db): fix flaky test by preventing race condition The test "optimistic state is dropped after commit" was flaky because it had a race condition: 1. The test would wait for state size to become 4 2. Then immediately check that the temp-key exists 3. However, the async mutation (with only 10ms delay) could complete between steps 1 and 2 Fixed by moving all assertions into the same waitFor() block, ensuring they execute atomically. This prevents the mutation from completing between the size check and the temp-key verification. Co-authored-by: Claude --- packages/react-db/tests/useLiveQuery.test.tsx | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/react-db/tests/useLiveQuery.test.tsx b/packages/react-db/tests/useLiveQuery.test.tsx index 214e9238d..fec064163 100644 --- a/packages/react-db/tests/useLiveQuery.test.tsx +++ b/packages/react-db/tests/useLiveQuery.test.tsx @@ -903,13 +903,13 @@ describe(`Query Collections`, () => { await waitFor(() => { // Verify optimistic state is immediately reflected expect(result.current.state.size).toBe(4) + expect(result.current.state.get(`[temp-key,1]`)).toMatchObject({ + id: `temp-key`, + name: `John Doe`, + title: `New Issue`, + }) + expect(result.current.state.get(`[4,1]`)).toBeUndefined() }) - expect(result.current.state.get(`[temp-key,1]`)).toMatchObject({ - id: `temp-key`, - name: `John Doe`, - title: `New Issue`, - }) - expect(result.current.state.get(`[4,1]`)).toBeUndefined() // Wait for the transaction to be committed await transaction.isPersisted.promise From 9e4cbef4ba1f864eccf9c0c58a167b79a075cfe2 Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 31 Oct 2025 05:46:19 -0600 Subject: [PATCH 47/56] Document how to destructure in Svelte (#733) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs(svelte-db): Add documentation for destructuring reactivity issue (#414) ## Summary This commit addresses issue #414 where users reported that destructuring the return value from useLiveQuery() breaks reactivity in Svelte 5. ## Root Cause This is a fundamental limitation of Svelte 5's reactivity system, not a bug in the library. 
When objects with getters are destructured, the destructuring evaluates getters once and captures the values at that moment, losing the reactive connection. ## Solution Added comprehensive documentation explaining: - Why direct destructuring breaks reactivity - Two correct usage patterns: 1. Use dot notation (recommended): `query.data`, `query.isLoading` 2. Wrap with $derived: `const { data } = $derived(query)` ## Changes - Updated JSDoc comments in useLiveQuery.svelte.ts with detailed explanation and examples - Updated README.md with clear usage guidelines - Added test case demonstrating the correct $derived pattern - All 23 existing tests continue to pass ## References - Issue: #414 - Svelte documentation: https://github.com/sveltejs/svelte/issues/11002 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * chore(svelte-db): Revert README changes to keep it minimal The README is intentionally kept small, so reverting the detailed documentation. The comprehensive documentation remains in the JSDoc comments in useLiveQuery.svelte.ts. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * chore: Remove package-lock.json (project uses pnpm) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- packages/svelte-db/src/useLiveQuery.svelte.ts | 35 +++++++- .../tests/useLiveQuery.svelte.test.ts | 90 +++++++++++++++++++ 2 files changed, 124 insertions(+), 1 deletion(-) diff --git a/packages/svelte-db/src/useLiveQuery.svelte.ts b/packages/svelte-db/src/useLiveQuery.svelte.ts index 4c3625fdc..c9ae8d474 100644 --- a/packages/svelte-db/src/useLiveQuery.svelte.ts +++ b/packages/svelte-db/src/useLiveQuery.svelte.ts @@ -68,13 +68,38 @@ function toValue(value: MaybeGetter): T { * @param queryFn - Query function that defines what data to fetch * @param deps - Array of reactive dependencies that trigger query re-execution when changed * @returns Reactive object with query data, state, and status information + * + * @remarks + * **IMPORTANT - Destructuring in Svelte 5:** + * Direct destructuring breaks reactivity. To destructure, wrap with `$derived`: + * + * ❌ **Incorrect** - Loses reactivity: + * ```ts + * const { data, isLoading } = useLiveQuery(...) + * ``` + * + * ✅ **Correct** - Maintains reactivity: + * ```ts + * // Option 1: Use dot notation (recommended) + * const query = useLiveQuery(...) + * // Access: query.data, query.isLoading + * + * // Option 2: Wrap with $derived for destructuring + * const query = useLiveQuery(...) + * const { data, isLoading } = $derived(query) + * ``` + * + * This is a fundamental Svelte 5 limitation, not a library bug. See: + * https://github.com/sveltejs/svelte/issues/11002 + * * @example - * // Basic query with object syntax + * // Basic query with object syntax (recommended pattern) * const todosQuery = useLiveQuery((q) => * q.from({ todos: todosCollection }) * .where(({ todos }) => eq(todos.completed, false)) * .select(({ todos }) => ({ id: todos.id, text: todos.text })) * ) + * // Access via: todosQuery.data, todosQuery.isLoading, etc. 
* * @example * // With reactive dependencies @@ -86,6 +111,14 @@ function toValue(value: MaybeGetter): T { * ) * * @example + * // Destructuring with $derived (if needed) + * const query = useLiveQuery((q) => + * q.from({ todos: todosCollection }) + * ) + * const { data, isLoading, isError } = $derived(query) + * // Now data, isLoading, and isError maintain reactivity + * + * @example * // Join pattern * const issuesQuery = useLiveQuery((q) => * q.from({ issues: issueCollection }) diff --git a/packages/svelte-db/tests/useLiveQuery.svelte.test.ts b/packages/svelte-db/tests/useLiveQuery.svelte.test.ts index 5916c9da0..cd48a248a 100644 --- a/packages/svelte-db/tests/useLiveQuery.svelte.test.ts +++ b/packages/svelte-db/tests/useLiveQuery.svelte.test.ts @@ -116,6 +116,96 @@ describe(`Query Collections`, () => { }) }) + it(`should maintain reactivity when destructuring return values with $derived`, () => { + const collection = createCollection( + mockSyncCollectionOptions({ + id: `test-persons-destructure`, + getKey: (person: Person) => person.id, + initialData: initialPersons, + }) + ) + + cleanup = $effect.root(() => { + // IMPORTANT: In Svelte 5, destructuring breaks reactivity unless wrapped in $derived + // This is the correct pattern for destructuring (Issue #414) + const query = useLiveQuery((q) => + q + .from({ persons: collection }) + .where(({ persons }) => gt(persons.age, 30)) + .select(({ persons }) => ({ + id: persons.id, + name: persons.name, + age: persons.age, + })) + ) + + // Destructure using $derived to maintain reactivity + const { data, state, isReady, isLoading } = $derived(query) + + flushSync() + + // Initial state checks + expect(isReady).toBe(true) + expect(isLoading).toBe(false) + expect(state.size).toBe(1) + expect(data).toHaveLength(1) + expect(data[0]).toMatchObject({ + id: `3`, + name: `John Smith`, + age: 35, + }) + + // Add a new person that matches the filter + collection.utils.begin() + collection.utils.write({ + type: `insert`, + value: { + id: `4`, + name: `Alice Johnson`, + age: 40, + email: `alice.johnson@example.com`, + isActive: true, + team: `team1`, + }, + }) + collection.utils.commit() + + flushSync() + + // Verify destructured values are still reactive after collection change + expect(state.size).toBe(2) + expect(data).toHaveLength(2) + expect(data.some((p) => p.id === `4`)).toBe(true) + expect(data.some((p) => p.id === `3`)).toBe(true) + + // Remove a person + collection.utils.begin() + collection.utils.write({ + type: `delete`, + value: { + id: `3`, + name: `John Smith`, + age: 35, + email: `john.smith@example.com`, + isActive: true, + team: `team1`, + }, + }) + collection.utils.commit() + + flushSync() + + // Verify destructured values still track changes + expect(state.size).toBe(1) + expect(data).toHaveLength(1) + expect(data[0]).toMatchObject({ + id: `4`, + name: `Alice Johnson`, + age: 40, + }) + }) + }) + it(`should be able to query a collection with live updates`, () => { const collection = createCollection( mockSyncCollectionOptions({ From f8a979ba3aa90ac7e85f7a065fc050bda6589b4b Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 31 Oct 2025 06:48:47 -0600 Subject: [PATCH 48/56] Fix: Optimizer Missing Final Step - Combine Remaining WHERE Clauses (#732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Optimize queries without joins by combining multiple WHERE clauses Addresses issue #445 - performance slowdown when using multiple .where() calls. 
## Problem When using multiple .where() calls on a query without joins: ```javascript query.from({ item: collection }) .where(({ item }) => eq(item.gridId, gridId)) .where(({ item }) => eq(item.rowId, rowId)) .where(({ item }) => eq(item.side, side)) ``` The optimizer was skipping these queries entirely, leaving multiple WHERE clauses in an array. During query compilation, each WHERE clause was applied as a separate filter() operation in the D2 pipeline, causing a 40%+ performance degradation compared to using a single WHERE clause with AND. ## Solution Modified the optimizer to combine multiple WHERE clauses into a single AND expression for queries without joins. This ensures only one filter operator is added to the pipeline, improving performance while maintaining correct semantics. The optimizer now: 1. Detects queries without joins that have multiple WHERE clauses 2. Combines them using the AND function 3. Reduces pipeline complexity from N filters to 1 filter ## Testing - Updated existing optimizer tests to reflect the new behavior - All 42 optimizer tests pass - Added new test case for combining multiple WHERE clauses without joins 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: Add changeset and investigation report for issue #445 - Added changeset for the WHERE clause optimization fix - Documented root cause analysis and solution details 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: Complete optimizer fix - combine remaining WHERE clauses after pushdown This completes the fix for issue #445 by implementing the missing "step 3" of the optimizer process. ## Problem (Broader than Initially Identified) The optimizer was missing the final step of combining remaining WHERE clauses after optimization. This affected: 1. Queries WITHOUT joins: All optimization was skipped, leaving multiple WHERE clauses as separate array elements 2. Queries WITH joins: After predicate pushdown, remaining WHERE clauses (multi-source + unpushable single-source) were left as separate elements Both cases resulted in multiple filter() operations in the pipeline instead of a single combined filter, causing 40%+ performance degradation. ## Solution Implemented "step 3" (combine remaining WHERE clauses) in two places: 1. **applySingleLevelOptimization**: For queries without joins, combine multiple WHERE clauses before returning 2. **applyOptimizations**: After predicate pushdown for queries with joins, combine all remaining WHERE clauses (multi-source + unpushable) ## Testing - Added test: "should combine multiple remaining WHERE clauses after optimization" - All 43 optimizer tests pass - Updated investigation report with complete analysis - Updated changeset to reflect the complete fix Thanks to colleague feedback for catching that step 3 was missing! 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * style: Run prettier on markdown files 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: Add PR body update for issue #445 fix 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: Remove specific 40% performance claim The original issue compared TanStack db with Redux, not the bug itself. Changed to more general language about performance degradation. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: Remove temporary investigation and PR body files These were used for context during development but aren't needed in the repo. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: Flatten nested AND expressions when combining WHERE clauses Addresses reviewer feedback - when combining remaining WHERE clauses after predicate pushdown, flatten any nested AND expressions to avoid creating and(and(...), ...) structures. Changes: - Use flatMap(splitAndClausesRecursive) before combineWithAnd to flatten - Added test for nested AND flattening - Added test verifying functional WHERE clauses remain separate All 45 optimizer tests pass. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * style: Remove issue reference from code comment As requested by @samwillis - issue references in code comments can become stale. The comment is self-explanatory without the reference. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- .changeset/optimize-multiple-where-clauses.md | 10 + packages/db/src/query/optimizer.ts | 35 ++- packages/db/tests/query/optimizer.test.ts | 295 +++++++++++++++--- 3 files changed, 285 insertions(+), 55 deletions(-) create mode 100644 .changeset/optimize-multiple-where-clauses.md diff --git a/.changeset/optimize-multiple-where-clauses.md b/.changeset/optimize-multiple-where-clauses.md new file mode 100644 index 000000000..6f2d55850 --- /dev/null +++ b/.changeset/optimize-multiple-where-clauses.md @@ -0,0 +1,10 @@ +--- +"@tanstack/db": patch +--- + +Fixed performance issue where using multiple `.where()` calls created multiple filter operators in the query pipeline. The optimizer now implements the missing final step (step 3) of combining remaining WHERE clauses into a single AND expression. This applies to both queries with and without joins: + +- Queries without joins: Multiple WHERE clauses are now combined before compilation +- Queries with joins: Remaining WHERE clauses after predicate pushdown are combined + +This reduces filter operators from N to 1, making chained `.where()` calls perform identically to using a single `.where()` with `and()`. 
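For readers skimming the changeset, here is a minimal sketch of the two query shapes it refers to. This is illustrative only: `itemCollection`, `gridId`, `rowId`, and `side` are placeholders taken from the issue example quoted in the commit message, the builder callback mirrors the `q.from(...)` pattern used elsewhere in this series, and `eq`/`and` are assumed to be the expression helpers exported by `@tanstack/db`.

```ts
import { and, eq } from "@tanstack/db"

// Placeholders standing in for the collection and filter values from the
// issue example; these are not real exports.
declare const itemCollection: any
declare const gridId: string
declare const rowId: string
declare const side: string

// Chained `.where()` calls: before this patch, each call compiled to its
// own filter operator in the pipeline.
const chained = (q: any) =>
  q
    .from({ item: itemCollection })
    .where(({ item }: any) => eq(item.gridId, gridId))
    .where(({ item }: any) => eq(item.rowId, rowId))
    .where(({ item }: any) => eq(item.side, side))

// A single `.where()` with `and()`: always compiled to one filter operator.
const single = (q: any) =>
  q
    .from({ item: itemCollection })
    .where(({ item }: any) =>
      and(eq(item.gridId, gridId), eq(item.rowId, rowId), eq(item.side, side))
    )

// With the combine step added in this patch, the optimizer rewrites the
// `chained` form into the same single-AND shape as `single`, so both now
// produce one filter in the compiled pipeline.
```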
diff --git a/packages/db/src/query/optimizer.ts b/packages/db/src/query/optimizer.ts index 738eec95a..71927da0d 100644 --- a/packages/db/src/query/optimizer.ts +++ b/packages/db/src/query/optimizer.ts @@ -330,9 +330,22 @@ function applySingleLevelOptimization(query: QueryIR): QueryIR { return query } - // Skip optimization if there are no joins - predicate pushdown only benefits joins - // Single-table queries don't benefit from this optimization + // For queries without joins, combine multiple WHERE clauses into a single clause + // to avoid creating multiple filter operators in the pipeline if (!query.join || query.join.length === 0) { + // Only optimize if there are multiple WHERE clauses to combine + if (query.where.length > 1) { + // Combine multiple WHERE clauses into a single AND expression + const splitWhereClauses = splitAndClauses(query.where) + const combinedWhere = combineWithAnd(splitWhereClauses) + + return { + ...query, + where: [combinedWhere], + } + } + + // For single WHERE clauses, no optimization needed return query } @@ -674,6 +687,20 @@ function applyOptimizations( // If optimized and no outer JOINs - don't keep (original behavior) } + // Combine multiple remaining WHERE clauses into a single clause to avoid + // multiple filter operations in the pipeline (performance optimization) + // First flatten any nested AND expressions to avoid and(and(...), ...) + const finalWhere: Array = + remainingWhereClauses.length > 1 + ? [ + combineWithAnd( + remainingWhereClauses.flatMap((clause) => + splitAndClausesRecursive(getWhereExpression(clause)) + ) + ), + ] + : remainingWhereClauses + // Create a completely new query object to ensure immutability const optimizedQuery: QueryIR = { // Copy all non-optimized fields as-is @@ -692,8 +719,8 @@ function applyOptimizations( from: optimizedFrom, join: optimizedJoins, - // Only include WHERE clauses that weren't successfully optimized - where: remainingWhereClauses.length > 0 ? remainingWhereClauses : [], + // Include combined WHERE clauses + where: finalWhere.length > 0 ? 
finalWhere : [], } return optimizedQuery diff --git a/packages/db/tests/query/optimizer.test.ts b/packages/db/tests/query/optimizer.test.ts index 9e25bb2d4..3748be66d 100644 --- a/packages/db/tests/query/optimizer.test.ts +++ b/packages/db/tests/query/optimizer.test.ts @@ -69,16 +69,41 @@ describe(`Query Optimizer`, () => { expect(optimized).toEqual(query) }) - test(`should skip optimization for queries without joins`, () => { + test(`should skip optimization for queries without joins and single WHERE clause`, () => { const query: QueryIR = { from: new CollectionRef(mockCollection, `u`), where: [createEq(createPropRef(`u`, `department_id`), createValue(1))], } const { optimizedQuery: optimized } = optimizeQuery(query) - // Query should remain unchanged since there are no joins to optimize + // Query should remain unchanged since there is only one WHERE clause expect(optimized).toEqual(query) }) + + test(`should combine multiple WHERE clauses for queries without joins`, () => { + const query: QueryIR = { + from: new CollectionRef(mockCollection, `u`), + where: [ + createEq(createPropRef(`u`, `department_id`), createValue(1)), + createGt(createPropRef(`u`, `salary`), createValue(50000)), + createEq(createPropRef(`u`, `active`), createValue(true)), + ], + } + + const { optimizedQuery: optimized } = optimizeQuery(query) + + // The WHERE clauses should be combined into a single AND expression + expect(optimized.where).toHaveLength(1) + expect(optimized.where![0]).toMatchObject({ + type: `func`, + name: `and`, + args: [ + createEq(createPropRef(`u`, `department_id`), createValue(1)), + createGt(createPropRef(`u`, `salary`), createValue(50000)), + createEq(createPropRef(`u`, `active`), createValue(true)), + ], + }) + }) }) describe(`Single Source Optimization with Joins`, () => { @@ -518,16 +543,19 @@ describe(`Query Optimizer`, () => { const { optimizedQuery: optimized } = optimizeQuery(query) - // The existing subquery should have both WHERE clauses + // The existing subquery should have WHERE clauses combined for performance expect(optimized.from.type).toBe(`queryRef`) if (optimized.from.type === `queryRef`) { - expect(optimized.from.query.where).toHaveLength(2) - expect(optimized.from.query.where![0]).toEqual( - createGt(createPropRef(`u`, `id`), createValue(50)) - ) - expect(optimized.from.query.where![1]).toEqual( - createEq(createPropRef(`u`, `department_id`), createValue(1)) - ) + // After optimization, the WHERE clauses are combined into a single AND expression + expect(optimized.from.query.where).toHaveLength(1) + expect(optimized.from.query.where![0]).toMatchObject({ + type: `func`, + name: `and`, + args: [ + createGt(createPropRef(`u`, `id`), createValue(50)), + createEq(createPropRef(`u`, `department_id`), createValue(1)), + ], + }) } }) @@ -558,10 +586,11 @@ describe(`Query Optimizer`, () => { const { optimizedQuery: optimized } = optimizeQuery(query) - // The deeply nested structure should be preserved and new WHERE clause added + // The deeply nested structure should be preserved and WHERE clauses combined expect(optimized.from.type).toBe(`queryRef`) if (optimized.from.type === `queryRef`) { - expect(optimized.from.query.where).toHaveLength(2) + // WHERE clauses are combined for performance + expect(optimized.from.query.where).toHaveLength(1) expect(optimized.from.query.from.type).toBe(`queryRef`) } }) @@ -746,18 +775,20 @@ describe(`Query Optimizer`, () => { const { optimizedQuery: optimized } = optimizeQuery(nestedQuery) - // The new WHERE clause should be pushed to the nested 
level + // The new WHERE clause should be pushed to the nested level and combined expect(optimized.where).toEqual([]) expect(optimized.from.type).toBe(`queryRef`) if (optimized.from.type === `queryRef`) { - // Should have both WHERE clauses at the inner level - expect(optimized.from.query.where).toHaveLength(2) - expect(optimized.from.query.where).toContainEqual( - createGt(createPropRef(`u`, `id`), createValue(10)) - ) - expect(optimized.from.query.where).toContainEqual( - createEq(createPropRef(`u`, `department_id`), createValue(1)) - ) + // WHERE clauses are combined into a single AND expression for performance + expect(optimized.from.query.where).toHaveLength(1) + expect(optimized.from.query.where![0]).toMatchObject({ + type: `func`, + name: `and`, + args: [ + createGt(createPropRef(`u`, `id`), createValue(10)), + createEq(createPropRef(`u`, `department_id`), createValue(1)), + ], + }) } }) @@ -790,20 +821,25 @@ describe(`Query Optimizer`, () => { const { optimizedQuery: optimized } = optimizeQuery(deeplyNestedQuery) - // Should at least push the top-level WHERE clause down one level + // Should at least push the top-level WHERE clause down one level and combine them expect(optimized.where).toEqual([]) expect(optimized.from.type).toBe(`queryRef`) if (optimized.from.type === `queryRef`) { const innerQuery = optimized.from.query - // The department_id clause should be pushed to this level - expect(innerQuery.where).toContainEqual( - createEq(createPropRef(`u`, `department_id`), createValue(1)) - ) - - // The age clause should remain here or be pushed deeper - expect(innerQuery.where).toContainEqual( + // The WHERE clauses should be combined into a single AND expression + expect(innerQuery.where).toHaveLength(1) + expect(innerQuery.where![0]).toMatchObject({ + type: `func`, + name: `and`, + }) + // Verify both conditions are in the combined expression + const combinedWhere = innerQuery.where![0] as any + expect(combinedWhere.args).toContainEqual( createLt(createPropRef(`u`, `age`), createValue(50)) ) + expect(combinedWhere.args).toContainEqual( + createEq(createPropRef(`u`, `department_id`), createValue(1)) + ) } }) @@ -885,16 +921,19 @@ describe(`Query Optimizer`, () => { createEq(createPropRef(`u`, `id`), createPropRef(`p`, `author_id`)) ) - // Single-source clauses should be pushed to their respective subqueries + // Single-source clauses should be pushed to their respective subqueries and combined expect(optimized.from.type).toBe(`queryRef`) if (optimized.from.type === `queryRef`) { - expect(optimized.from.query.where).toHaveLength(2) // Original + new clause - expect(optimized.from.query.where).toContainEqual( - createGt(createPropRef(`u`, `age`), createValue(25)) - ) - expect(optimized.from.query.where).toContainEqual( - createEq(createPropRef(`u`, `department_id`), createValue(1)) - ) + // WHERE clauses are combined for performance + expect(optimized.from.query.where).toHaveLength(1) + expect(optimized.from.query.where![0]).toMatchObject({ + type: `func`, + name: `and`, + args: [ + createGt(createPropRef(`u`, `age`), createValue(25)), + createEq(createPropRef(`u`, `department_id`), createValue(1)), + ], + }) } expect(optimized.join).toHaveLength(1) @@ -902,13 +941,16 @@ describe(`Query Optimizer`, () => { const joinClause = optimized.join[0]! 
expect(joinClause.from.type).toBe(`queryRef`) if (joinClause.from.type === `queryRef`) { - expect(joinClause.from.query.where).toHaveLength(2) // Original + new clause - expect(joinClause.from.query.where).toContainEqual( - createGt(createPropRef(`p`, `views`), createValue(50)) - ) - expect(joinClause.from.query.where).toContainEqual( - createGt(createPropRef(`p`, `rating`), createValue(4)) - ) + // WHERE clauses are combined for performance + expect(joinClause.from.query.where).toHaveLength(1) + expect(joinClause.from.query.where![0]).toMatchObject({ + type: `func`, + name: `and`, + args: [ + createGt(createPropRef(`p`, `views`), createValue(50)), + createGt(createPropRef(`p`, `rating`), createValue(4)), + ], + }) } } }) @@ -1041,18 +1083,21 @@ describe(`Query Optimizer`, () => { const { optimizedQuery: optimized } = optimizeQuery(complexQuery) - // AND clause should be split and single-source parts pushed down + // AND clause should be split and single-source parts pushed down, then combined for performance expect(optimized.where).toEqual([]) expect(optimized.from.type).toBe(`queryRef`) if (optimized.from.type === `queryRef`) { - // Should contain the original condition plus the AND clause (which gets split) - expect(optimized.from.query.where).toContainEqual( + // WHERE clauses should be combined into a single AND expression + expect(optimized.from.query.where).toHaveLength(1) + expect(optimized.from.query.where![0]).toMatchObject({ + type: `func`, + name: `and`, + }) + // Verify it contains the original condition and the new conditions + const combinedWhere = optimized.from.query.where![0] as any + expect(combinedWhere.args).toContainEqual( createGt(createPropRef(`u`, `age`), createValue(18)) ) - - // Should have the AND clause pushed down (may be split into components) - const whereClausesLength = optimized.from.query.where?.length || 0 - expect(whereClausesLength).toBeGreaterThan(1) // Should have at least the original + new conditions } }) }) @@ -1419,6 +1464,154 @@ describe(`Query Optimizer`, () => { ) } }) + + test(`should combine multiple remaining WHERE clauses after optimization`, () => { + // This test verifies that if multiple WHERE clauses remain after optimization + // (e.g., because some can't be pushed down), they are combined into a single clause + const subqueryWithAggregates: QueryIR = { + from: new CollectionRef(mockCollection, `u`), + select: { + department_id: createPropRef(`u`, `department_id`), + user_count: createAgg(`count`, createPropRef(`u`, `id`)), + }, + groupBy: [createPropRef(`u`, `department_id`)], + } + + const query: QueryIR = { + from: new QueryRef(subqueryWithAggregates, `stats`), + join: [ + { + from: new CollectionRef(mockCollection, `p`), + type: `inner`, + left: createPropRef(`stats`, `department_id`), + right: createPropRef(`p`, `department_id`), + }, + ], + where: [ + createGt(createPropRef(`stats`, `user_count`), createValue(5)), // Can't push down - GROUP BY + createGt(createPropRef(`p`, `views`), createValue(100)), // Can push down + createEq( + createPropRef(`stats`, `department_id`), + createPropRef(`p`, `author_dept`) + ), // Multi-source + ], + } + + const { optimizedQuery: optimized } = optimizeQuery(query) + + // The posts clause should be pushed down + expect(optimized.join).toHaveLength(1) + if (optimized.join && optimized.join[0]) { + expect(optimized.join[0].from.type).toBe(`queryRef`) + if (optimized.join[0].from.type === `queryRef`) { + expect(optimized.join[0].from.query.where).toHaveLength(1) + } + } + + // The stats clause and 
multi-source clause should remain BUT be combined into ONE + console.log( + `Remaining WHERE clauses: ${optimized.where?.length || 0}`, + JSON.stringify(optimized.where, null, 2) + ) + expect(optimized.where).toBeDefined() + // This is the KEY assertion - all remaining clauses should be combined + // Currently this might FAIL if step 3 is missing + expect(optimized.where!.length).toBe(1) + expect(optimized.where![0]).toMatchObject({ + type: `func`, + name: `and`, + }) + }) + + test(`should flatten nested AND expressions when combining remaining clauses`, () => { + // This test verifies that if remaining WHERE clauses already contain AND expressions, + // they are flattened to avoid and(and(...), ...) nesting + const subqueryWithAggregates: QueryIR = { + from: new CollectionRef(mockCollection, `u`), + select: { + department_id: createPropRef(`u`, `department_id`), + user_count: createAgg(`count`, createPropRef(`u`, `id`)), + }, + groupBy: [createPropRef(`u`, `department_id`)], + } + + const query: QueryIR = { + from: new QueryRef(subqueryWithAggregates, `stats`), + join: [ + { + from: new CollectionRef(mockCollection, `p`), + type: `inner`, + left: createPropRef(`stats`, `department_id`), + right: createPropRef(`p`, `department_id`), + }, + ], + where: [ + // This is an AND expression that can't be pushed down + createAnd( + createGt(createPropRef(`stats`, `user_count`), createValue(5)), + createEq(createPropRef(`stats`, `department_id`), createValue(1)) + ), + createGt(createPropRef(`p`, `views`), createValue(100)), // Can push down + createEq( + createPropRef(`stats`, `department_id`), + createPropRef(`p`, `author_dept`) + ), // Multi-source + ], + } + + const { optimizedQuery: optimized } = optimizeQuery(query) + + // The posts clause should be pushed down + expect(optimized.join).toHaveLength(1) + if (optimized.join && optimized.join[0]) { + expect(optimized.join[0].from.type).toBe(`queryRef`) + } + + // The remaining clauses should be combined WITHOUT nested AND + expect(optimized.where).toBeDefined() + expect(optimized.where!.length).toBe(1) + const combinedWhere = optimized.where![0] as any + expect(combinedWhere.type).toBe(`func`) + expect(combinedWhere.name).toBe(`and`) + // Should have 4 args (the 2 from the nested AND + the multi-source clause), + // NOT 2 args where one is itself an AND + expect(combinedWhere.args).toHaveLength(3) + // Verify none of the args are AND expressions (i.e., fully flattened) + const argTypes = combinedWhere.args.map((arg: any) => ({ + type: arg.type, + name: arg.name, + })) + expect(argTypes).not.toContainEqual({ type: `func`, name: `and` }) + }) + + test(`should not combine functional WHERE clauses`, () => { + // Verify that fn.where() clauses remain separate and are not combined + const query: QueryIR = { + from: new CollectionRef(mockCollection, `u`), + where: [ + createEq(createPropRef(`u`, `department_id`), createValue(1)), + createGt(createPropRef(`u`, `age`), createValue(25)), + ], + fnWhere: [ + (row: any) => row.u.name.startsWith(`A`), + (row: any) => row.u.email !== null, + ], + } + + const { optimizedQuery: optimized } = optimizeQuery(query) + + // Regular WHERE clauses should be combined into one + expect(optimized.where).toHaveLength(1) + expect(optimized.where![0]).toMatchObject({ + type: `func`, + name: `and`, + }) + + // Functional WHERE clauses should remain separate (not combined) + expect(optimized.fnWhere).toHaveLength(2) + expect(optimized.fnWhere![0]).toBeTypeOf(`function`) + expect(optimized.fnWhere![1]).toBeTypeOf(`function`) + 
}) }) describe(`JOIN semantics preservation`, () => { From 979a66f2f6eff0ffe44dfde7c67feea933ee6110 Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 31 Oct 2025 06:59:48 -0600 Subject: [PATCH 49/56] Enable auto-indexing for nested field paths (#728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: enable auto-indexing for nested field paths This fix allows auto-indexes to be created for nested field paths (e.g., `profile.score`, `metadata.stats.views`), not just top-level fields. This resolves performance issues where queries with `eq()`, `gt()`, etc. on nested fields were forced to do full table scans instead of using indexes. Changes: - Remove the `fieldPath.length !== 1` restriction in `extractIndexableExpressions()` - Update `ensureIndexForField()` to properly traverse nested paths when creating index accessors - Add comprehensive tests for nested path auto-indexing with 1, 2, and 3-level nesting - Verify that nested path indexes are properly used by the query optimizer Fixes #727 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: use colon-prefixed naming for auto-indexes to avoid conflicts Change auto-index naming from 'auto_field_path' to 'auto:field.path' to prevent ambiguity between nested paths and fields with underscores. Examples: - user.profile -> auto:user.profile - user_profile -> auto:user_profile (no conflict!) Co-authored-by: Sam Willis * chore: add changeset for nested auto-index fix * style: format changeset with prettier --------- Co-authored-by: Claude Co-authored-by: Sam Willis --- .changeset/enable-nested-auto-index.md | 50 ++++++ packages/db/src/indexes/auto-index.ts | 33 ++-- .../db/tests/collection-auto-index.test.ts | 144 ++++++++++++++++++ 3 files changed, 217 insertions(+), 10 deletions(-) create mode 100644 .changeset/enable-nested-auto-index.md diff --git a/.changeset/enable-nested-auto-index.md b/.changeset/enable-nested-auto-index.md new file mode 100644 index 000000000..d29cd9aa1 --- /dev/null +++ b/.changeset/enable-nested-auto-index.md @@ -0,0 +1,50 @@ +--- +"@tanstack/db": patch +--- + +Enable auto-indexing for nested field paths + +Previously, auto-indexes were only created for top-level fields. Queries filtering on nested fields like `vehicleDispatch.date` or `profile.score` were forced to perform full table scans, causing significant performance issues. + +Now, auto-indexes are automatically created for nested field paths of any depth when using `eq()`, `gt()`, `gte()`, `lt()`, `lte()`, or `in()` operations. + +**Performance Impact:** + +Before this fix, filtering on nested fields resulted in expensive full scans: + +- Query time: ~353ms for 39 executions (from issue #727) +- "graph run" and "d2ts join" operations dominated execution time + +After this fix, nested field queries use indexes: + +- Query time: Sub-millisecond (typical indexed lookup) +- Proper index utilization verified through query optimizer + +**Example:** + +```typescript +const collection = createCollection({ + getKey: (item) => item.id, + autoIndex: "eager", // default + // ... 
sync config +}) + +// These now automatically create and use indexes: +collection.subscribeChanges((items) => console.log(items), { + whereExpression: eq(row.vehicleDispatch?.date, "2024-01-01"), +}) + +collection.subscribeChanges((items) => console.log(items), { + whereExpression: gt(row.profile?.stats.rating, 4.5), +}) +``` + +**Index Naming:** + +Auto-indexes for nested paths use the format `auto:field.path` to avoid naming conflicts: + +- `auto:status` for top-level field `status` +- `auto:profile.score` for nested field `profile.score` +- `auto:metadata.stats.views` for deeply nested field `metadata.stats.views` + +Fixes #727 diff --git a/packages/db/src/indexes/auto-index.ts b/packages/db/src/indexes/auto-index.ts index f9387c968..56c352b51 100644 --- a/packages/db/src/indexes/auto-index.ts +++ b/packages/db/src/indexes/auto-index.ts @@ -44,14 +44,25 @@ export function ensureIndexForField< // Create a new index for this field using the collection's createIndex method try { - collection.createIndex((row) => (row as any)[fieldName], { - name: `auto_${fieldName}`, - indexType: BTreeIndex, - options: compareFn ? { compareFn, compareOptions } : {}, - }) + // Use the proxy-based approach to create the proper accessor for nested paths + collection.createIndex( + (row) => { + // Navigate through the field path + let current: any = row + for (const part of fieldPath) { + current = current[part] + } + return current + }, + { + name: `auto:${fieldPath.join(`.`)}`, + indexType: BTreeIndex, + options: compareFn ? { compareFn, compareOptions } : {}, + } + ) } catch (error) { console.warn( - `${collection.id ? `[${collection.id}] ` : ``}Failed to create auto-index for field "${fieldName}":`, + `${collection.id ? `[${collection.id}] ` : ``}Failed to create auto-index for field path "${fieldPath.join(`.`)}":`, error ) } @@ -108,7 +119,7 @@ function extractIndexableExpressions( return } - // Check if the first argument is a property reference (single field) + // Check if the first argument is a property reference if (func.args.length < 1 || func.args[0].type !== `ref`) { return } @@ -116,12 +127,14 @@ function extractIndexableExpressions( const fieldRef = func.args[0] const fieldPath = fieldRef.path - // Skip if it's not a simple field (e.g., nested properties or array access) - if (fieldPath.length !== 1) { + // Skip if the path is empty + if (fieldPath.length === 0) { return } - const fieldName = fieldPath[0] + // For nested paths, use the full path joined with underscores as the field name + // For simple paths, use the first (and only) element + const fieldName = fieldPath.join(`_`) results.push({ fieldName, fieldPath }) } diff --git a/packages/db/tests/collection-auto-index.test.ts b/packages/db/tests/collection-auto-index.test.ts index f3821f7cb..3047a4d3e 100644 --- a/packages/db/tests/collection-auto-index.test.ts +++ b/packages/db/tests/collection-auto-index.test.ts @@ -750,4 +750,148 @@ describe(`Collection Auto-Indexing`, () => { subscription.unsubscribe() }) + + it(`should create auto-indexes for nested field paths`, async () => { + interface NestedTestItem { + id: string + name: string + profile?: { + score: number + bio: string + } + metadata?: { + tags: Array + stats: { + views: number + likes: number + } + } + } + + const nestedTestData: Array = [ + { + id: `1`, + name: `Alice`, + profile: { score: 85, bio: `Developer` }, + metadata: { + tags: [`tech`, `coding`], + stats: { views: 100, likes: 50 }, + }, + }, + { + id: `2`, + name: `Bob`, + profile: { score: 92, bio: `Designer` }, + 
metadata: { + tags: [`design`, `ui`], + stats: { views: 200, likes: 75 }, + }, + }, + { + id: `3`, + name: `Charlie`, + profile: { score: 78, bio: `Manager` }, + metadata: { + tags: [`management`, `leadership`], + stats: { views: 150, likes: 60 }, + }, + }, + ] + + const collection = createCollection({ + getKey: (item) => item.id, + autoIndex: `eager`, + startSync: true, + sync: { + sync: ({ begin, write, commit, markReady }) => { + begin() + for (const item of nestedTestData) { + write({ + type: `insert`, + value: item, + }) + } + commit() + markReady() + }, + }, + }) + + await collection.stateWhenReady() + + // Should have no indexes initially + expect(collection.indexes.size).toBe(0) + + // Test 1: Nested field one level deep (profile.score) + const changes1: Array = [] + const subscription1 = collection.subscribeChanges( + (items) => { + changes1.push(...items) + }, + { + includeInitialState: true, + whereExpression: gt(new PropRef([`profile`, `score`]), 80), + } + ) + + // Should have created an auto-index for profile.score + const profileScoreIndex = Array.from(collection.indexes.values()).find( + (index) => + index.expression.type === `ref` && + (index.expression as any).path.length === 2 && + (index.expression as any).path[0] === `profile` && + (index.expression as any).path[1] === `score` + ) + expect(profileScoreIndex).toBeDefined() + + // Verify the filtered results are correct + expect(changes1.filter((c) => c.type === `insert`).length).toBe(2) // Alice (85) and Bob (92) + + subscription1.unsubscribe() + + // Test 2: Deeply nested field (metadata.stats.views) + const changes2: Array = [] + const subscription2 = collection.subscribeChanges( + (items) => { + changes2.push(...items) + }, + { + includeInitialState: true, + whereExpression: eq(new PropRef([`metadata`, `stats`, `views`]), 200), + } + ) + + // Should have created an auto-index for metadata.stats.views + const viewsIndex = Array.from(collection.indexes.values()).find( + (index) => + index.expression.type === `ref` && + (index.expression as any).path.length === 3 && + (index.expression as any).path[0] === `metadata` && + (index.expression as any).path[1] === `stats` && + (index.expression as any).path[2] === `views` + ) + expect(viewsIndex).toBeDefined() + + // Verify the filtered results are correct + expect(changes2.filter((c) => c.type === `insert`).length).toBe(1) // Only Bob has 200 views + + subscription2.unsubscribe() + + // Test 3: Index usage verification with tracker + withIndexTracking(collection, (tracker) => { + const result = collection.currentStateAsChanges({ + where: gt(new PropRef([`profile`, `score`]), 80), + })! + + expect(result.length).toBe(2) // Alice and Bob + + // Verify it used the auto-created index + expectIndexUsage(tracker.stats, { + shouldUseIndex: true, + shouldUseFullScan: false, + indexCallCount: 1, + fullScanCallCount: 0, + }) + }) + }) }) From d2b569c49facb5e81513cc9df77fc7793722b03a Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 31 Oct 2025 09:49:44 -0600 Subject: [PATCH 50/56] Investigate Size Change action minification (#736) feat: use minified builds for bundle size comparisons Add build:minified scripts that enable minification during builds, and configure the compressed-size-action to use these scripts. This ensures that bundle size measurements in PRs reflect actual code changes rather than being inflated by comments and whitespace, while keeping the published packages readable and unminified. 
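For context, a minimal sketch of how the two build modes can coexist, assuming a plain Vite library config (the actual package configs in this repo are not shown in this patch and may differ, e.g. if they come from a shared preset):

```ts
// vite.config.ts — illustrative only, not the repo's real config.
import { defineConfig } from "vite"

export default defineConfig({
  build: {
    lib: { entry: "src/index.ts", formats: ["es", "cjs"] },
    // Published bundles stay readable by default…
    minify: false,
    // …while `vite build --minify` (the build:minified script) overrides this
    // on the CLI so the size-comparison action measures minified output.
  },
})
```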
Changes: - Add build:minified script to root package.json - Add build:minified scripts to @tanstack/db and @tanstack/react-db - Configure compressed-size-action to use build:minified script Co-authored-by: Claude --- .github/workflows/pr.yml | 2 ++ package.json | 1 + packages/db/package.json | 1 + packages/react-db/package.json | 1 + 4 files changed, 5 insertions(+) diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 472b8ec28..5ff230848 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -54,12 +54,14 @@ jobs: repo-token: "${{ secrets.GITHUB_TOKEN }}" pattern: "./packages/db/dist/**/*.{js,mjs}" comment-key: "db-package-size" + build-script: "build:minified" - name: Compressed Size Action - React DB Package uses: preactjs/compressed-size-action@v2 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" pattern: "./packages/react-db/dist/**/*.{js,mjs}" comment-key: "react-db-package-size" + build-script: "build:minified" build-example: name: Build Example Site runs-on: ubuntu-latest diff --git a/package.json b/package.json index be8334fcc..d85d17010 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,7 @@ "type": "module", "scripts": { "build": "pnpm --filter \"./packages/**\" build", + "build:minified": "pnpm --filter \"./packages/**\" build:minified", "changeset": "changeset", "changeset:publish": "changeset publish", "changeset:version": "changeset version && pnpm install --no-frozen-lockfile", diff --git a/packages/db/package.json b/packages/db/package.json index 64e8fdf69..f2e4129e3 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -49,6 +49,7 @@ ], "scripts": { "build": "vite build", + "build:minified": "vite build --minify", "dev": "vite build --watch", "lint": "eslint . --fix", "test": "npx vitest --run" diff --git a/packages/react-db/package.json b/packages/react-db/package.json index ccb0f6dbc..4773c6b33 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -54,6 +54,7 @@ }, "scripts": { "build": "vite build", + "build:minified": "vite build --minify", "dev": "vite build --watch", "test": "npx vitest --run", "lint": "eslint . --fix" From cb256234c9cd8df7771808b147e5afc2be56f51f Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Fri, 31 Oct 2025 09:55:33 -0600 Subject: [PATCH 51/56] feat: Add paced mutations with timing strategies (#704) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add useSerializedMutations hook with timing strategies Implements a new hook for managing optimistic mutations with pluggable timing strategies (debounce, queue, throttle) using TanStack Pacer. 
Key features: - Auto-merge mutations and serialize persistence according to strategy - Track and rollback superseded pending transactions to prevent memory leaks - Proper cleanup of pending/executing transactions on unmount - Queue strategy uses AsyncQueuer for true sequential processing Breaking changes from initial design: - Renamed from useSerializedTransaction to useSerializedMutations (more accurate name) - Each mutate() call creates mutations that are auto-merged, not separate transactions Addresses feedback: - HIGH: Rollback superseded transactions to prevent orphaned isPersisted promises - HIGH: cleanup() now properly rolls back all pending/executing transactions - HIGH: Queue strategy properly serializes commits using AsyncQueuer with concurrency: 1 Example usage: ```tsx const mutate = useSerializedMutations({ mutationFn: async ({ transaction }) => { await api.save(transaction.mutations) }, strategy: debounceStrategy({ wait: 500 }) }) ``` 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Fix feedback-4 issues and add interactive demo Fixes for feedback-4 issues: - Queue strategy: await isPersisted.promise instead of calling commit() again to fix double-commit error - cleanup(): check transaction state before rollback to prevent errors on completed transactions - Pending transactions: rollback all pending transactions on each new mutate() call to handle dropped callbacks Added interactive serialized mutations demo: - Visual tracking of transaction states (pending/executing/completed/failed) - Live configuration of debounce/queue/throttle strategies - Real-time stats dashboard showing transaction counts - Transaction timeline with mutation details and durations 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: serialized mutations strategy execution and transaction handling Core fixes: - Save transaction reference before calling strategy.execute() to prevent null returns when strategies (like queue) execute callbacks synchronously - Call strategy.execute() on every mutate() call to properly reset debounce timers - Simplified transaction lifecycle - single active transaction that gets reused for batching Demo improvements: - Memoized strategy and mutationFn to prevent unnecessary recreations - Added fake server sync to demonstrate optimistic updates - Enhanced UI to show optimistic vs synced state and detailed timing - Added mitt for event-based server communication Tests: - Replaced comprehensive test suite with focused debounce strategy tests - Two tests demonstrating batching and timer reset behavior - Tests pass with real timers and validate mutation auto-merging 🤖 Generated with [Claude Code](https://claude.com/claude-code) * prettier * test: add comprehensive tests for queue and throttle strategies Added test coverage for all three mutation strategies: - Debounce: batching and timer reset (already passing) - Queue: accumulation and sequential processing - Throttle: leading/trailing edge execution All 5 tests passing with 100% coverage on useSerializedMutations hook. Also added changeset documenting the new serialized mutations feature. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: resolve TypeScript strict mode errors in useSerializedMutations tests Added non-null assertions and proper type casting for test variables to satisfy TypeScript's strict null checking. All 62 tests still passing with 100% coverage on useSerializedMutations hook. 
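To make the queue double-commit fix above concrete, a minimal illustrative sketch (not the actual implementation) of what the queued worker does once `commit()` has already been called on a transaction:

```ts
// Illustrative only — the queued worker awaits the transaction's existing
// persistence promise instead of calling commit() a second time, which is
// what previously raised the double-commit error.
async function awaitQueuedTransaction(tx: {
  isPersisted: { promise: Promise<unknown> }
}) {
  await tx.isPersisted.promise
}
```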
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * refactor: convert demo to slider-based interface with 300ms default Changed from button-based mutations to a slider interface that better demonstrates the different strategies in action: - Changed Item.value from string to number (was already being used as number) - Reduced default wait time from 1000ms to 300ms for more responsive demo - Replaced "Trigger Mutation" and "Trigger 5 Rapid Mutations" buttons with a slider (0-100 range) that triggers mutations on every change - Updated UI text to reference slider instead of buttons - Changed mutation display from "value X-1 → X" to "value = X" since slider sets absolute values rather than incrementing The slider provides a more natural and vivid demonstration of how strategies handle rapid mutations - users can drag it and see debounce wait for stops, throttle sample during drags, and queue process all changes. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix(demo): improve UI and fix slider reset issue - Use mutation.modified instead of mutation.changes for updates to preserve full state - Remove Delta stat card as it wasn't providing value - Show newest transactions first in timeline for better UX 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix(queue): capture transaction before clearing activeTransaction Queue strategy now receives a closure that commits the captured transaction instead of calling commitCallback which expects activeTransaction to be set. This prevents "no active transaction exists" errors. - Capture transaction before clearing activeTransaction for queue strategy - Pass commit closure to queue that operates on captured transaction - Remove "Reset to 0" button from demo - All tests passing 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix(queue): explicitly default to FIFO processing order Set explicit defaults for addItemsTo='back' and getItemsFrom='front' to ensure queue strategy processes transactions in FIFO order (oldest first). 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: clarify queue strategy creates separate transactions with configurable order Update changeset to reflect that queue strategy creates separate transactions per mutation and defaults to FIFO (but is configurable). 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * refactor: rename "Serialized Mutations" to "Paced Mutations" Rename the feature from "Serialized Mutations" to "Paced Mutations" to better reflect its purpose of controlling mutation timing rather than serialization. This includes: - Renamed core functions: createSerializedMutations → createPacedMutations - Renamed React hook: useSerializedMutations → usePacedMutations - Renamed types: SerializedMutationsConfig → PacedMutationsConfig - Updated all file names, imports, exports, and documentation - Updated demo app title and examples - Updated changeset All tests pass and the demo app builds successfully. 
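Regarding the `mutation.modified` vs `mutation.changes` point above, a hedged sketch of the difference inside a `mutationFn` (the endpoint and `api.saveItems` helper are illustrative, not part of this change):

```tsx
import { usePacedMutations, throttleStrategy } from "@tanstack/react-db"

const mutate = usePacedMutations({
  mutationFn: async ({ transaction }) => {
    // mutation.changes only carries the fields touched by an update, while
    // mutation.modified is the full post-update row — which is what the demo
    // now sends so the server (and the slider) see complete state.
    const rows = transaction.mutations.map((m) => m.modified)
    await api.saveItems(rows) // hypothetical endpoint
  },
  strategy: throttleStrategy({ wait: 300 }),
})
```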
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * update lock * chore: change paced mutations changeset from minor to patch 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: update remaining references to useSerializedMutations Update todo example and queueStrategy JSDoc to use usePacedMutations instead of useSerializedMutations. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: mention TanStack Pacer in changeset Add reference to TanStack Pacer which powers the paced mutations strategies. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: clarify key design difference between strategies Make it crystal clear that debounce/throttle only allow one pending tx (collecting mutations) and one persisting tx at a time, while queue guarantees each mutation becomes a separate tx processed in order. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * docs: add comprehensive Paced Mutations guide Add new "Paced Mutations" section to mutations.md covering: - Introduction to paced mutations and TanStack Pacer - Key design differences (debounce/throttle vs queue) - Detailed examples for each strategy (debounce, throttle, queue) - Guidance on choosing the right strategy - React hook usage with usePacedMutations - Non-React usage with createPacedMutations 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: remove id property from PacedMutationsConfig The id property doesn't make sense for paced mutations because: - Queue strategy creates separate transactions per mutate() call - Debounce/throttle create multiple transactions over time - Users shouldn't control internal transaction IDs Changed PacedMutationsConfig to explicitly define only the properties that make sense (mutationFn, strategy, metadata) instead of extending TransactionConfig. This prevents TypeScript from accepting invalid configuration like: usePacedMutations({ id: 'foo', ... }) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: prevent unnecessary recreation of paced mutations instance Fixed issue where wrapping usePacedMutations in another hook would recreate the instance on every render when passing strategy inline: Before (broken): usePacedMutations({ strategy: debounceStrategy({ wait: 3000 }) }) // Recreates instance every render because strategy object changes After (fixed): // Serializes strategy type + options for stable comparison // Only recreates when actual values change Now uses JSON.stringify to create a stable dependency from the strategy's type and options, so the instance is only recreated when the strategy configuration actually changes, not when the object reference changes. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * test: add memoization tests for usePacedMutations Add comprehensive tests to verify that usePacedMutations doesn't recreate the instance unnecessarily when wrapped in custom hooks. Tests cover: 1. Basic memoization - instance stays same when strategy values are same 2. User's exact scenario - custom hook with inline strategy creation 3. Proper recreation - instance changes when strategy options change These tests verify the fix for the bug where wrapping usePacedMutations in a custom hook with inline strategy would recreate the instance on every render. 
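For reference, a sketch of the kind of wrapper hook these memoization tests cover (the hook name, collection, and endpoint are illustrative, not from this PR): because `debounceStrategy({ wait: 3000 })` produces a fresh object on every render, the hook has to compare the strategy's type and options rather than its identity.

```tsx
import { usePacedMutations, debounceStrategy } from "@tanstack/react-db"
import { notesCollection } from "./collections" // hypothetical collection module

export function useAutoSaveNote(noteId: string) {
  const mutate = usePacedMutations({
    mutationFn: async ({ transaction }) => {
      await fetch("/api/notes", {
        method: "PUT",
        body: JSON.stringify(transaction.mutations.map((m) => m.modified)),
      })
    },
    // A fresh object every render — must not be treated as a new strategy.
    strategy: debounceStrategy({ wait: 3000 }),
  })

  return (text: string) =>
    mutate(() => {
      notesCollection.update(noteId, (draft) => {
        draft.text = text
      })
    })
}
```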
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * fix: stabilize mutationFn to prevent recreating paced mutations instance Wrap the user-provided mutationFn in a stable callback using useRef, so that even if the mutationFn reference changes on each render, the paced mutations instance is not recreated. This fixes the bug where: 1. User types "123" in a textarea 2. Each keystroke recreates the instance (new mutationFn on each render) 3. Each call to mutate() gets a different transaction ID 4. Old transactions with stale data (e.g. "12") are still pending 5. When they complete, they overwrite the correct "123" value Now the mutationFn identity is stable, so the same paced mutations instance is reused across renders, and all mutations during the debounce window batch into the same transaction. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Refactor paced mutations to work like createOptimisticAction Modified the paced mutations API to follow the same pattern as createOptimisticAction, where the hook takes an onMutate callback and you pass the actual update variables directly to the mutate function. Changes: - Updated PacedMutationsConfig to accept onMutate callback - Modified createPacedMutations to accept variables instead of callback - Updated usePacedMutations hook to handle the new API - Fixed all tests to use the new API with onMutate - Updated documentation and examples to reflect the new pattern 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude * Update paced mutations demo to use new onMutate API Modified the example to use the new variables-based API where you pass the value directly to mutate() and provide an onMutate callback for optimistic updates. This aligns with the createOptimisticAction pattern. 
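As a hedged sketch of the resulting API (the collection, key, and `api.save` call are illustrative; only `onMutate`, `mutationFn`, `strategy`, and passing variables directly to `mutate()` come from this change):

```tsx
import { usePacedMutations, debounceStrategy } from "@tanstack/react-db"
import { itemCollection } from "./collections" // hypothetical

const mutate = usePacedMutations({
  // Applies the optimistic write for whatever variables are passed to mutate()
  onMutate: (newValue: number) => {
    itemCollection.update("item-1", (draft) => {
      draft.value = newValue
    })
  },
  mutationFn: async ({ transaction }) => {
    await api.save(transaction.mutations) // hypothetical backend call
  },
  strategy: debounceStrategy({ wait: 300 }),
})

// Variables go straight to mutate() instead of a callback:
const tx = mutate(42)
// Assuming mutate() still returns the transaction, as in the changeset example:
await tx.isPersisted.promise
```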
Changes: - Removed useCallback wrappers (hook handles stabilization internally) - Pass newValue directly to mutate() instead of a callback - Simplified code since hook manages ref stability 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --------- Co-authored-by: Claude --- .changeset/paced-mutations.md | 49 ++ SERIALIZED_TRANSACTION_PLAN.md | 365 ++++++++++++++ docs/guides/mutations.md | 233 +++++++++ .../react/paced-mutations-demo/index.html | 12 + .../react/paced-mutations-demo/package.json | 25 + .../react/paced-mutations-demo/src/App.tsx | 448 +++++++++++++++++ .../react/paced-mutations-demo/src/index.css | 285 +++++++++++ .../react/paced-mutations-demo/src/main.tsx | 10 + .../react/paced-mutations-demo/tsconfig.json | 21 + .../react/paced-mutations-demo/vite.config.ts | 6 + .../react/todo/src/components/TodoApp.tsx | 66 ++- examples/react/todo/src/routes/electric.tsx | 37 ++ examples/react/todo/src/routes/query.tsx | 32 ++ examples/react/todo/src/routes/trailbase.tsx | 7 + feedback-1.md | 7 + feedback-2.md | 1 + feedback-3.md | 5 + feedback-4.md | 5 + packages/db/package.json | 3 +- packages/db/src/index.ts | 2 + packages/db/src/paced-mutations.ts | 169 +++++++ .../db/src/strategies/debounceStrategy.ts | 45 ++ packages/db/src/strategies/index.ts | 17 + packages/db/src/strategies/queueStrategy.ts | 75 +++ .../db/src/strategies/throttleStrategy.ts | 62 +++ packages/db/src/strategies/types.ts | 130 +++++ packages/react-db/src/index.ts | 1 + packages/react-db/src/usePacedMutations.ts | 138 +++++ .../react-db/tests/usePacedMutations.test.tsx | 472 ++++++++++++++++++ pnpm-lock.yaml | 258 +++++++++- 30 files changed, 2954 insertions(+), 32 deletions(-) create mode 100644 .changeset/paced-mutations.md create mode 100644 SERIALIZED_TRANSACTION_PLAN.md create mode 100644 examples/react/paced-mutations-demo/index.html create mode 100644 examples/react/paced-mutations-demo/package.json create mode 100644 examples/react/paced-mutations-demo/src/App.tsx create mode 100644 examples/react/paced-mutations-demo/src/index.css create mode 100644 examples/react/paced-mutations-demo/src/main.tsx create mode 100644 examples/react/paced-mutations-demo/tsconfig.json create mode 100644 examples/react/paced-mutations-demo/vite.config.ts create mode 100644 feedback-1.md create mode 100644 feedback-2.md create mode 100644 feedback-3.md create mode 100644 feedback-4.md create mode 100644 packages/db/src/paced-mutations.ts create mode 100644 packages/db/src/strategies/debounceStrategy.ts create mode 100644 packages/db/src/strategies/index.ts create mode 100644 packages/db/src/strategies/queueStrategy.ts create mode 100644 packages/db/src/strategies/throttleStrategy.ts create mode 100644 packages/db/src/strategies/types.ts create mode 100644 packages/react-db/src/usePacedMutations.ts create mode 100644 packages/react-db/tests/usePacedMutations.test.tsx diff --git a/.changeset/paced-mutations.md b/.changeset/paced-mutations.md new file mode 100644 index 000000000..6329c2963 --- /dev/null +++ b/.changeset/paced-mutations.md @@ -0,0 +1,49 @@ +--- +"@tanstack/db": patch +"@tanstack/react-db": patch +--- + +Add paced mutations with pluggable timing strategies + +Introduces a new paced mutations system that enables optimistic mutations with pluggable timing strategies. This provides fine-grained control over when and how mutations are persisted to the backend. Powered by [TanStack Pacer](https://github.com/TanStack/pacer). 
+ +**Key Design:** + +- **Debounce/Throttle**: Only one pending transaction (collecting mutations) and one persisting transaction (writing to backend) at a time. Multiple rapid mutations automatically merge together. +- **Queue**: Each mutation creates a separate transaction, guaranteed to run in the order they're made (FIFO by default, configurable to LIFO). + +**Core Features:** + +- **Pluggable Strategy System**: Choose from debounce, queue, or throttle strategies to control mutation timing +- **Auto-merging Mutations**: Multiple rapid mutations on the same item automatically merge for efficiency (debounce/throttle only) +- **Transaction Management**: Full transaction lifecycle tracking (pending → persisting → completed/failed) +- **React Hook**: `usePacedMutations` for easy integration in React applications + +**Available Strategies:** + +- `debounceStrategy`: Wait for inactivity before persisting. Only final state is saved. (ideal for auto-save, search-as-you-type) +- `queueStrategy`: Each mutation becomes a separate transaction, processed sequentially in order (defaults to FIFO, configurable to LIFO). All mutations are guaranteed to persist. (ideal for sequential workflows, rate-limited APIs) +- `throttleStrategy`: Ensure minimum spacing between executions. Mutations between executions are merged. (ideal for analytics, progress updates) + +**Example Usage:** + +```ts +import { usePacedMutations, debounceStrategy } from "@tanstack/react-db" + +const mutate = usePacedMutations({ + mutationFn: async ({ transaction }) => { + await api.save(transaction.mutations) + }, + strategy: debounceStrategy({ wait: 500 }), +}) + +// Trigger a mutation +const tx = mutate(() => { + collection.update(id, (draft) => { + draft.value = newValue + }) +}) + +// Optionally await persistence +await tx.isPersisted.promise +``` diff --git a/SERIALIZED_TRANSACTION_PLAN.md b/SERIALIZED_TRANSACTION_PLAN.md new file mode 100644 index 000000000..1736c2afd --- /dev/null +++ b/SERIALIZED_TRANSACTION_PLAN.md @@ -0,0 +1,365 @@ +# Implementation Plan for `useSerializedTransaction` with TanStack Pacer + +Based on [GitHub issue #35](https://github.com/TanStack/db/issues/35), using @tanstack/pacer for strategy implementation across all 5 framework integrations. + +## Overview + +Create a framework-agnostic core in `@tanstack/db` that manages optimistic transactions with pluggable queuing strategies powered by TanStack Pacer. Each framework package wraps the core with framework-specific reactive primitives. + +## Architecture Pattern + +The core transaction logic stays in one place (`@tanstack/db`) while each framework provides its own wrapper using framework-specific reactive primitives. + +```typescript +// Core in @tanstack/db (framework-agnostic) +createSerializedTransaction(config) // Returns { mutate, cleanup } + +// React wrapper +useSerializedTransaction(config) // Uses React hooks, returns mutate function + +// Solid wrapper +useSerializedTransaction(config) // Uses Solid signals, matches useLiveQuery pattern + +// Svelte/Vue wrappers +useSerializedTransaction(config) // Framework-specific implementations + +// Angular wrapper +injectSerializedTransaction(config) // Uses Angular DI, follows injectLiveQuery pattern +``` + +## Available Strategies (Based on Pacer Utilities) + +### 1. **debounceStrategy({ wait, leading?, trailing? })** + +- Uses Pacer's `Debouncer` class +- Waits for pause in activity before committing +- **Best for:** Search inputs, auto-save fields + +### 2. 
**queueStrategy({ wait?, maxSize?, addItemsTo?, getItemsFrom? })** + +- Uses Pacer's `Queuer` class +- Processes all transactions in order (FIFO/LIFO) +- FIFO: `{ addItemsTo: 'back', getItemsFrom: 'front' }` +- LIFO: `{ addItemsTo: 'back', getItemsFrom: 'back' }` +- **Best for:** Sequential operations that must all complete + +### 3. **throttleStrategy({ wait, leading?, trailing? })** + +- Uses Pacer's `Throttler` class +- Evenly spaces transaction executions over time +- **Best for:** Sliders, scroll handlers, progress bars + +### 4. **batchStrategy({ maxSize?, wait?, getShouldExecute? })** + +- Uses Pacer's `Batcher` class +- Groups multiple mutations into batches +- Triggers on size or time threshold +- **Best for:** Bulk operations, reducing network calls + +## File Structure + +``` +packages/db/src/ + ├── serialized-transaction.ts # Core framework-agnostic logic + └── strategies/ + ├── index.ts # Export all strategies + ├── debounceStrategy.ts # Wraps Pacer Debouncer + ├── queueStrategy.ts # Wraps Pacer Queuer + ├── throttleStrategy.ts # Wraps Pacer Throttler + ├── batchStrategy.ts # Wraps Pacer Batcher + └── types.ts # Strategy type definitions + +packages/db/package.json # Add @tanstack/pacer dependency + +packages/react-db/src/ + └── useSerializedTransaction.ts # React hook wrapper + +packages/solid-db/src/ + └── useSerializedTransaction.ts # Solid wrapper (matches useLiveQuery pattern) + +packages/svelte-db/src/ + └── useSerializedTransaction.svelte.ts # Svelte wrapper + +packages/vue-db/src/ + └── useSerializedTransaction.ts # Vue wrapper + +packages/angular-db/src/ + └── injectSerializedTransaction.ts # Angular wrapper (DI pattern) + +packages/*/tests/ + └── serialized-transaction.test.ts # Tests per package +``` + +## Core API Design + +```typescript +// Framework-agnostic core (packages/db) +import { debounceStrategy } from '@tanstack/db' + +const { mutate, cleanup } = createSerializedTransaction({ + mutationFn: async ({ transaction }) => { + await api.save(transaction.mutations) + }, + strategy: debounceStrategy({ wait: 500 }), + metadata?: Record, +}) + +// mutate() executes mutations according to strategy and returns Transaction +const transaction = mutate(() => { + collection.update(id, draft => { draft.value = newValue }) +}) + +// Await persistence and handle errors +try { + await transaction.isPersisted.promise + console.log('Transaction committed successfully') +} catch (error) { + console.error('Transaction failed:', error) +} + +// cleanup() when done (frameworks handle this automatically) +cleanup() +``` + +## React Hook Wrapper + +```typescript +// packages/react-db +import { debounceStrategy } from "@tanstack/react-db" + +const mutate = useSerializedTransaction({ + mutationFn: async ({ transaction }) => { + await api.save(transaction.mutations) + }, + strategy: debounceStrategy({ wait: 1000 }), +}) + +// Usage in component +const handleChange = async (value) => { + const tx = mutate(() => { + collection.update(id, (draft) => { + draft.value = value + }) + }) + + // Optional: await persistence or handle errors + try { + await tx.isPersisted.promise + } catch (error) { + console.error("Update failed:", error) + } +} +``` + +## Example: Slider with Different Strategies + +```typescript +// Debounce - wait for user to stop moving slider +const mutate = useSerializedTransaction({ + mutationFn: async ({ transaction }) => { + await api.updateVolume(transaction.mutations) + }, + strategy: debounceStrategy({ wait: 500 }), +}) + +// Throttle - update every 200ms while 
sliding +const mutate = useSerializedTransaction({ + mutationFn: async ({ transaction }) => { + await api.updateVolume(transaction.mutations) + }, + strategy: throttleStrategy({ wait: 200 }), +}) + +// Debounce with leading/trailing - save first + final value only +const mutate = useSerializedTransaction({ + mutationFn: async ({ transaction }) => { + await api.updateVolume(transaction.mutations) + }, + strategy: debounceStrategy({ wait: 0, leading: true, trailing: true }), +}) + +// Queue - save every change in order (FIFO) +const mutate = useSerializedTransaction({ + mutationFn: async ({ transaction }) => { + await api.updateVolume(transaction.mutations) + }, + strategy: queueStrategy({ + wait: 200, + addItemsTo: "back", + getItemsFrom: "front", + }), +}) +``` + +## Implementation Steps + +### Phase 1: Core Package (@tanstack/db) + +1. Add `@tanstack/pacer` dependency to packages/db/package.json +2. Create strategy type definitions in strategies/types.ts +3. Implement strategy factories: + - `debounceStrategy.ts` - wraps Pacer Debouncer + - `queueStrategy.ts` - wraps Pacer Queuer + - `throttleStrategy.ts` - wraps Pacer Throttler + - `batchStrategy.ts` - wraps Pacer Batcher +4. Create core `createSerializedTransaction()` function +5. Export strategies + core function from packages/db/src/index.ts + +### Phase 2: Framework Wrappers + +6. **React** - Create `useSerializedTransaction` using useRef/useEffect/useCallback +7. **Solid** - Create `useSerializedTransaction` using createSignal/onCleanup (matches `useLiveQuery` pattern) +8. **Svelte** - Create `useSerializedTransaction` using Svelte stores +9. **Vue** - Create `useSerializedTransaction` using ref/onUnmounted +10. **Angular** - Create `injectSerializedTransaction` using inject/DestroyRef (matches `injectLiveQuery` pattern) + +### Phase 3: Testing & Documentation + +11. Write tests for core logic in packages/db +12. Write tests for each framework wrapper +13. Update README with examples +14. Add TypeScript examples to docs + +## Strategy Type System + +```typescript +export type Strategy = + | DebounceStrategy + | QueueStrategy + | ThrottleStrategy + | BatchStrategy + +interface BaseStrategy { + _type: TName // Discriminator for type narrowing + execute: (fn: () => void) => void | Promise + cleanup: () => void +} + +export function debounceStrategy(opts: { + wait: number + leading?: boolean + trailing?: boolean +}): DebounceStrategy + +export function queueStrategy(opts?: { + wait?: number + maxSize?: number + addItemsTo?: "front" | "back" + getItemsFrom?: "front" | "back" +}): QueueStrategy + +export function throttleStrategy(opts: { + wait: number + leading?: boolean + trailing?: boolean +}): ThrottleStrategy + +export function batchStrategy(opts?: { + maxSize?: number + wait?: number + getShouldExecute?: (items: any[]) => boolean +}): BatchStrategy +``` + +## Technical Implementation Details + +### Core createSerializedTransaction + +The core function will: + +1. Accept a strategy and mutationFn +2. Create a wrapper around `createTransaction` from existing code +3. Use the strategy's `execute()` method to control when transactions are committed +4. 
Return `{ mutate, cleanup }` where: + - `mutate(callback): Transaction` - executes mutations according to strategy and returns the Transaction object + - `cleanup()` - cleans up strategy resources + +**Important:** The `mutate()` function returns a `Transaction` object so callers can: + +- Await `transaction.isPersisted.promise` to know when persistence completes +- Handle errors via try/catch or `.catch()` +- Access transaction state and metadata + +### Strategy Factories + +Each strategy factory returns an object with: + +- `execute(fn)` - wraps the function with Pacer's utility +- `cleanup()` - cleans up the Pacer instance + +Example for debounceStrategy: + +```typescript +// NOTE: Import path needs validation - Pacer may export from main entry point +// Likely: import { Debouncer } from '@tanstack/pacer' or similar +import { Debouncer } from "@tanstack/pacer" // TODO: Validate actual export path + +export function debounceStrategy(opts: { + wait: number + leading?: boolean + trailing?: boolean +}) { + const debouncer = new Debouncer(opts) + + return { + _type: "debounce" as const, + execute: (fn: () => void) => { + debouncer.execute(fn) + }, + cleanup: () => { + debouncer.cancel() + }, + } +} +``` + +### React Hook Implementation + +```typescript +export function useSerializedTransaction(config) { + // Include strategy in dependencies to handle strategy changes + const { mutate, cleanup } = useMemo(() => { + return createSerializedTransaction(config) + }, [config.mutationFn, config.metadata, config.strategy]) + + // Cleanup on unmount or when dependencies change + useEffect(() => { + return () => cleanup() + }, [cleanup]) + + // Use useCallback to provide stable reference + const stableMutate = useCallback(mutate, [mutate]) + + return stableMutate +} +``` + +**Key fixes:** + +- Include `config.strategy` in `useMemo` dependencies to handle strategy changes +- Properly cleanup when strategy changes (via useEffect cleanup) +- Return stable callback reference via `useCallback` + +## Benefits + +- ✅ Leverages battle-tested TanStack Pacer utilities +- ✅ Reduces backend write contention +- ✅ Framework-agnostic core promotes consistency +- ✅ Type-safe, composable API +- ✅ Aligns with TanStack ecosystem patterns +- ✅ Supports all 5 framework integrations +- ✅ Simple, declarative API for users +- ✅ Easy to add custom strategies + +## Open Questions + +1. Should we support custom strategies? (i.e., users passing their own strategy objects) +2. Do we need lifecycle callbacks like `onSuccess`, `onError` for each mutate call? +3. Should batching strategy automatically merge mutations or keep them separate? +4. Rate limiting strategy - useful or skip for now? 
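+
+Open question 1 could likely be answered by any object matching the `BaseStrategy`
+shape above. A purely illustrative sketch (the name and behaviour are placeholders,
+not part of this plan):
+
+```typescript
+// A "microtask" strategy: defer the commit to the end of the current tick.
+export function microtaskStrategy() {
+  let cancelled = false
+  return {
+    _type: "microtask" as const,
+    execute: (fn: () => void) => {
+      queueMicrotask(() => {
+        if (!cancelled) fn()
+      })
+    },
+    cleanup: () => {
+      cancelled = true
+    },
+  }
+}
+```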
+ +## Notes + +- ❌ Dropped merge strategy for now (more complex to design, less clear use case) +- The pattern follows existing TanStack patterns where core is framework-agnostic +- Similar to how `useLiveQuery` wraps core query logic per framework diff --git a/docs/guides/mutations.md b/docs/guides/mutations.md index 24e8aba92..f81fea8e6 100644 --- a/docs/guides/mutations.md +++ b/docs/guides/mutations.md @@ -100,6 +100,7 @@ The benefits: - [Operation Handlers](#operation-handlers) - [Creating Custom Actions](#creating-custom-actions) - [Manual Transactions](#manual-transactions) +- [Paced Mutations](#paced-mutations) - [Mutation Merging](#mutation-merging) - [Controlling Optimistic Behavior](#controlling-optimistic-behavior) - [Transaction States](#transaction-states) @@ -892,6 +893,238 @@ tx.isPersisted.promise.then(() => { console.log(tx.state) // 'pending', 'persisting', 'completed', or 'failed' ``` +## Paced Mutations + +Paced mutations provide fine-grained control over **when and how** mutations are persisted to your backend. Instead of persisting every mutation immediately, you can use timing strategies to batch, delay, or queue mutations based on your application's needs. + +Powered by [TanStack Pacer](https://github.com/TanStack/pacer), paced mutations are ideal for scenarios like: +- **Auto-save forms** that wait for the user to stop typing +- **Slider controls** that need smooth updates without overwhelming the backend +- **Sequential workflows** where order matters and every mutation must persist + +### Key Design + +The fundamental difference between strategies is how they handle transactions: + +**Debounce/Throttle**: Only one pending transaction (collecting mutations) and one persisting transaction (writing to backend) at a time. Multiple rapid mutations automatically merge together into a single transaction. + +**Queue**: Each mutation creates a separate transaction, guaranteed to run in the order they're made (FIFO by default, configurable to LIFO). All mutations are guaranteed to persist. + +### Available Strategies + +| Strategy | Behavior | Best For | +|----------|----------|----------| +| **`debounceStrategy`** | Wait for inactivity before persisting. Only final state is saved. | Auto-save forms, search-as-you-type | +| **`throttleStrategy`** | Ensure minimum spacing between executions. Mutations between executions are merged. | Sliders, progress updates, analytics | +| **`queueStrategy`** | Each mutation becomes a separate transaction, processed sequentially in order (FIFO by default, configurable to LIFO). All mutations guaranteed to persist. | Sequential workflows, file uploads, rate-limited APIs | + +### Debounce Strategy + +The debounce strategy waits for a period of inactivity before persisting. This is perfect for auto-save scenarios where you want to wait until the user stops typing before saving their work. + +```tsx +import { usePacedMutations, debounceStrategy } from "@tanstack/react-db" + +function AutoSaveForm() { + const mutate = usePacedMutations({ + mutationFn: async ({ transaction }) => { + // Persist the final merged state to the backend + await api.forms.save(transaction.mutations) + }, + // Wait 500ms after the last change before persisting + strategy: debounceStrategy({ wait: 500 }), + }) + + const handleChange = (field: string, value: string) => { + // Multiple rapid changes merge into a single transaction + mutate(() => { + formCollection.update(formId, (draft) => { + draft[field] = value + }) + }) + } + + return ( +
+ handleChange('title', e.target.value)} /> +