From 1fcebaed7719365498747d75ba5fb141167ec828 Mon Sep 17 00:00:00 2001 From: Denver Coneybeare Date: Fri, 17 Mar 2023 16:13:04 -0400 Subject: [PATCH 1/2] query.test.ts: add a test for resuming a query with existence filter --- .../test/integration/api/query.test.ts | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/packages/firestore/test/integration/api/query.test.ts b/packages/firestore/test/integration/api/query.test.ts index bec924e3fb9..cd401b28d68 100644 --- a/packages/firestore/test/integration/api/query.test.ts +++ b/packages/firestore/test/integration/api/query.test.ts @@ -47,6 +47,7 @@ import { Query, query, QuerySnapshot, + runTransaction, setDoc, startAfter, startAt, @@ -2059,6 +2060,62 @@ apiDescribe('Queries', (persistence: boolean) => { }); }); }); + + it('resuming a query should use existence filter to detect deletes', async () => { + // Prepare the names and contents of the 100 documents to create. + const testDocs: { [key: string]: object } = {}; + for (let i = 0; i < 100; i++) { + testDocs['doc' + (1000 + i)] = { key: 42 }; + } + + return withTestCollection(persistence, testDocs, async (coll, db) => { + // Run a query to populate the local cache with the 100 documents and a + // resume token. + const snapshot1 = await getDocs(coll); + expect(snapshot1.size, 'snapshot1.size').to.equal(100); + const createdDocuments = snapshot1.docs.map(snapshot => snapshot.ref); + + // Delete 50 of the 100 documents. Do this in a transaction, rather than + // deleteDoc(), to avoid affecting the local cache.
+ const deletedDocumentIds = new Set(); + await runTransaction(db, async txn => { + for (let i = 0; i < createdDocuments.length; i += 2) { + const documentToDelete = createdDocuments[i]; + txn.delete(documentToDelete); + deletedDocumentIds.add(documentToDelete.id); + } + }); + + // Wait for 10 seconds, during which Watch will stop tracking the query + // and will send an existence filter rather than "delete" events when the + // query is resumed. + await new Promise(resolve => setTimeout(resolve, 10000)); + + // Resume the query and save the resulting snapshot for verification. + const snapshot2 = await getDocs(coll); + + // Verify that the snapshot from the resumed query contains the expected + // documents; that is, that it contains the 50 documents that were _not_ + // deleted. + // TODO(b/270731363): Remove the "if" condition below once the + // Firestore Emulator is fixed to send an existence filter. At the time of + // writing, the Firestore emulator fails to send an existence filter, + // resulting in the client including the deleted documents in the snapshot + // of the resumed query.
if (!(USE_EMULATOR && snapshot2.size === 100)) { + const actualDocumentIds = snapshot2.docs + .map(documentSnapshot => documentSnapshot.ref.id) + .sort(); + const expectedDocumentIds = createdDocuments + .filter(documentRef => !deletedDocumentIds.has(documentRef.id)) + .map(documentRef => documentRef.id) + .sort(); + expect(actualDocumentIds, 'snapshot2.docs').to.deep.equal( + expectedDocumentIds + ); + } + }); + }).timeout('90s'); }); function verifyDocumentChange( From eced0621867dc8a880b1c39958f202d4d16da95a Mon Sep 17 00:00:00 2001 From: Denver Coneybeare Date: Fri, 17 Mar 2023 16:40:53 -0400 Subject: [PATCH 2/2] integration/util/helpers.ts: use a WriteBatch to create documents, for efficiency --- .../test/integration/util/helpers.ts | 37 ++++++++++++++++--- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/packages/firestore/test/integration/util/helpers.ts b/packages/firestore/test/integration/util/helpers.ts index 2d579afeb5f..5cd73532d5c 100644 --- a/packages/firestore/test/integration/util/helpers.ts +++ b/packages/firestore/test/integration/util/helpers.ts @@ -32,7 +32,9 @@ import { PrivateSettings, SnapshotListenOptions, newTestFirestore, - newTestApp + newTestApp, + writeBatch, + WriteBatch } from './firebase_export'; import { ALT_PROJECT_ID, @@ -317,11 +319,34 @@ export function withTestCollectionSettings( const collectionId = 'test-collection-' + doc(collection(testDb, 'x')).id; const testCollection = collection(testDb, collectionId); const setupCollection = collection(setupDb, collectionId); - const sets: Array<Promise<void>> = []; - Object.keys(docs).forEach(key => { - sets.push(setDoc(doc(setupCollection, key), docs[key])); - }); - return Promise.all(sets).then(() => fn(testCollection, testDb)); + + const writeBatchCommits: Array<Promise<void>> = []; + let writeBatch_: WriteBatch | null = null; + let writeBatchSize = 0; + + for (const key of Object.keys(docs)) { + if (writeBatch_ === null) { + writeBatch_ = writeBatch(setupDb); + } + +
writeBatch_.set(doc(setupCollection, key), docs[key]); + writeBatchSize++; + + // Write batches are capped at 500 writes. Use 400 just to be safe. + if (writeBatchSize === 400) { + writeBatchCommits.push(writeBatch_.commit()); + writeBatch_ = null; + writeBatchSize = 0; + } + } + + if (writeBatch_ !== null) { + writeBatchCommits.push(writeBatch_.commit()); + } + + return Promise.all(writeBatchCommits).then(() => + fn(testCollection, testDb) + ); } ); }