From ad005a00d4fb5168b0878238fe723ccb212157ce Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 30 Sep 2020 12:06:16 -0700 Subject: [PATCH 1/4] Add IndexedDB action to error message --- .../src/local/indexeddb_persistence.ts | 3 +- .../firestore/src/local/indexeddb_schema.ts | 2 +- packages/firestore/src/local/simple_db.ts | 32 +++-- .../unit/local/encoded_resource_path.test.ts | 11 +- .../unit/local/indexeddb_persistence.test.ts | 120 ++++++++++-------- .../test/unit/local/simple_db.test.ts | 64 ++++++---- .../test/unit/specs/spec_test_components.ts | 1 + .../test/unit/specs/spec_test_runner.ts | 17 ++- .../test/unit/util/async_queue.test.ts | 4 + 9 files changed, 157 insertions(+), 97 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 7b248fc5b79..4cc6e275928 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -684,6 +684,7 @@ export class IndexedDbPersistence implements Persistence { // Use `SimpleDb.runTransaction` directly to avoid failing if another tab // has obtained the primary lease. await this.simpleDb.runTransaction( + 'shutdown', 'readwrite', [DbPrimaryClient.store, DbClientMetadata.store], simpleDbTxn => { @@ -794,7 +795,7 @@ export class IndexedDbPersistence implements Persistence { // Do all transactions as readwrite against all object stores, since we // are the only reader/writer. 
return this.simpleDb - .runTransaction(simpleDbMode, ALL_STORES, simpleDbTxn => { + .runTransaction(action, simpleDbMode, ALL_STORES, simpleDbTxn => { persistenceTransaction = new IndexedDbTransaction( simpleDbTxn, this.listenSequence diff --git a/packages/firestore/src/local/indexeddb_schema.ts b/packages/firestore/src/local/indexeddb_schema.ts index 5a9f3c7b87a..9e2403542b0 100644 --- a/packages/firestore/src/local/indexeddb_schema.ts +++ b/packages/firestore/src/local/indexeddb_schema.ts @@ -88,7 +88,7 @@ export class SchemaConverter implements SimpleDbSchemaConverter { `Unexpected schema upgrade from v${fromVersion} to v${toVersion}.` ); - const simpleDbTransaction = new SimpleDbTransaction(txn); + const simpleDbTransaction = new SimpleDbTransaction('createOrUpgrade', txn); if (fromVersion < 1 && toVersion >= 1) { createPrimaryClientStore(db); diff --git a/packages/firestore/src/local/simple_db.ts b/packages/firestore/src/local/simple_db.ts index 2118b877593..dafacb32407 100644 --- a/packages/firestore/src/local/simple_db.ts +++ b/packages/firestore/src/local/simple_db.ts @@ -189,7 +189,7 @@ export class SimpleDb { /** * Opens the specified database, creating or upgrading it if necessary. */ - async ensureDb(): Promise { + async ensureDb(action: string): Promise { if (!this.db) { logDebug(LOG_TAG, 'Opening database:', this.name); this.db = await new Promise((resolve, reject) => { @@ -208,6 +208,7 @@ export class SimpleDb { request.onblocked = () => { reject( new IndexedDbTransactionError( + action, 'Cannot upgrade IndexedDB schema while another tab is open. ' + 'Close all tabs that access Firestore and reload this page to proceed.' 
) @@ -228,7 +229,7 @@ export class SimpleDb { ) ); } else { - reject(new IndexedDbTransactionError(error)); + reject(new IndexedDbTransactionError(action, error)); } }; @@ -274,6 +275,7 @@ export class SimpleDb { } async runTransaction( + action: string, mode: SimpleDbTransactionMode, objectStores: string[], transactionFn: (transaction: SimpleDbTransaction) => PersistencePromise @@ -285,10 +287,11 @@ export class SimpleDb { ++attemptNumber; try { - this.db = await this.ensureDb(); + this.db = await this.ensureDb(action); const transaction = SimpleDbTransaction.open( this.db, + action, readonly ? 'readonly' : 'readwrite', objectStores ); @@ -424,8 +427,11 @@ export interface IterateOptions { export class IndexedDbTransactionError extends FirestoreError { name = 'IndexedDbTransactionError'; - constructor(cause: Error | string) { - super(Code.UNAVAILABLE, 'IndexedDB transaction failed: ' + cause); + constructor(actionName: string, cause: Error | string) { + super( + Code.UNAVAILABLE, + `IndexedDB transaction '${actionName}' failed: ${cause}` + ); } } @@ -450,24 +456,28 @@ export class SimpleDbTransaction { static open( db: IDBDatabase, + action: string, mode: IDBTransactionMode, objectStoreNames: string[] ): SimpleDbTransaction { try { - return new SimpleDbTransaction(db.transaction(objectStoreNames, mode)); + return new SimpleDbTransaction(action, db.transaction(mode)); } catch (e) { - throw new IndexedDbTransactionError(e); + throw new IndexedDbTransactionError(action, e); } } - constructor(private readonly transaction: IDBTransaction) { + constructor( + private readonly action: string, + private readonly transaction: IDBTransaction + ) { this.transaction.oncomplete = () => { this.completionDeferred.resolve(); }; this.transaction.onabort = () => { if (transaction.error) { this.completionDeferred.reject( - new IndexedDbTransactionError(transaction.error) + new IndexedDbTransactionError(action, transaction.error) ); } else { this.completionDeferred.resolve(); @@ 
-477,7 +487,9 @@ export class SimpleDbTransaction { const error = checkForAndReportiOSError( (event.target as IDBRequest).error! ); - this.completionDeferred.reject(new IndexedDbTransactionError(error)); + this.completionDeferred.reject( + new IndexedDbTransactionError(action, error) + ); }; } diff --git a/packages/firestore/test/unit/local/encoded_resource_path.test.ts b/packages/firestore/test/unit/local/encoded_resource_path.test.ts index 8d259973cba..9f4253ccd98 100644 --- a/packages/firestore/test/unit/local/encoded_resource_path.test.ts +++ b/packages/firestore/test/unit/local/encoded_resource_path.test.ts @@ -195,7 +195,12 @@ function runTransaction( transaction: SimpleDbTransaction ) => PersistencePromise ): Promise { - return db.runTransaction('readwrite', ['test'], txn => { - return fn(txn.store('test'), txn); - }); + return db.runTransaction( + 'EncodedResourcePathTests', + 'readwrite', + ['test'], + txn => { + return fn(txn.store('test'), txn); + } + ); } diff --git a/packages/firestore/test/unit/local/indexeddb_persistence.test.ts b/packages/firestore/test/unit/local/indexeddb_persistence.test.ts index c7482cb14bb..6db9db68ca1 100644 --- a/packages/firestore/test/unit/local/indexeddb_persistence.test.ts +++ b/packages/firestore/test/unit/local/indexeddb_persistence.test.ts @@ -85,6 +85,8 @@ import { getWindow } from '../../../src/platform/dom'; use(chaiAsPromised); +export const TEST_ACTION = 'IndexedDbPersistenceTest'; + /* eslint-disable no-restricted-globals */ async function withDb( schemaVersion: number, @@ -96,7 +98,7 @@ async function withDb( schemaVersion, schemaConverter ); - const database = await simpleDb.ensureDb(); + const database = await simpleDb.ensureDb(TEST_ACTION); await fn(simpleDb, database.version, Array.from(database.objectStoreNames)); await simpleDb.close(); } @@ -262,6 +264,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { return withDb(2, db => { return db.runTransaction( + TEST_ACTION, 'readwrite', [DbTarget.store, 
DbTargetGlobal.store, DbMutationBatch.store], txn => { @@ -290,6 +293,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { expect(objectStores).to.have.members(V3_STORES); return db.runTransaction( + TEST_ACTION, 'readwrite', [DbTarget.store, DbTargetGlobal.store, DbMutationBatch.store], txn => { @@ -354,37 +358,47 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { ]; return withDb(3, db => { - return db.runTransaction('readwrite', [DbMutationBatch.store], txn => { - const store = txn.store(DbMutationBatch.store); - return PersistencePromise.forEach( - testMutations, - (testMutation: DbMutationBatch) => store.put(testMutation) - ); - }); + return db.runTransaction( + TEST_ACTION, + 'readwrite', + [DbMutationBatch.store], + txn => { + const store = txn.store(DbMutationBatch.store); + return PersistencePromise.forEach( + testMutations, + (testMutation: DbMutationBatch) => store.put(testMutation) + ); + } + ); }).then(() => withDb(4, (db, version, objectStores) => { expect(version).to.be.equal(4); expect(objectStores).to.have.members(V4_STORES); - return db.runTransaction('readwrite', [DbMutationBatch.store], txn => { - const store = txn.store( - DbMutationBatch.store - ); - let p = PersistencePromise.forEach( - testMutations, - (testMutation: DbMutationBatch) => - store.get(testMutation.batchId).next(mutationBatch => { - expect(mutationBatch).to.deep.equal(testMutation); - }) - ); - p = p.next(() => { - store - .add({} as any) // eslint-disable-line @typescript-eslint/no-explicit-any - .next(batchId => { - expect(batchId).to.equal(43); - }); - }); - return p; - }); + return db.runTransaction( + TEST_ACTION, + 'readwrite', + [DbMutationBatch.store], + txn => { + const store = txn.store( + DbMutationBatch.store + ); + let p = PersistencePromise.forEach( + testMutations, + (testMutation: DbMutationBatch) => + store.get(testMutation.batchId).next(mutationBatch => { + expect(mutationBatch).to.deep.equal(testMutation); + }) + ); + p = p.next(() => { + store + 
.add({} as any) // eslint-disable-line @typescript-eslint/no-explicit-any + .next(batchId => { + expect(batchId).to.equal(43); + }); + }); + return p; + } + ); }) ); }); @@ -456,7 +470,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { return withDb(4, db => { // We can only use the V4 stores here, since that's as far as we've upgraded. - return db.runTransaction('readwrite', V4_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V4_STORES, txn => { const mutationBatchStore = txn.store< DbMutationBatchKey, DbMutationBatch @@ -504,7 +518,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { expect(version).to.be.equal(5); // There is no V5_STORES, continue using V4. - return db.runTransaction('readwrite', V4_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V4_STORES, txn => { const mutationBatchStore = txn.store< DbMutationBatchKey, DbMutationBatch @@ -555,7 +569,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { dbDoc: toDbRemoteDocument(TEST_SERIALIZER, doc, doc.version) })); // V5 stores doesn't exist - return db.runTransaction('readwrite', V4_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V4_STORES, txn => { const store = txn.store( DbRemoteDocument.store ); @@ -567,7 +581,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { }); }); await withDb(6, db => { - return db.runTransaction('readwrite', V6_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { const store = txn.store< DbRemoteDocumentGlobalKey, DbRemoteDocumentGlobal @@ -588,7 +602,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { const newSequenceNumber = 2; await withDb(6, db => { const serializer = TEST_SERIALIZER; - return db.runTransaction('readwrite', V6_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { const targetGlobalStore = txn.store( DbTargetGlobal.store ); @@ -638,7 +652,7 @@ describe('IndexedDbSchema: 
createOrUpgradeDb', () => { // Now run the migration and verify await withDb(7, db => { - return db.runTransaction('readwrite', V6_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { const targetDocumentStore = txn.store< DbTargetDocumentKey, DbTargetDocument @@ -688,7 +702,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { }; await withDb(7, db => { - return db.runTransaction('readwrite', V6_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { const remoteDocumentStore = txn.store< DbRemoteDocumentKey, DbRemoteDocument @@ -724,7 +738,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { // Migrate to v8 and verify index entries. await withDb(8, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { const collectionParentsStore = txn.store< DbCollectionParentKey, DbCollectionParent @@ -748,7 +762,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { it('rewrites canonical IDs during upgrade from version 9 to 10', async () => { await withDb(9, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { const targetsStore = txn.store(DbTarget.store); const filteredQuery = query('collection', filter('foo', '==', 'bar')); @@ -767,7 +781,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { }); await withDb(10, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { const targetsStore = txn.store(DbTarget.store); return targetsStore.iterate((key, value) => { const targetData = fromDbTarget(value).target; @@ -799,7 +813,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { ]; await withDb(8, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', 
V8_STORES, txn => { const remoteDocumentStore = txn.store< DbRemoteDocumentKey, DbRemoteDocument @@ -828,7 +842,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { // Migrate to v9 and verify that new documents are indexed. await withDb(9, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { const remoteDocumentStore = txn.store< DbRemoteDocumentKey, DbRemoteDocument @@ -874,7 +888,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { const newDocPaths = ['coll/doc3', 'coll/doc4', 'abc/doc2']; await withDb(9, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { return addDocs(txn, oldDocPaths, /* version= */ 1).next(() => addDocs(txn, newDocPaths, /* version= */ 2).next(() => { const remoteDocumentStore = txn.store< @@ -907,7 +921,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { const newDocPaths = ['coll1/new', 'coll2/new']; await withDb(9, db => { - return db.runTransaction('readwrite', V8_STORES, txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { return addDocs(txn, oldDocPaths, /* version= */ 1).next(() => addDocs(txn, newDocPaths, /* version= */ 2).next(() => { const remoteDocumentStore = txn.store< @@ -947,7 +961,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { downgradeVersion, schemaConverter ); - await db.ensureDb(); + await db.ensureDb(TEST_ACTION); } catch (e) { error = e; expect( @@ -970,12 +984,17 @@ describe('IndexedDb: canActAsPrimary', () => { SCHEMA_VERSION, new SchemaConverter(TEST_SERIALIZER) ); - await simpleDb.runTransaction('readwrite', [DbPrimaryClient.store], txn => { - const primaryStore = txn.store( - DbPrimaryClient.store - ); - return primaryStore.delete(DbPrimaryClient.key); - }); + await simpleDb.runTransaction( + TEST_ACTION, + 'readwrite', + [DbPrimaryClient.store], + txn => { + const 
primaryStore = txn.store( + DbPrimaryClient.store + ); + return primaryStore.delete(DbPrimaryClient.key); + } + ); simpleDb.close(); } @@ -986,6 +1005,7 @@ describe('IndexedDb: canActAsPrimary', () => { new SchemaConverter(TEST_SERIALIZER) ); const leaseOwner = await simpleDb.runTransaction( + TEST_ACTION, 'readonly', [DbPrimaryClient.store], txn => { @@ -1228,7 +1248,7 @@ describe('IndexedDb', () => { db.close(); // Running a new IndexedDB transaction should re-open the database and not // throw. - await db.runTransaction('readwrite', V1_STORES, () => + await db.runTransaction(TEST_ACTION, 'readwrite', V1_STORES, () => PersistencePromise.resolve() ); }); diff --git a/packages/firestore/test/unit/local/simple_db.test.ts b/packages/firestore/test/unit/local/simple_db.test.ts index b3a19f0fa2c..08369aafc4d 100644 --- a/packages/firestore/test/unit/local/simple_db.test.ts +++ b/packages/firestore/test/unit/local/simple_db.test.ts @@ -32,6 +32,8 @@ import { Code, FirestoreError } from '../../../src/util/error'; use(chaiAsPromised); +const TEST_ACTION = 'SimpleDbTest'; + interface User { id: number; name: string; @@ -94,7 +96,7 @@ describe('SimpleDb', () => { transaction: SimpleDbTransaction ) => PersistencePromise ): Promise { - return db.runTransaction('readwrite', ['users'], txn => { + return db.runTransaction(TEST_ACTION, 'readwrite', ['users'], txn => { return fn(txn.store('users'), txn); }); } @@ -480,17 +482,22 @@ describe('SimpleDb', () => { ['foo', 'd'], ['foob'] ]; - await db.runTransaction('readwrite', ['users', 'docs'], txn => { - const docsStore = txn.store('docs'); - return PersistencePromise.waitFor( - keys.map(key => { - const value = 'doc ' + key.join('/'); - return docsStore.put(key, value); - }) - ); - }); + await db.runTransaction( + TEST_ACTION, + 'readwrite', + ['users', 'docs'], + txn => { + const docsStore = txn.store('docs'); + return PersistencePromise.waitFor( + keys.map(key => { + const value = 'doc ' + key.join('/'); + return 
docsStore.put(key, value); + }) + ); + } + ); - await db.runTransaction('readonly', ['docs'], txn => { + await db.runTransaction(TEST_ACTION, 'readonly', ['docs'], txn => { const store = txn.store('docs'); const range = IDBKeyRange.bound(['foo'], ['foo', 'c']); return store.loadAll(range).next(results => { @@ -502,20 +509,25 @@ describe('SimpleDb', () => { it('retries transactions', async () => { let attemptCount = 0; - const result = await db.runTransaction('readwrite', ['users'], txn => { - ++attemptCount; - if (attemptCount === 1) { - const store = txn.store('users'); - return store - .add(dummyUser) - .next(() => { - return store.add(dummyUser); // Fails with a unique key violation - }) - .next(() => 'Aborted'); - } else { - return PersistencePromise.resolve('success'); + const result = await db.runTransaction( + TEST_ACTION, + 'readwrite', + ['users'], + txn => { + ++attemptCount; + if (attemptCount === 1) { + const store = txn.store('users'); + return store + .add(dummyUser) + .next(() => { + return store.add(dummyUser); // Fails with a unique key violation + }) + .next(() => 'Aborted'); + } else { + return PersistencePromise.resolve('success'); + } } - }); + ); expect(result).to.equal('success'); expect(attemptCount).to.equal(2); @@ -525,7 +537,7 @@ describe('SimpleDb', () => { let attemptCount = 0; await expect( - db.runTransaction('readwrite', ['users'], txn => { + db.runTransaction(TEST_ACTION, 'readwrite', ['users'], txn => { ++attemptCount; const store = txn.store('users'); return store @@ -544,7 +556,7 @@ describe('SimpleDb', () => { let attemptCount = 0; await expect( - db.runTransaction('readwrite', ['users'], txn => { + db.runTransaction(TEST_ACTION, 'readwrite', ['users'], txn => { ++attemptCount; txn.abort(new FirestoreError(Code.ABORTED, 'Aborted')); return PersistencePromise.reject(new Error()); diff --git a/packages/firestore/test/unit/specs/spec_test_components.ts b/packages/firestore/test/unit/specs/spec_test_components.ts index 
c17db8caf6c..ab6c672a888 100644 --- a/packages/firestore/test/unit/specs/spec_test_components.ts +++ b/packages/firestore/test/unit/specs/spec_test_components.ts @@ -115,6 +115,7 @@ function failTransactionIfNeeded( failActions.indexOf(actionName as PersistenceAction) !== -1; if (shouldFail) { throw new IndexedDbTransactionError( + 'Simulated error', new Error('Simulated retryable error: ' + actionName) ); } diff --git a/packages/firestore/test/unit/specs/spec_test_runner.ts b/packages/firestore/test/unit/specs/spec_test_runner.ts index 5f474c466d8..c726f329f44 100644 --- a/packages/firestore/test/unit/specs/spec_test_runner.ts +++ b/packages/firestore/test/unit/specs/spec_test_runner.ts @@ -1624,11 +1624,16 @@ async function clearCurrentPrimaryLease(): Promise { SCHEMA_VERSION, new SchemaConverter(TEST_SERIALIZER) ); - await db.runTransaction('readwrite', [DbPrimaryClient.store], txn => { - const primaryClientStore = txn.store( - DbPrimaryClient.store - ); - return primaryClientStore.delete(DbPrimaryClient.key); - }); + await db.runTransaction( + 'clearCurrentPrimaryLease', + 'readwrite', + [DbPrimaryClient.store], + txn => { + const primaryClientStore = txn.store( + DbPrimaryClient.store + ); + return primaryClientStore.delete(DbPrimaryClient.key); + } + ); db.close(); } diff --git a/packages/firestore/test/unit/util/async_queue.test.ts b/packages/firestore/test/unit/util/async_queue.test.ts index d6a78e14c24..64725146e3f 100644 --- a/packages/firestore/test/unit/util/async_queue.test.ts +++ b/packages/firestore/test/unit/util/async_queue.test.ts @@ -222,6 +222,7 @@ describe('AsyncQueue', () => { doStep(1); if (completedSteps.length === 1) { throw new IndexedDbTransactionError( + 'Simulated error', new Error('Simulated retryable error') ); } @@ -272,6 +273,7 @@ describe('AsyncQueue', () => { doStep(1); if (completedSteps.length === 1) { throw new IndexedDbTransactionError( + 'Simulated error', new Error('Simulated retryable error') ); } @@ -299,6 +301,7 @@ 
describe('AsyncQueue', () => { doStep(1); if (completedSteps.length === 1) { throw new IndexedDbTransactionError( + 'Simulated error', new Error('Simulated retryable error') ); } @@ -345,6 +348,7 @@ describe('AsyncQueue', () => { doStep(1); if (completedSteps.length === 1) { throw new IndexedDbTransactionError( + 'Simulated error', new Error('Simulated retryable error') ); } From a7f8cdd8054296603a0b71982dd7d3a35903d082 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 30 Sep 2020 12:13:54 -0700 Subject: [PATCH 2/4] Create gentle-doors-play.md --- .changeset/gentle-doors-play.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/gentle-doors-play.md diff --git a/.changeset/gentle-doors-play.md b/.changeset/gentle-doors-play.md new file mode 100644 index 00000000000..d7f742f0f86 --- /dev/null +++ b/.changeset/gentle-doors-play.md @@ -0,0 +1,5 @@ +--- +"@firebase/firestore": patch +--- + +The SDK now includes more information in the error message for failed IndexedDB transactions. 
From af77049edef78d04e372f5b19c8016d2b826713b Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 30 Sep 2020 13:41:32 -0700 Subject: [PATCH 3/4] Feedback --- .../unit/local/indexeddb_persistence.test.ts | 799 ++++++++++-------- .../test/unit/local/simple_db.test.ts | 49 +- 2 files changed, 471 insertions(+), 377 deletions(-) diff --git a/packages/firestore/test/unit/local/indexeddb_persistence.test.ts b/packages/firestore/test/unit/local/indexeddb_persistence.test.ts index 6db9db68ca1..7635da6682c 100644 --- a/packages/firestore/test/unit/local/indexeddb_persistence.test.ts +++ b/packages/firestore/test/unit/local/indexeddb_persistence.test.ts @@ -82,11 +82,10 @@ import { import { canonifyTarget } from '../../../src/core/target'; import { FakeDocument, testDocument } from '../../util/test_platform'; import { getWindow } from '../../../src/platform/dom'; +import { Context } from 'mocha'; use(chaiAsPromised); -export const TEST_ACTION = 'IndexedDbPersistenceTest'; - /* eslint-disable no-restricted-globals */ async function withDb( schemaVersion: number, @@ -98,7 +97,7 @@ async function withDb( schemaVersion, schemaConverter ); - const database = await simpleDb.ensureDb(TEST_ACTION); + const database = await simpleDb.ensureDb('IndexedDbPersistenceTests'); await fn(simpleDb, database.version, Array.from(database.objectStoreNames)); await simpleDb.close(); } @@ -243,7 +242,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { }); }); - it('drops the query cache from 2 to 3', () => { + it('drops the query cache from 2 to 3', function (this: Context) { const userId = 'user'; const batchId = 1; const targetId = 2; @@ -264,7 +263,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { return withDb(2, db => { return db.runTransaction( - TEST_ACTION, + this.test!.fullTitle(), 'readwrite', [DbTarget.store, DbTargetGlobal.store, DbMutationBatch.store], txn => { @@ -293,7 +292,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { 
expect(objectStores).to.have.members(V3_STORES); return db.runTransaction( - TEST_ACTION, + this.test!.fullTitle(), 'readwrite', [DbTarget.store, DbTargetGlobal.store, DbMutationBatch.store], txn => { @@ -331,7 +330,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { }); }); - it('can upgrade from schema version 3 to 4', () => { + it('can upgrade from schema version 3 to 4', function (this: Context) { const testWrite = { delete: 'foo' }; const testMutations: DbMutationBatch[] = [ { @@ -359,7 +358,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { return withDb(3, db => { return db.runTransaction( - TEST_ACTION, + this.test!.fullTitle(), 'readwrite', [DbMutationBatch.store], txn => { @@ -375,7 +374,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { expect(version).to.be.equal(4); expect(objectStores).to.have.members(V4_STORES); return db.runTransaction( - TEST_ACTION, + this.test!.fullTitle(), 'readwrite', [DbMutationBatch.store], txn => { @@ -403,7 +402,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { ); }); - it('can upgrade from schema version 4 to 5', () => { + it('can upgrade from schema version 4 to 5', function (this: Context) { // This test creates a database with schema version 4 that has two users, // both of which have acknowledged mutations that haven't yet been removed // from IndexedDb ("heldWriteAcks"). Schema version 5 removes heldWriteAcks, @@ -470,55 +469,11 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { return withDb(4, db => { // We can only use the V4 stores here, since that's as far as we've upgraded. 
- return db.runTransaction(TEST_ACTION, 'readwrite', V4_STORES, txn => { - const mutationBatchStore = txn.store< - DbMutationBatchKey, - DbMutationBatch - >(DbMutationBatch.store); - const documentMutationStore = txn.store< - DbDocumentMutationKey, - DbDocumentMutation - >(DbDocumentMutation.store); - const mutationQueuesStore = txn.store< - DbMutationQueueKey, - DbMutationQueue - >(DbMutationQueue.store); - // Manually populate the mutation queue and create all indicies. - return PersistencePromise.forEach( - testMutations, - (testMutation: DbMutationBatch) => { - return mutationBatchStore.put(testMutation).next(() => { - return PersistencePromise.forEach( - testMutation.mutations, - (write: firestoreV1ApiClientInterfaces.Write) => { - const indexKey = DbDocumentMutation.key( - testMutation.userId, - path(write.update!.name!, 5), - testMutation.batchId - ); - return documentMutationStore.put( - indexKey, - DbDocumentMutation.PLACEHOLDER - ); - } - ); - }); - } - ).next(() => - // Populate the mutation queues' metadata - PersistencePromise.waitFor([ - mutationQueuesStore.put(new DbMutationQueue('foo', 2, '')), - mutationQueuesStore.put(new DbMutationQueue('bar', 3, '')), - mutationQueuesStore.put(new DbMutationQueue('empty', -1, '')) - ]) - ); - }); - }).then(() => - withDb(5, (db, version) => { - expect(version).to.be.equal(5); - - // There is no V5_STORES, continue using V4. - return db.runTransaction(TEST_ACTION, 'readwrite', V4_STORES, txn => { + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V4_STORES, + txn => { const mutationBatchStore = txn.store< DbMutationBatchKey, DbMutationBatch @@ -531,32 +486,86 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { DbMutationQueueKey, DbMutationQueue >(DbMutationQueue.store); - - // Verify that all but the two pending mutations have been cleared - // by the migration. 
- let p = mutationBatchStore.count().next(count => { - expect(count).to.deep.equal(2); - }); - p = p.next(() => - documentMutationStore.count().next(count => { - expect(count).to.equal(2); - }) + // Manually populate the mutation queue and create all indicies. + return PersistencePromise.forEach( + testMutations, + (testMutation: DbMutationBatch) => { + return mutationBatchStore.put(testMutation).next(() => { + return PersistencePromise.forEach( + testMutation.mutations, + (write: firestoreV1ApiClientInterfaces.Write) => { + const indexKey = DbDocumentMutation.key( + testMutation.userId, + path(write.update!.name!, 5), + testMutation.batchId + ); + return documentMutationStore.put( + indexKey, + DbDocumentMutation.PLACEHOLDER + ); + } + ); + }); + } + ).next(() => + // Populate the mutation queues' metadata + PersistencePromise.waitFor([ + mutationQueuesStore.put(new DbMutationQueue('foo', 2, '')), + mutationQueuesStore.put(new DbMutationQueue('bar', 3, '')), + mutationQueuesStore.put(new DbMutationQueue('empty', -1, '')) + ]) ); + } + ); + }).then(() => + withDb(5, (db, version) => { + expect(version).to.be.equal(5); - // Verify that we still have one metadata entry for each existing - // queue - p = p.next(() => - mutationQueuesStore.count().next(count => { - expect(count).to.equal(3); - }) - ); - return p; - }); + // There is no V5_STORES, continue using V4. + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V4_STORES, + txn => { + const mutationBatchStore = txn.store< + DbMutationBatchKey, + DbMutationBatch + >(DbMutationBatch.store); + const documentMutationStore = txn.store< + DbDocumentMutationKey, + DbDocumentMutation + >(DbDocumentMutation.store); + const mutationQueuesStore = txn.store< + DbMutationQueueKey, + DbMutationQueue + >(DbMutationQueue.store); + + // Verify that all but the two pending mutations have been cleared + // by the migration. 
+ let p = mutationBatchStore.count().next(count => { + expect(count).to.deep.equal(2); + }); + p = p.next(() => + documentMutationStore.count().next(count => { + expect(count).to.equal(2); + }) + ); + + // Verify that we still have one metadata entry for each existing + // queue + p = p.next(() => + mutationQueuesStore.count().next(count => { + expect(count).to.equal(3); + }) + ); + return p; + } + ); }) ); }); - it('can upgrade from version 5 to 6', async () => { + it('can upgrade from version 5 to 6', async function (this: Context) { await withDb(5, db => { // Add some documents const docs = [ @@ -569,116 +578,137 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { dbDoc: toDbRemoteDocument(TEST_SERIALIZER, doc, doc.version) })); // V5 stores doesn't exist - return db.runTransaction(TEST_ACTION, 'readwrite', V4_STORES, txn => { - const store = txn.store( - DbRemoteDocument.store - ); - return PersistencePromise.forEach( - dbRemoteDocs, - ({ dbKey, dbDoc }: { dbKey: string[]; dbDoc: DbRemoteDocument }) => - store.put(dbKey, dbDoc) - ); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V4_STORES, + txn => { + const store = txn.store( + DbRemoteDocument.store + ); + return PersistencePromise.forEach( + dbRemoteDocs, + ({ dbKey, dbDoc }: { dbKey: string[]; dbDoc: DbRemoteDocument }) => + store.put(dbKey, dbDoc) + ); + } + ); }); await withDb(6, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { - const store = txn.store< - DbRemoteDocumentGlobalKey, - DbRemoteDocumentGlobal - >(DbRemoteDocumentGlobal.store); - return store.get(DbRemoteDocumentGlobal.key).next(metadata => { - // We don't really care what the size is, just that it's greater than 0. - // Our sizing algorithm may change at some point. 
- expect(metadata!.byteSize).to.be.greaterThan(0); - }); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V6_STORES, + txn => { + const store = txn.store< + DbRemoteDocumentGlobalKey, + DbRemoteDocumentGlobal + >(DbRemoteDocumentGlobal.store); + return store.get(DbRemoteDocumentGlobal.key).next(metadata => { + // We don't really care what the size is, just that it's greater than 0. + // Our sizing algorithm may change at some point. + expect(metadata!.byteSize).to.be.greaterThan(0); + }); + } + ); }); }); - it('can upgrade from version 6 to 7', async () => { + it('can upgrade from version 6 to 7', async function (this: Context) { const oldSequenceNumber = 1; // Set the highest sequence number to this value so that untagged documents // will pick up this value. const newSequenceNumber = 2; await withDb(6, db => { const serializer = TEST_SERIALIZER; - return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { - const targetGlobalStore = txn.store( - DbTargetGlobal.store - ); - const remoteDocumentStore = txn.store< - DbRemoteDocumentKey, - DbRemoteDocument - >(DbRemoteDocument.store); - const targetDocumentStore = txn.store< - DbTargetDocumentKey, - DbTargetDocument - >(DbTargetDocument.store); - return targetGlobalStore - .get(DbTargetGlobal.key) - .next(metadata => { - expect(metadata).to.not.be.null; - metadata!.highestListenSequenceNumber = newSequenceNumber; - return targetGlobalStore.put(DbTargetGlobal.key, metadata!); - }) - .next(() => { - // Set up some documents (we only need the keys) - // For the odd ones, add sentinel rows. 
- const promises: Array> = []; - for (let i = 0; i < 10; i++) { - const document = doc('docs/doc_' + i, 1, { foo: 'bar' }); - promises.push( - remoteDocumentStore.put( - document.key.path.toArray(), - toDbRemoteDocument(serializer, document, document.version) - ) - ); - if (i % 2 === 1) { + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V6_STORES, + txn => { + const targetGlobalStore = txn.store< + DbTargetGlobalKey, + DbTargetGlobal + >(DbTargetGlobal.store); + const remoteDocumentStore = txn.store< + DbRemoteDocumentKey, + DbRemoteDocument + >(DbRemoteDocument.store); + const targetDocumentStore = txn.store< + DbTargetDocumentKey, + DbTargetDocument + >(DbTargetDocument.store); + return targetGlobalStore + .get(DbTargetGlobal.key) + .next(metadata => { + expect(metadata).to.not.be.null; + metadata!.highestListenSequenceNumber = newSequenceNumber; + return targetGlobalStore.put(DbTargetGlobal.key, metadata!); + }) + .next(() => { + // Set up some documents (we only need the keys) + // For the odd ones, add sentinel rows. 
+ const promises: Array> = []; + for (let i = 0; i < 10; i++) { + const document = doc('docs/doc_' + i, 1, { foo: 'bar' }); promises.push( - targetDocumentStore.put( - new DbTargetDocument( - 0, - encodeResourcePath(document.key.path), - oldSequenceNumber - ) + remoteDocumentStore.put( + document.key.path.toArray(), + toDbRemoteDocument(serializer, document, document.version) ) ); + if (i % 2 === 1) { + promises.push( + targetDocumentStore.put( + new DbTargetDocument( + 0, + encodeResourcePath(document.key.path), + oldSequenceNumber + ) + ) + ); + } } - } - return PersistencePromise.waitFor(promises); - }); - }); + return PersistencePromise.waitFor(promises); + }); + } + ); }); // Now run the migration and verify await withDb(7, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { - const targetDocumentStore = txn.store< - DbTargetDocumentKey, - DbTargetDocument - >(DbTargetDocument.store); - const range = IDBKeyRange.bound( - [0], - [1], - /*lowerOpen=*/ false, - /*upperOpen=*/ true - ); - return targetDocumentStore.iterate( - { range }, - ([_, path], targetDocument) => { - const decoded = decodeResourcePath(path); - const lastSegment = decoded.lastSegment(); - const docNum = +lastSegment.split('_')[1]; - const expected = - docNum % 2 === 1 ? oldSequenceNumber : newSequenceNumber; - expect(targetDocument.sequenceNumber).to.equal(expected); - } - ); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V6_STORES, + txn => { + const targetDocumentStore = txn.store< + DbTargetDocumentKey, + DbTargetDocument + >(DbTargetDocument.store); + const range = IDBKeyRange.bound( + [0], + [1], + /*lowerOpen=*/ false, + /*upperOpen=*/ true + ); + return targetDocumentStore.iterate( + { range }, + ([_, path], targetDocument) => { + const decoded = decodeResourcePath(path); + const lastSegment = decoded.lastSegment(); + const docNum = +lastSegment.split('_')[1]; + const expected = + docNum % 2 === 1 ? 
oldSequenceNumber : newSequenceNumber; + expect(targetDocument.sequenceNumber).to.equal(expected); + } + ); + } + ); }); }); - it('can upgrade from version 7 to 8', async () => { + it('can upgrade from version 7 to 8', async function (this: Context) { // This test creates a database with schema version 7 that has a few // mutations and a few remote documents and then ensures that appropriate // entries are written to the collectionParentIndex. @@ -702,99 +732,128 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { }; await withDb(7, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V6_STORES, txn => { - const remoteDocumentStore = txn.store< - DbRemoteDocumentKey, - DbRemoteDocument - >(DbRemoteDocument.store); - const documentMutationStore = txn.store< - DbDocumentMutationKey, - DbDocumentMutation - >(DbDocumentMutation.store); - // We "cheat" and only write the DbDocumentMutation index entries, since that's - // all the migration uses. - return PersistencePromise.forEach(writePaths, (writePath: string) => { - const indexKey = DbDocumentMutation.key( - 'dummy-uid', - path(writePath), - /*dummy batchId=*/ 123 - ); - return documentMutationStore.put( - indexKey, - DbDocumentMutation.PLACEHOLDER - ); - }).next(() => { - // Write the remote document entries. - return PersistencePromise.forEach(remoteDocPaths, (path: string) => { - const remoteDoc = doc(path, /*version=*/ 1, { data: 1 }); - return remoteDocumentStore.put( - remoteDoc.key.path.toArray(), - toDbRemoteDocument(TEST_SERIALIZER, remoteDoc, remoteDoc.version) + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V6_STORES, + txn => { + const remoteDocumentStore = txn.store< + DbRemoteDocumentKey, + DbRemoteDocument + >(DbRemoteDocument.store); + const documentMutationStore = txn.store< + DbDocumentMutationKey, + DbDocumentMutation + >(DbDocumentMutation.store); + // We "cheat" and only write the DbDocumentMutation index entries, since that's + // all the migration uses. 
+ return PersistencePromise.forEach(writePaths, (writePath: string) => { + const indexKey = DbDocumentMutation.key( + 'dummy-uid', + path(writePath), + /*dummy batchId=*/ 123 + ); + return documentMutationStore.put( + indexKey, + DbDocumentMutation.PLACEHOLDER + ); + }).next(() => { + // Write the remote document entries. + return PersistencePromise.forEach( + remoteDocPaths, + (path: string) => { + const remoteDoc = doc(path, /*version=*/ 1, { data: 1 }); + return remoteDocumentStore.put( + remoteDoc.key.path.toArray(), + toDbRemoteDocument( + TEST_SERIALIZER, + remoteDoc, + remoteDoc.version + ) + ); + } ); }); - }); - }); + } + ); }); // Migrate to v8 and verify index entries. await withDb(8, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - const collectionParentsStore = txn.store< - DbCollectionParentKey, - DbCollectionParent - >(DbCollectionParent.store); - return collectionParentsStore.loadAll().next(indexEntries => { - const actualParents: { [key: string]: string[] } = {}; - for (const { collectionId, parent } of indexEntries) { - let parents = actualParents[collectionId]; - if (!parents) { - parents = []; - actualParents[collectionId] = parents; + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + const collectionParentsStore = txn.store< + DbCollectionParentKey, + DbCollectionParent + >(DbCollectionParent.store); + return collectionParentsStore.loadAll().next(indexEntries => { + const actualParents: { [key: string]: string[] } = {}; + for (const { collectionId, parent } of indexEntries) { + let parents = actualParents[collectionId]; + if (!parents) { + parents = []; + actualParents[collectionId] = parents; + } + parents.push(decodeResourcePath(parent).toString()); } - parents.push(decodeResourcePath(parent).toString()); - } - expect(actualParents).to.deep.equal(expectedParents); - }); - }); + expect(actualParents).to.deep.equal(expectedParents); + }); + } + ); }); }); - 
it('rewrites canonical IDs during upgrade from version 9 to 10', async () => { + it('rewrites canonical IDs during upgrade from version 9 to 10', async function (this: Context) { await withDb(9, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - const targetsStore = txn.store(DbTarget.store); - - const filteredQuery = query('collection', filter('foo', '==', 'bar')); - const initialTargetData = new TargetData( - queryToTarget(filteredQuery), - /* targetId= */ 2, - TargetPurpose.Listen, - /* sequenceNumber= */ 1 - ); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + const targetsStore = txn.store(DbTarget.store); + + const filteredQuery = query('collection', filter('foo', '==', 'bar')); + const initialTargetData = new TargetData( + queryToTarget(filteredQuery), + /* targetId= */ 2, + TargetPurpose.Listen, + /* sequenceNumber= */ 1 + ); - const serializedData = toDbTarget(TEST_SERIALIZER, initialTargetData); - serializedData.canonicalId = 'invalid_canonical_id'; + const serializedData = toDbTarget(TEST_SERIALIZER, initialTargetData); + serializedData.canonicalId = 'invalid_canonical_id'; - return targetsStore.put(toDbTarget(TEST_SERIALIZER, initialTargetData)); - }); + return targetsStore.put( + toDbTarget(TEST_SERIALIZER, initialTargetData) + ); + } + ); }); await withDb(10, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - const targetsStore = txn.store(DbTarget.store); - return targetsStore.iterate((key, value) => { - const targetData = fromDbTarget(value).target; - const expectedCanonicalId = canonifyTarget(targetData); - - const actualCanonicalId = value.canonicalId; - expect(actualCanonicalId).to.equal(expectedCanonicalId); - }); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + const targetsStore = txn.store(DbTarget.store); + return targetsStore.iterate((key, value) => { + const targetData = 
fromDbTarget(value).target; + const expectedCanonicalId = canonifyTarget(targetData); + + const actualCanonicalId = value.canonicalId; + expect(actualCanonicalId).to.equal(expectedCanonicalId); + }); + } + ); }); }); - it('can use read-time index after schema migration', async () => { + it('can use read-time index after schema migration', async function (this: Context) { // This test creates a database with schema version 8 that has a few // remote documents, adds an index and then reads new documents back // via that index. @@ -813,140 +872,163 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { ]; await withDb(8, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - const remoteDocumentStore = txn.store< - DbRemoteDocumentKey, - DbRemoteDocument - >(DbRemoteDocument.store); - - // Write the remote document entries. - return PersistencePromise.forEach(existingDocPaths, (path: string) => { - const remoteDoc = doc(path, /*version=*/ 1, { data: 1 }); - - const dbRemoteDoc = toDbRemoteDocument( - TEST_SERIALIZER, - remoteDoc, - remoteDoc.version - ); - // Mimic the old serializer and delete previously unset values - delete dbRemoteDoc.readTime; - delete dbRemoteDoc.parentPath; + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + const remoteDocumentStore = txn.store< + DbRemoteDocumentKey, + DbRemoteDocument + >(DbRemoteDocument.store); - return remoteDocumentStore.put( - remoteDoc.key.path.toArray(), - dbRemoteDoc + // Write the remote document entries. 
+ return PersistencePromise.forEach( + existingDocPaths, + (path: string) => { + const remoteDoc = doc(path, /*version=*/ 1, { data: 1 }); + + const dbRemoteDoc = toDbRemoteDocument( + TEST_SERIALIZER, + remoteDoc, + remoteDoc.version + ); + // Mimic the old serializer and delete previously unset values + delete dbRemoteDoc.readTime; + delete dbRemoteDoc.parentPath; + + return remoteDocumentStore.put( + remoteDoc.key.path.toArray(), + dbRemoteDoc + ); + } ); - }); - }); + } + ); }); // Migrate to v9 and verify that new documents are indexed. await withDb(9, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - const remoteDocumentStore = txn.store< - DbRemoteDocumentKey, - DbRemoteDocument - >(DbRemoteDocument.store); - - // Verify the existing remote document entries. - return remoteDocumentStore - .loadAll() - .next(docsRead => { - const keys = docsRead.map(dbDoc => dbDoc.document!.name); - expect(keys).to.have.members([ - 'projects/test-project/databases/(default)/documents/coll1/doc1', - 'projects/test-project/databases/(default)/documents/coll1/doc2', - 'projects/test-project/databases/(default)/documents/coll2/doc1', - 'projects/test-project/databases/(default)/documents/coll2/doc2' - ]); - }) - .next(() => addDocs(txn, newDocPaths, /* version= */ 2)) - .next(() => { - // Verify that we can get recent changes in a collection filtered by - // read time. 
- const lastReadTime = toDbTimestampKey(version(1)); - const range = IDBKeyRange.lowerBound( - [['coll2'], lastReadTime], - true - ); - return remoteDocumentStore - .loadAll(DbRemoteDocument.collectionReadTimeIndex, range) - .next(docsRead => { - const keys = docsRead.map(dbDoc => dbDoc.document!.name); - expect(keys).to.have.members([ - 'projects/test-project/databases/(default)/documents/coll2/doc3', - 'projects/test-project/databases/(default)/documents/coll2/doc4' - ]); - }); - }); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + const remoteDocumentStore = txn.store< + DbRemoteDocumentKey, + DbRemoteDocument + >(DbRemoteDocument.store); + + // Verify the existing remote document entries. + return remoteDocumentStore + .loadAll() + .next(docsRead => { + const keys = docsRead.map(dbDoc => dbDoc.document!.name); + expect(keys).to.have.members([ + 'projects/test-project/databases/(default)/documents/coll1/doc1', + 'projects/test-project/databases/(default)/documents/coll1/doc2', + 'projects/test-project/databases/(default)/documents/coll2/doc1', + 'projects/test-project/databases/(default)/documents/coll2/doc2' + ]); + }) + .next(() => addDocs(txn, newDocPaths, /* version= */ 2)) + .next(() => { + // Verify that we can get recent changes in a collection filtered by + // read time. 
+ const lastReadTime = toDbTimestampKey(version(1)); + const range = IDBKeyRange.lowerBound( + [['coll2'], lastReadTime], + true + ); + return remoteDocumentStore + .loadAll(DbRemoteDocument.collectionReadTimeIndex, range) + .next(docsRead => { + const keys = docsRead.map(dbDoc => dbDoc.document!.name); + expect(keys).to.have.members([ + 'projects/test-project/databases/(default)/documents/coll2/doc3', + 'projects/test-project/databases/(default)/documents/coll2/doc4' + ]); + }); + }); + } + ); }); }); - it('can get recent document changes in a collection', async () => { + it('can get recent document changes in a collection', async function (this: Context) { const oldDocPaths = ['coll/doc1', 'coll/doc2', 'abc/doc1']; const newDocPaths = ['coll/doc3', 'coll/doc4', 'abc/doc2']; await withDb(9, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - return addDocs(txn, oldDocPaths, /* version= */ 1).next(() => - addDocs(txn, newDocPaths, /* version= */ 2).next(() => { - const remoteDocumentStore = txn.store< - DbRemoteDocumentKey, - DbRemoteDocument - >(DbRemoteDocument.store); - - const lastReadTime = toDbTimestampKey(version(1)); - const range = IDBKeyRange.lowerBound( - [['coll'], lastReadTime], - true - ); - return remoteDocumentStore - .loadAll(DbRemoteDocument.collectionReadTimeIndex, range) - .next(docsRead => { - const keys = docsRead.map(dbDoc => dbDoc.document!.name); - expect(keys).to.have.members([ - 'projects/test-project/databases/(default)/documents/coll/doc3', - 'projects/test-project/databases/(default)/documents/coll/doc4' - ]); - }); - }) - ); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + return addDocs(txn, oldDocPaths, /* version= */ 1).next(() => + addDocs(txn, newDocPaths, /* version= */ 2).next(() => { + const remoteDocumentStore = txn.store< + DbRemoteDocumentKey, + DbRemoteDocument + >(DbRemoteDocument.store); + + const lastReadTime = 
toDbTimestampKey(version(1)); + const range = IDBKeyRange.lowerBound( + [['coll'], lastReadTime], + true + ); + return remoteDocumentStore + .loadAll(DbRemoteDocument.collectionReadTimeIndex, range) + .next(docsRead => { + const keys = docsRead.map(dbDoc => dbDoc.document!.name); + expect(keys).to.have.members([ + 'projects/test-project/databases/(default)/documents/coll/doc3', + 'projects/test-project/databases/(default)/documents/coll/doc4' + ]); + }); + }) + ); + } + ); }); }); - it('can get recent document changes', async () => { + it('can get recent document changes', async function (this: Context) { const oldDocPaths = ['coll1/old', 'coll2/old']; const newDocPaths = ['coll1/new', 'coll2/new']; await withDb(9, db => { - return db.runTransaction(TEST_ACTION, 'readwrite', V8_STORES, txn => { - return addDocs(txn, oldDocPaths, /* version= */ 1).next(() => - addDocs(txn, newDocPaths, /* version= */ 2).next(() => { - const remoteDocumentStore = txn.store< - DbRemoteDocumentKey, - DbRemoteDocument - >(DbRemoteDocument.store); - - const lastReadTime = toDbTimestampKey(version(1)); - const range = IDBKeyRange.lowerBound(lastReadTime, true); - return remoteDocumentStore - .loadAll(DbRemoteDocument.readTimeIndex, range) - .next(docsRead => { - const keys = docsRead.map(dbDoc => dbDoc.document!.name); - expect(keys).to.have.members([ - 'projects/test-project/databases/(default)/documents/coll1/new', - 'projects/test-project/databases/(default)/documents/coll2/new' - ]); - }); - }) - ); - }); + return db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V8_STORES, + txn => { + return addDocs(txn, oldDocPaths, /* version= */ 1).next(() => + addDocs(txn, newDocPaths, /* version= */ 2).next(() => { + const remoteDocumentStore = txn.store< + DbRemoteDocumentKey, + DbRemoteDocument + >(DbRemoteDocument.store); + + const lastReadTime = toDbTimestampKey(version(1)); + const range = IDBKeyRange.lowerBound(lastReadTime, true); + return remoteDocumentStore + 
.loadAll(DbRemoteDocument.readTimeIndex, range) + .next(docsRead => { + const keys = docsRead.map(dbDoc => dbDoc.document!.name); + expect(keys).to.have.members([ + 'projects/test-project/databases/(default)/documents/coll1/new', + 'projects/test-project/databases/(default)/documents/coll2/new' + ]); + }); + }) + ); + } + ); }); }); - it('downgrading throws a custom error', async () => { + it('downgrading throws a custom error', async function (this: Context) { // Upgrade to latest version await withDb(SCHEMA_VERSION, async (db, version) => { expect(version).to.equal(SCHEMA_VERSION); @@ -961,7 +1043,7 @@ describe('IndexedDbSchema: createOrUpgradeDb', () => { downgradeVersion, schemaConverter ); - await db.ensureDb(TEST_ACTION); + await db.ensureDb(this.test!.fullTitle()); } catch (e) { error = e; expect( @@ -985,7 +1067,7 @@ describe('IndexedDb: canActAsPrimary', () => { new SchemaConverter(TEST_SERIALIZER) ); await simpleDb.runTransaction( - TEST_ACTION, + 'clearPrimaryLease', 'readwrite', [DbPrimaryClient.store], txn => { @@ -1005,7 +1087,7 @@ describe('IndexedDb: canActAsPrimary', () => { new SchemaConverter(TEST_SERIALIZER) ); const leaseOwner = await simpleDb.runTransaction( - TEST_ACTION, + 'getCurrentLeaseOwner', 'readonly', [DbPrimaryClient.store], txn => { @@ -1243,13 +1325,16 @@ describe('IndexedDb', () => { return; } - it('can re-open after close', async () => { + it('can re-open after close', async function (this: Context) { return withDb(2, async db => { db.close(); // Running a new IndexedDB transaction should re-open the database and not // throw. 
- await db.runTransaction(TEST_ACTION, 'readwrite', V1_STORES, () => - PersistencePromise.resolve() + await db.runTransaction( + this.test!.fullTitle(), + 'readwrite', + V1_STORES, + () => PersistencePromise.resolve() ); }); }); diff --git a/packages/firestore/test/unit/local/simple_db.test.ts b/packages/firestore/test/unit/local/simple_db.test.ts index 08369aafc4d..e314dae41b0 100644 --- a/packages/firestore/test/unit/local/simple_db.test.ts +++ b/packages/firestore/test/unit/local/simple_db.test.ts @@ -29,11 +29,10 @@ import { PersistencePromise } from '../../../src/local/persistence_promise'; import { fail } from '../../../src/util/assert'; import { Code, FirestoreError } from '../../../src/util/error'; +import { Context } from 'mocha'; use(chaiAsPromised); -const TEST_ACTION = 'SimpleDbTest'; - interface User { id: number; name: string; @@ -96,9 +95,14 @@ describe('SimpleDb', () => { transaction: SimpleDbTransaction ) => PersistencePromise ): Promise { - return db.runTransaction(TEST_ACTION, 'readwrite', ['users'], txn => { - return fn(txn.store('users'), txn); - }); + return db.runTransaction( + 'SimpleDbTests', + 'readwrite', + ['users'], + txn => { + return fn(txn.store('users'), txn); + } + ); } function writeTestData(): Promise { @@ -474,7 +478,7 @@ describe('SimpleDb', () => { }); }); - it('can use arrays as keys and do partial bounds ranges', async () => { + it('can use arrays as keys and do partial bounds ranges', async function (this: Context) { const keys = [ ['fo'], ['foo'], @@ -483,7 +487,7 @@ describe('SimpleDb', () => { ['foob'] ]; await db.runTransaction( - TEST_ACTION, + this.test!.fullTitle(), 'readwrite', ['users', 'docs'], txn => { @@ -497,20 +501,25 @@ describe('SimpleDb', () => { } ); - await db.runTransaction(TEST_ACTION, 'readonly', ['docs'], txn => { - const store = txn.store('docs'); - const range = IDBKeyRange.bound(['foo'], ['foo', 'c']); - return store.loadAll(range).next(results => { - expect(results).to.deep.equal(['doc foo', 'doc 
 foo/bar/baz']); - }); - }); + await db.runTransaction( + this.test!.fullTitle(), + 'readonly', + ['docs'], + txn => { + const store = txn.store('docs'); + const range = IDBKeyRange.bound(['foo'], ['foo', 'c']); + return store.loadAll(range).next(results => { + expect(results).to.deep.equal(['doc foo', 'doc foo/bar/baz']); + }); + } + ); }); - it('retries transactions', async () => { + it('retries transactions', async function (this: Context) { let attemptCount = 0; const result = await db.runTransaction( - TEST_ACTION, + this.test!.fullTitle(), 'readwrite', ['users'], txn => { @@ -533,11 +542,11 @@ describe('SimpleDb', () => { expect(attemptCount).to.equal(2); }); - it('retries transactions only three times', async () => { + it('retries transactions only three times', async function (this: Context) { let attemptCount = 0; await expect( - db.runTransaction(TEST_ACTION, 'readwrite', ['users'], txn => { + db.runTransaction(this.test!.fullTitle(), 'readwrite', ['users'], txn => { ++attemptCount; const store = txn.store('users'); return store @@ -552,11 +561,11 @@ describe('SimpleDb', () => { expect(attemptCount).to.equal(3); }); - it('does not retry explicitly aborted transactions', async () => { + it('does not retry explicitly aborted transactions', async function (this: Context) { let attemptCount = 0; await expect( - db.runTransaction(TEST_ACTION, 'readwrite', ['users'], txn => { + db.runTransaction(this.test!.fullTitle(), 'readwrite', ['users'], txn => { ++attemptCount; txn.abort(new FirestoreError(Code.ABORTED, 'Aborted')); return PersistencePromise.reject(new Error()); From 494f7ae81789bd6068959223ab71aacf597acc9a Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 30 Sep 2020 17:18:18 -0700 Subject: [PATCH 4/4] Fix bug --- packages/firestore/src/local/simple_db.ts 
b/packages/firestore/src/local/simple_db.ts index dafacb32407..47400c8d1ea 100644 --- a/packages/firestore/src/local/simple_db.ts +++ b/packages/firestore/src/local/simple_db.ts @@ -461,7 +461,10 @@ export class SimpleDbTransaction { objectStoreNames: string[] ): SimpleDbTransaction { try { - return new SimpleDbTransaction(action, db.transaction(mode)); + return new SimpleDbTransaction( + action, + db.transaction(objectStoreNames, mode) + ); } catch (e) { throw new IndexedDbTransactionError(action, e); } diff --git a/packages/firestore/test/unit/local/indexeddb_persistence.test.ts b/packages/firestore/test/unit/local/indexeddb_persistence.test.ts index 7635da6682c..f3268600d52 100644 --- a/packages/firestore/test/unit/local/indexeddb_persistence.test.ts +++ b/packages/firestore/test/unit/local/indexeddb_persistence.test.ts @@ -1282,7 +1282,7 @@ describe('IndexedDb: allowTabSynchronization', () => { async db1 => { db1.injectFailures = ['getHighestListenSequenceNumber']; await expect(db1.start()).to.eventually.be.rejectedWith( - 'IndexedDB transaction failed' + "IndexedDB transaction 'Simulated error' failed" ); await db1.shutdown(); }