From eab8740b65beb043bc71187f244bec28c38e30a6 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 12 Sep 2018 11:41:06 -0700 Subject: [PATCH 01/23] Implement IndexedDB LRU Reference Delegate, add LRU tests --- .../src/local/indexeddb_mutation_queue.ts | 37 +- .../src/local/indexeddb_persistence.ts | 237 +++++- .../src/local/indexeddb_query_cache.ts | 124 ++- .../src/local/lru_garbage_collector.ts | 268 ++++++ packages/firestore/src/local/persistence.ts | 56 ++ packages/firestore/src/local/simple_db.ts | 33 + .../unit/local/lru_garbage_collector.test.ts | 770 ++++++++++++++++++ 7 files changed, 1501 insertions(+), 24 deletions(-) create mode 100644 packages/firestore/src/local/lru_garbage_collector.ts create mode 100644 packages/firestore/test/unit/local/lru_garbage_collector.test.ts diff --git a/packages/firestore/src/local/indexeddb_mutation_queue.ts b/packages/firestore/src/local/indexeddb_mutation_queue.ts index e3fa8f1adbf..17defce5ff9 100644 --- a/packages/firestore/src/local/indexeddb_mutation_queue.ts +++ b/packages/firestore/src/local/indexeddb_mutation_queue.ts @@ -525,19 +525,7 @@ export class IndexedDbMutationQueue implements MutationQueue { txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - const indexKey = DbDocumentMutation.prefixForPath(this.userId, key.path); - const encodedPath = indexKey[1]; - const startRange = IDBKeyRange.lowerBound(indexKey); - let containsKey = false; - return documentMutationsStore(txn) - .iterate({ range: startRange, keysOnly: true }, (key, value, control) => { - const [userID, keyPath, /*batchID*/ _] = key; - if (userID === this.userId && keyPath === encodedPath) { - containsKey = true; - } - control.done(); - }) - .next(() => containsKey); + return mutationQueueContainsKey(txn, this.userId, key); } // PORTING NOTE: Multi-tab only (state is held in memory in other clients). 
@@ -560,6 +548,29 @@ export class IndexedDbMutationQueue implements MutationQueue { } } +/** + * @return true if the mutation queue for the given user contains a pending mutation for the given key. + */ +export function mutationQueueContainsKey( + txn: PersistenceTransaction, + userId: string, + key: DocumentKey +): PersistencePromise { + const indexKey = DbDocumentMutation.prefixForPath(userId, key.path); + const encodedPath = indexKey[1]; + const startRange = IDBKeyRange.lowerBound(indexKey); + let containsKey = false; + return documentMutationsStore(txn) + .iterate({ range: startRange, keysOnly: true }, (key, value, control) => { + const [userID, keyPath, /*batchID*/ _] = key; + if (userID === userId && keyPath === encodedPath) { + containsKey = true; + } + control.done(); + }) + .next(() => containsKey); +} + /** * Delete a mutation batch and the associated document mutations. * @return A PersistencePromise of the document mutations that were removed. diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 7dfa9d86e50..6138b7185a9 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -21,7 +21,10 @@ import { assert, fail } from '../util/assert'; import { Code, FirestoreError } from '../util/error'; import * as log from '../util/log'; -import { IndexedDbMutationQueue } from './indexeddb_mutation_queue'; +import { + IndexedDbMutationQueue, + mutationQueueContainsKey +} from './indexeddb_mutation_queue'; import { IndexedDbQueryCache, getHighestListenSequenceNumber @@ -35,14 +38,21 @@ import { DbPrimaryClientKey, SCHEMA_VERSION, DbTargetGlobal, - SchemaConverter + SchemaConverter, + DbTargetDocument, + DbTargetDocumentKey, + DbMutationQueueKey, + DbMutationQueue, + DbDocumentMutationKey, + DbDocumentMutation } from './indexeddb_schema'; import { LocalSerializer } from './local_serializer'; import { MutationQueue } from 
'./mutation_queue'; import { Persistence, PersistenceTransaction, - PrimaryStateListener + PrimaryStateListener, + ReferenceDelegate } from './persistence'; import { PersistencePromise } from './persistence_promise'; import { QueryCache } from './query_cache'; @@ -53,6 +63,12 @@ import { AsyncQueue, TimerId } from '../util/async_queue'; import { ClientId } from './shared_client_state'; import { CancelablePromise } from '../util/promise'; import { ListenSequence, SequenceNumberSyncer } from '../core/listen_sequence'; +import { ReferenceSet } from './reference_set'; +import { QueryData } from './query_data'; +import { DocumentKey } from '../model/document_key'; +import { encode, EncodedResourcePath } from './encoded_resource_path'; +import { LiveTargets, LruDelegate } from './lru_garbage_collector'; +import { ListenSequenceNumber, TargetId } from '../core/types'; const LOG_TAG = 'IndexedDbPersistence'; @@ -247,6 +263,7 @@ export class IndexedDbPersistence implements Persistence { private remoteDocumentCache: IndexedDbRemoteDocumentCache; private readonly webStorage: Storage; private listenSequence: ListenSequence; + readonly referenceDelegate: ReferenceDelegate; // Note that `multiClientParams` must be present to enable multi-client support while multi-tab // is still experimental. 
When multi-client is switched to always on, `multiClientParams` will @@ -265,11 +282,12 @@ export class IndexedDbPersistence implements Persistence { UNSUPPORTED_PLATFORM_ERROR_MSG ); } + this.referenceDelegate = new IndexedDbLruDelegate(this); this.dbName = persistenceKey + IndexedDbPersistence.MAIN_DATABASE; this.serializer = new LocalSerializer(serializer); this.document = platform.document; this.allowTabSynchronization = multiClientParams !== undefined; - this.queryCache = new IndexedDbQueryCache(this.serializer); + this.queryCache = new IndexedDbQueryCache(this.serializer, this); this.remoteDocumentCache = new IndexedDbRemoteDocumentCache( this.serializer, /*keepDocumentChangeLog=*/ this.allowTabSynchronization @@ -694,7 +712,7 @@ export class IndexedDbPersistence implements Persistence { return IndexedDbMutationQueue.forUser(user, this.serializer); } - getQueryCache(): QueryCache { + getQueryCache(): IndexedDbQueryCache { assert( this.started, 'Cannot initialize QueryCache before persistence is started.' @@ -1026,3 +1044,212 @@ function clientMetadataStore( DbClientMetadata.store ); } + +/** Provides LRU functionality for IndexedDB persistence. 
*/ +class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { + private additionalReferences: ReferenceSet | null; + + constructor(private readonly db: IndexedDbPersistence) {} + + setInMemoryPins(inMemoryPins: ReferenceSet): void { + this.additionalReferences = inMemoryPins; + } + + removeTarget( + txn: PersistenceTransaction, + queryData: QueryData + ): PersistencePromise { + const updated = queryData.copy({ + sequenceNumber: txn.currentSequenceNumber + }); + return this.db.getQueryCache().updateQueryData(txn, updated); + } + + addReference( + txn: PersistenceTransaction, + key: DocumentKey + ): PersistencePromise { + return this.writeSentinelKey(txn, key); + } + + removeReference( + txn: PersistenceTransaction, + key: DocumentKey + ): PersistencePromise { + return this.writeSentinelKey(txn, key); + } + + removeMutationReference( + txn: PersistenceTransaction, + key: DocumentKey + ): PersistencePromise { + return this.writeSentinelKey(txn, key); + } + + onLimboDocumentUpdated( + txn: PersistenceTransaction, + key: DocumentKey + ): PersistencePromise { + return this.writeSentinelKey(txn, key); + } + + forEachTarget( + txn: PersistenceTransaction, + f: (q: QueryData) => void + ): PersistencePromise { + return this.db.getQueryCache().forEachTarget(txn, f); + } + + forEachOrphanedDocumentSequenceNumber( + txn: PersistenceTransaction, + f: (sequenceNumber: ListenSequenceNumber) => void + ): PersistencePromise { + return this.db + .getQueryCache() + .forEachOrphanedDocument(txn, (docKey, sequenceNumber) => + f(sequenceNumber) + ); + } + + getTargetCount(txn: PersistenceTransaction): PersistencePromise { + return this.db.getQueryCache().getQueryCount(txn); + } + + removeTargets( + txn: PersistenceTransaction, + upperBound: ListenSequenceNumber, + liveTargets: LiveTargets + ): PersistencePromise { + return this.db.getQueryCache().removeTargets(txn, upperBound, liveTargets); + } + + removeOrphanedDocuments( + txn: PersistenceTransaction, + upperBound: 
ListenSequenceNumber + ): PersistencePromise { + let count = 0; + let p = PersistencePromise.resolve(); + const iteration = this.db + .getQueryCache() + .forEachOrphanedDocument(txn, (docKey, sequenceNumber) => { + if (sequenceNumber <= upperBound) { + p = p.next(() => this.isPinned(txn, docKey)).next(isPinned => { + if (!isPinned) { + count++; + return this.removeOrphanedDocument(txn, docKey); + } + }); + } + }); + return iteration.next(() => p).next(() => count); + } + + /** + * Clears a document from the cache. The document is assumed to be orphaned, so target-document + * associations are not queried. We remove it from the remote document cache, as well as remove + * its sentinel row. + */ + private removeOrphanedDocument( + txn: PersistenceTransaction, + docKey: DocumentKey + ): PersistencePromise { + return PersistencePromise.waitFor([ + sentinelKeyStore(txn).delete(sentinelKey(docKey)), + this.db.getRemoteDocumentCache().removeEntry(txn, docKey) + ]); + } + + /** + * Returns true if anything would prevent this document from being garbage collected, given that + * the document in question is not present in any targets and has a sequence number less than or + * equal to the upper bound for the collection run. + */ + private isPinned( + txn: PersistenceTransaction, + docKey: DocumentKey + ): PersistencePromise { + return this.additionalReferences!.containsKey(txn, docKey).next( + isPinned => { + if (isPinned) { + return PersistencePromise.resolve(true); + } else { + return this.mutationQueuesContainKey(txn, docKey); + } + } + ); + } + + /** Returns true if any mutation queue contains the given document. 
*/ + private mutationQueuesContainKey( + txn: PersistenceTransaction, + docKey: DocumentKey + ): PersistencePromise { + let found = false; + return IndexedDbPersistence.getStore( + txn, + DbMutationQueue.store + ) + .iterateAsync(userId => { + return mutationQueueContainsKey(txn, userId, docKey).next( + containsKey => { + if (containsKey) { + found = true; + } + return PersistencePromise.resolve(!containsKey); + } + ); + }) + .next(() => found); + } + + private writeSentinelKey( + txn: PersistenceTransaction, + key: DocumentKey + ): PersistencePromise { + return sentinelKeyStore(txn).put( + sentinelRow(key, txn.currentSequenceNumber) + ); + } +} + +/** + * `SentinelRow` describes the schema of rows in the DbTargetDocument store that + * have TargetId === 0. The sequence number is an approximation of a last-used value + * for the document identified by the path portion of the key. + */ +export type SentinelRow = { + targetId: TargetId; + path: EncodedResourcePath; + sequenceNumber: ListenSequenceNumber; +}; + +/** + * The sentinel key store is the same as the DbTargetDocument store, but allows for + * reading and writing sequence numbers as part of the value stored. + */ +function sentinelKeyStore( + txn: PersistenceTransaction +): SimpleDbStore { + return IndexedDbPersistence.getStore( + txn, + DbTargetDocument.store + ); +} + +function sentinelKey(key: DocumentKey): [TargetId, EncodedResourcePath] { + return [0, encode(key.path)]; +} + +/** + * @return A value suitable for writing in the sentinel key store. 
+ */ +function sentinelRow( + key: DocumentKey, + sequenceNumber: ListenSequenceNumber +): SentinelRow { + return { + targetId: 0, + path: encode(key.path), + sequenceNumber + }; +} diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index e30c3e9a858..a73595f2715 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -39,14 +39,25 @@ import { PersistencePromise } from './persistence_promise'; import { QueryCache } from './query_cache'; import { QueryData } from './query_data'; import { TargetIdGenerator } from '../core/target_id_generator'; -import { SimpleDbStore, SimpleDbTransaction, SimpleDb } from './simple_db'; +import { + SimpleDbStore, + SimpleDbTransaction, + SimpleDb, + IterationController +} from './simple_db'; import { IndexedDbPersistence, - IndexedDbTransaction + IndexedDbTransaction, + SentinelRow } from './indexeddb_persistence'; +import { ListenSequence } from '../core/listen_sequence'; +import { LiveTargets } from './lru_garbage_collector'; export class IndexedDbQueryCache implements QueryCache { - constructor(private serializer: LocalSerializer) {} + constructor( + private serializer: LocalSerializer, + private readonly db: IndexedDbPersistence + ) {} /** The garbage collector to notify about potential garbage keys. */ private garbageCollector: GarbageCollector | null = null; @@ -145,6 +156,100 @@ export class IndexedDbQueryCache implements QueryCache { }); } + /** + * Drops any targets with sequence number less than or equal to the upper bound, excepting those + * present in `activeTargetIds`. Document associations for the removed targets are also removed. + * Returns the number of targets removed. 
+ */ + removeTargets( + txn: PersistenceTransaction, + upperBound: ListenSequenceNumber, + activeTargetIds: LiveTargets + ): PersistencePromise { + let count = 0; + const promises: Array> = []; + return targetsStore(txn) + .iterate((key, value) => { + const queryData = this.serializer.fromDbTarget(value); + if ( + queryData.sequenceNumber <= upperBound && + activeTargetIds[queryData.targetId] === undefined + ) { + count++; + promises.push(this.removeQueryData(txn, queryData)); + } + }) + .next(() => PersistencePromise.waitFor(promises)) + .next(() => count); + } + + /** + * Call provided function with each `QueryData` that we have cached. + */ + forEachTarget( + txn: PersistenceTransaction, + f: (q: QueryData) => void + ): PersistencePromise { + return targetsStore(txn).iterate((key, value) => { + const queryData = this.serializer.fromDbTarget(value); + f(queryData); + }); + } + + /** + * Call provided function for each document in the cache that is 'orphaned'. Orphaned + * means not a part of any target, so the only entry in the target-document index for + * that document will be the sentinel row (targetId 0), which will also have the sequence + * number for the last time the document was accessed. + */ + forEachOrphanedDocument( + txn: PersistenceTransaction, + f: (docKey: DocumentKey, sequenceNumber: ListenSequenceNumber) => void + ): PersistencePromise { + const store = IndexedDbPersistence.getStore< + DbTargetDocumentKey, + SentinelRow + >(txn, DbTargetDocument.store); + let nextToReport: ListenSequenceNumber = ListenSequence.INVALID; + let nextPath: EncodedResourcePath.EncodedResourcePath; + return store + .iterate( + { + index: DbTargetDocument.documentTargetsIndex + }, + ([targetId, docKey], { path, sequenceNumber }) => { + if (targetId === 0) { + // if nextToReport is valid, report it, this is a new key so the last one + // must be not be a member of any targets. 
+ if (nextToReport !== ListenSequence.INVALID) { + f( + new DocumentKey(EncodedResourcePath.decode(nextPath)), + nextToReport + ); + } + // set nextToReport to be this sequence number. It's the next one we might + // report, if we don't find any targets for this document. + nextToReport = sequenceNumber; + nextPath = path; + } else { + // set nextToReport to be invalid, we know we don't need to report this one since + // we found a target for it. + nextToReport = ListenSequence.INVALID; + } + } + ) + .next(() => { + // Since we report sequence numbers after getting to the next key, we need to check if + // the last key we iterated over was an orphaned document and report it. + if (nextToReport !== ListenSequence.INVALID) { + f( + new DocumentKey(EncodedResourcePath.decode(nextPath)), + nextToReport + ); + } + }); + } + private retrieveMetadata( transaction: PersistenceTransaction ): PersistencePromise { @@ -238,6 +343,7 @@ export class IndexedDbQueryCache implements QueryCache { keys.forEach(key => { const path = EncodedResourcePath.encode(key.path); promises.push(store.put(new DbTargetDocument(targetId, path))); + promises.push(this.db.referenceDelegate.addReference(txn, key)); }); return PersistencePromise.waitFor(promises); } @@ -257,6 +363,7 @@ export class IndexedDbQueryCache implements QueryCache { if (this.garbageCollector !== null) { this.garbageCollector.addPotentialGarbageKey(key); } + promises.push(this.db.referenceDelegate.removeReference(txn, key)); }); return PersistencePromise.waitFor(promises); } @@ -352,9 +459,14 @@ export class IndexedDbQueryCache implements QueryCache { keysOnly: true, range }, - (key, _, control) => { - count++; - control.done(); + ([targetId, path], _, control) => { + // Having a sentinel row for a document does not count as containing that document; + // For the query cache, containing the document means the document is part of some + // target. 
+ if (targetId !== 0) { + count++; + control.done(); + } } ) .next(() => count > 0); diff --git a/packages/firestore/src/local/lru_garbage_collector.ts b/packages/firestore/src/local/lru_garbage_collector.ts new file mode 100644 index 00000000000..bbbe4f09ca3 --- /dev/null +++ b/packages/firestore/src/local/lru_garbage_collector.ts @@ -0,0 +1,268 @@ +/** + * Copyright 2018 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { QueryData } from './query_data'; +import { PersistenceTransaction } from './persistence'; +import { PersistencePromise } from './persistence_promise'; +import { ListenSequenceNumber } from '../core/types'; +import { ListenSequence } from '../core/listen_sequence'; +import { assert } from '../util/assert'; +import { AnyJs } from '../util/misc'; + +/** + * Persistence layers intending to use LRU Garbage collection should have reference delegates that + * implement this interface. This interface defines the operations that the LRU garbage collector + * needs from the persistence layer. + */ +export interface LruDelegate { + getTargetCount(txn: PersistenceTransaction): PersistencePromise; + + /** Enumerates all the targets in the QueryCache. */ + forEachTarget( + txn: PersistenceTransaction, + f: (target: QueryData) => void + ): PersistencePromise; + + /** Enumerates sequence numbers for documents not associated with a target. 
*/ + forEachOrphanedDocumentSequenceNumber( + txn: PersistenceTransaction, + f: (sequenceNumber: ListenSequenceNumber) => void + ): PersistencePromise; + + /** + * Removes all targets that have a sequence number less than or equal to `upperBound`, and are not + * present in the `activeTargetIds` set. + * + * @return the number of targets removed. + */ + removeTargets( + txn: PersistenceTransaction, + upperBound: ListenSequenceNumber, + activeTargetIds: LiveTargets + ): PersistencePromise; + + /** + * Removes all unreferenced documents from the cache that have a sequence number less than or + * equal to the given `upperBound`. + * + * @return the number of documents removed. + */ + removeOrphanedDocuments( + txn: PersistenceTransaction, + upperBound: ListenSequenceNumber + ): PersistencePromise; +} + +/** + * Describes an object whose keys are active target ids. We do not care about the type of the values. + */ +export type LiveTargets = { + [id: number]: AnyJs; +}; + +/** + * A selective port of `java.util.PriorityQueue` + * {@see PriorityQueue.java} + * The queue does not grow and must have an initial capacity when it is constructed. Additionally, elements may only be + * `poll()`'d and cannot be removed from any other position in the queue. 
+ */ +class PriorityQueue { + private _size = 0; + get size(): number { + return this._size; + } + private readonly queue: T[]; + constructor( + private readonly capacity: number, + private readonly comparator: (a: T, b: T) => number + ) { + assert(capacity > 0, 'Capacity must be greater than 0'); + this.queue = new Array(capacity); + } + + add(elem: T): void { + assert(this._size + 1 <= this.capacity, 'Queue is over capacity'); + if (this._size === 0) { + this.queue[0] = elem; + this._size = 1; + } else { + this.siftUp(elem); + } + } + + poll(): T | null { + if (this._size === 0) { + return null; + } + const result = this.queue[0]; + const newSize = --this._size; + const last = this.queue[newSize]; + delete this.queue[newSize]; + if (newSize !== 0) { + this.siftDown(last); + } + return result; + } + + peek(): T | null { + if (this._size > 0) { + return this.queue[0]; + } + return null; + } + + private siftUp(elem: T): void { + let k = this._size; + while (k > 0) { + const parent = (k - 1) >>> 1; + const toCheck = this.queue[parent]; + const comp = this.comparator(elem, toCheck); + if (comp >= 0) { + break; + } + this.queue[k] = toCheck; + k = parent; + } + this.queue[k] = elem; + this._size++; + } + + private siftDown(lastElem: T): void { + let k = 0; + const half = this._size >>> 1; + while (k < half) { + let child = (k << 1) + 1; + let toCheck = this.queue[child]; + const right = child + 1; + if ( + right < this._size && + this.comparator(toCheck, this.queue[right]) > 0 + ) { + toCheck = this.queue[right]; + child = right; + } + if (this.comparator(lastElem, toCheck) <= 0) { + break; + } + this.queue[k] = toCheck; + k = child; + } + this.queue[k] = lastElem; + } +} + +/** + * Used to calculate the nth sequence number. Keeps a rolling buffer of the lowest n values passed + * to `addElement`, and finally reports the largest of them in `maxValue`. 
+ */ +class RollingSequenceNumberBuffer { + private queue: PriorityQueue; + + // Invert the comparison because we want to keep the smallest values. + private static COMPARATOR: ( + a: ListenSequenceNumber, + b: ListenSequenceNumber + ) => number = (a, b) => { + if (b < a) { + return -1; + } else if (b === a) { + return 0; + } + return 1; + }; + + constructor(private readonly maxElements: number) { + this.queue = new PriorityQueue( + maxElements, + RollingSequenceNumberBuffer.COMPARATOR + ); + } + + addElement(sequenceNumber: ListenSequenceNumber): void { + if (this.queue.size < this.maxElements) { + this.queue.add(sequenceNumber); + } else { + // Note: use first because we have inverted the comparison + const highestValue = this.queue.peek()!; + if (sequenceNumber < highestValue) { + this.queue.poll(); + this.queue.add(sequenceNumber); + } + } + } + + get maxValue(): ListenSequenceNumber { + return this.queue.peek()!; + } +} + +/** Implements the steps for LRU garbage collection. */ +export class LruGarbageCollector { + constructor(private readonly delegate: LruDelegate) {} + + /** Given a percentile of target to collect, returns the number of targets to collect. */ + calculateTargetCount( + txn: PersistenceTransaction, + percentile: number + ): PersistencePromise { + return this.delegate.getTargetCount(txn).next(targetCount => { + return Math.floor(percentile / 100.0 * targetCount); + }); + } + + /** Returns the nth sequence number, counting in order from the smallest. 
*/ + nthSequenceNumber( + txn: PersistenceTransaction, + n: number + ): PersistencePromise { + if (n === 0) { + return PersistencePromise.resolve(ListenSequence.INVALID); + } + + const buffer = new RollingSequenceNumberBuffer(n); + return this.delegate + .forEachTarget(txn, target => buffer.addElement(target.sequenceNumber)) + .next(() => { + return this.delegate.forEachOrphanedDocumentSequenceNumber( + txn, + sequenceNumber => buffer.addElement(sequenceNumber) + ); + }) + .next(() => buffer.maxValue); + } + + /** + * Removes targets with a sequence number equal to or less than the given upper bound, and removes + * document associations with those targets. + */ + removeTargets( + txn: PersistenceTransaction, + upperBound: ListenSequenceNumber, + liveTargets: LiveTargets + ): PersistencePromise { + return this.delegate.removeTargets(txn, upperBound, liveTargets); + } + + /** + * Removes documents that have a sequence number equal to or less than the upper bound and are not + * otherwise pinned. + */ + removeOrphanedDocuments( + txn: PersistenceTransaction, + upperBound: ListenSequenceNumber + ): PersistencePromise { + return this.delegate.removeOrphanedDocuments(txn, upperBound); + } +} diff --git a/packages/firestore/src/local/persistence.ts b/packages/firestore/src/local/persistence.ts index 6c8ca18e982..9c289183c28 100644 --- a/packages/firestore/src/local/persistence.ts +++ b/packages/firestore/src/local/persistence.ts @@ -22,6 +22,9 @@ import { QueryCache } from './query_cache'; import { RemoteDocumentCache } from './remote_document_cache'; import { ClientId } from './shared_client_state'; import { ListenSequenceNumber } from '../core/types'; +import { ReferenceSet } from './reference_set'; +import { QueryData } from './query_data'; +import { DocumentKey } from '../model/document_key'; /** * Opaque interface representing a persistence transaction. 
@@ -46,6 +49,59 @@ export abstract class PersistenceTransaction { */ export type PrimaryStateListener = (isPrimary: boolean) => Promise; +/** + * A ReferenceDelegate instance handles all of the hooks into the document-reference lifecycle. This + * includes being added to a target, being removed from a target, being subject to mutation, and + * being mutated by the user. + * + *

Different implementations may do different things with each of these events. Not every + * implementation needs to do something with every lifecycle hook. + * + * Porting note: since sequence numbers are attached to transactions in this client, the + * ReferenceDelegate does not need to deal in transactional semantics, nor does it need to + * track and generate sequence numbers. + */ +export interface ReferenceDelegate { + /** + * Registers a ReferenceSet of documents that should be considered 'referenced' and not eligible + * for removal during garbage collection. + */ + setInMemoryPins(pins: ReferenceSet): void; + + /** Notify the delegate that the given document was added to a target. */ + addReference( + txn: PersistenceTransaction, + doc: DocumentKey + ): PersistencePromise; + + /** Notify the delegate that the given document was removed from a target. */ + removeReference( + txn: PersistenceTransaction, + doc: DocumentKey + ): PersistencePromise; + + /** + * Notify the delegate that a target was removed. The delegate may, but is not obligated to, + * actually delete the target and associated data. + */ + removeTarget( + txn: PersistenceTransaction, + queryData: QueryData + ): PersistencePromise; + + /** Notify the delegate that a document is no longer being mutated by the user. */ + removeMutationReference( + txn: PersistenceTransaction, + doc: DocumentKey + ): PersistencePromise; + + /** Notify the delegate that a limbo document was updated. */ + onLimboDocumentUpdated( + txn: PersistenceTransaction, + doc: DocumentKey + ): PersistencePromise; +} + /** * Persistence is the lowest-level shared interface to persistent storage in * Firestore. 
diff --git a/packages/firestore/src/local/simple_db.ts b/packages/firestore/src/local/simple_db.ts index b278916f177..9ff5beb75be 100644 --- a/packages/firestore/src/local/simple_db.ts +++ b/packages/firestore/src/local/simple_db.ts @@ -498,6 +498,39 @@ export class SimpleDbStore< return this.iterateCursor(cursor, callback); } + /** + * Iterates over a store, but waits for the given callback to complete for each entry + * before iterating the next entry. This allows the callback to do asynchronous work + * to determine if this iteration should continue. + * + * The provided callback should return `true` to continue iteration, and `false` otherwise. + */ + iterateAsync( + callback: (k: KeyType, v: ValueType) => PersistencePromise + ): PersistencePromise { + const cursorRequest = this.cursor({}); + return new PersistencePromise((resolve, reject) => { + cursorRequest.onerror = (event: Event) => { + reject((event.target as IDBRequest).error); + }; + cursorRequest.onsuccess = (event: Event) => { + const cursor: IDBCursorWithValue = (event.target as IDBRequest).result; + if (!cursor) { + resolve(); + return; + } + + callback(cursor.primaryKey, cursor.value).next(shouldContinue => { + if (shouldContinue) { + cursor.continue(); + } else { + resolve(); + } + }); + }; + }); + } + private iterateCursor( cursorRequest: IDBRequest, fn: IterateCallback diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts new file mode 100644 index 00000000000..a2efeda8e8c --- /dev/null +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -0,0 +1,770 @@ +/** + * Copyright 2018 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { expect } from 'chai'; +import { IndexedDbPersistence } from '../../../src/local/indexeddb_persistence'; +import { + Persistence, + PersistenceTransaction +} from '../../../src/local/persistence'; +import * as PersistenceTestHelpers from './persistence_test_helpers'; +import { QueryData, QueryPurpose } from '../../../src/local/query_data'; +import { PersistencePromise } from '../../../src/local/persistence_promise'; +import { ListenSequenceNumber, TargetId } from '../../../src/core/types'; +import { Query } from '../../../src/core/query'; +import { doc, path, queryData, wrapObject } from '../../util/helpers'; +import { QueryCache } from '../../../src/local/query_cache'; +import { + LiveTargets, + LruDelegate, + LruGarbageCollector +} from '../../../src/local/lru_garbage_collector'; +import { ListenSequence } from '../../../src/core/listen_sequence'; +import { DocumentKey } from '../../../src/model/document_key'; +import { documentKeySet } from '../../../src/model/collections'; +import { + Mutation, + Precondition, + SetMutation +} from '../../../src/model/mutation'; +import { Document } from '../../../src/model/document'; +import { SnapshotVersion } from '../../../src/core/snapshot_version'; +import { MutationQueue } from '../../../src/local/mutation_queue'; +import { User } from '../../../src/auth/user'; +import { Timestamp } from '../../../src/api/timestamp'; +import { RemoteDocumentCache } from '../../../src/local/remote_document_cache'; +import { ReferenceSet } from '../../../src/local/reference_set'; +import { 
IndexedDbQueryCache } from '../../../src/local/indexeddb_query_cache'; + +describe('IndexedDbLruReferenceDelegate', () => { + if (!IndexedDbPersistence.isAvailable()) { + console.warn('No IndexedDB. Skipping IndexedDbLruReferenceDelegate tests.'); + return; + } + + genericLruGarbageCollectorTests(() => + PersistenceTestHelpers.testIndexedDbPersistence() + ); +}); + +function genericLruGarbageCollectorTests( + newPersistence: () => Promise +): void { + let previousTargetId: TargetId; + let previousDocNum: number; + beforeEach(async () => { + previousTargetId = 500; + previousDocNum = 10; + await newTestResources(); + }); + + afterEach(async () => { + await persistence.shutdown(/* deleteData= */ true); + }); + + let persistence: Persistence; + let queryCache: QueryCache; + let garbageCollector: LruGarbageCollector; + let initialSequenceNumber: ListenSequenceNumber; + let mutationQueue: MutationQueue; + let documentCache: RemoteDocumentCache; + async function newTestResources(): Promise { + if (persistence && persistence.started) { + await persistence.shutdown(/* deleteData= */ true); + } + persistence = await newPersistence(); + queryCache = persistence.getQueryCache(); + mutationQueue = persistence.getMutationQueue(new User('user')); + documentCache = persistence.getRemoteDocumentCache(); + initialSequenceNumber = await persistence.runTransaction( + 'highest sequence number', + false, + txn => PersistencePromise.resolve(txn.currentSequenceNumber) + ); + // TODO(gsoltis): remove cast + const referenceDelegate = (persistence as IndexedDbPersistence) + .referenceDelegate; + referenceDelegate.setInMemoryPins(new ReferenceSet()); + garbageCollector = new LruGarbageCollector( + (referenceDelegate as any) as LruDelegate + ); + } + + function nextQueryData(sequenceNumber: ListenSequenceNumber): QueryData { + const targetId = ++previousTargetId; + return new QueryData( + Query.atPath(path('path' + targetId)), + targetId, + QueryPurpose.Listen, + sequenceNumber + ); + } + + 
function nextTestDocumentKey(): DocumentKey { + return DocumentKey.fromPathString('docs/doc_' + ++previousDocNum); + } + + function addNextTargetInTransaction( + txn: PersistenceTransaction + ): PersistencePromise { + const queryData = nextQueryData(txn.currentSequenceNumber); + return queryCache.addQueryData(txn, queryData).next(() => queryData); + } + + function addNextTarget(): Promise { + return persistence.runTransaction('add query', false, txn => { + return addNextTargetInTransaction(txn); + }); + } + + function updateTargetInTransaction( + txn: PersistenceTransaction, + queryData: QueryData + ): PersistencePromise { + const updated = queryData.copy({ + sequenceNumber: txn.currentSequenceNumber + }); + return queryCache + .updateQueryData(txn, updated) + .next(() => + queryCache.setTargetsMetadata(txn, txn.currentSequenceNumber) + ); + } + + function markDocumentEligibleForGCInTransaction( + txn: PersistenceTransaction, + key: DocumentKey + ): PersistencePromise { + // TODO: change this once reference delegate is added to the persistence interface + return (persistence as IndexedDbPersistence).referenceDelegate.removeMutationReference( + txn, + key + ); + } + + function markDocumentEligibleForGC(key: DocumentKey): Promise { + return persistence.runTransaction( + 'mark document eligible for GC', + false, + txn => { + return markDocumentEligibleForGCInTransaction(txn, key); + } + ); + } + + function markADocumentEligibleForGCInTransaction( + txn: PersistenceTransaction + ): PersistencePromise { + const key = nextTestDocumentKey(); + return markDocumentEligibleForGCInTransaction(txn, key); + } + + function markADocumentEligibleForGC(): Promise { + const key = nextTestDocumentKey(); + return markDocumentEligibleForGC(key); + } + + function calculateTargetCount(percentile: number): Promise { + return persistence.runTransaction('calculate target count', false, txn => { + return garbageCollector.calculateTargetCount(txn, percentile); + }); + } + + function 
nthSequenceNumber(n: number): Promise { + return persistence.runTransaction('nth sequence number', false, txn => { + return garbageCollector.nthSequenceNumber(txn, n); + }); + } + + function removeTargets( + upperBound: ListenSequenceNumber, + liveTargets: LiveTargets + ): Promise { + return persistence.runTransaction('remove targets', false, txn => { + return garbageCollector.removeTargets(txn, upperBound, liveTargets); + }); + } + + function removeOrphanedDocuments( + upperBound: ListenSequenceNumber + ): Promise { + return persistence.runTransaction( + 'remove orphaned documents', + false, + txn => { + return garbageCollector.removeOrphanedDocuments(txn, upperBound); + } + ); + } + + function nextTestDocument(): Document { + const key = nextTestDocumentKey(); + return new Document( + key, + SnapshotVersion.fromMicroseconds(1000), + wrapObject({ foo: 3, bar: false }), + {} + ); + } + + function cacheADocumentInTransaction( + txn: PersistenceTransaction + ): PersistencePromise { + const doc = nextTestDocument(); + return documentCache.addEntries(txn, [doc]).next(() => doc.key); + } + + function mutation(key: DocumentKey): Mutation { + return new SetMutation( + key, + wrapObject({ baz: 'hello', world: 2 }), + Precondition.NONE + ); + } + + it('pick sequence number percentile', async () => { + const testCases: Array<{ targets: number; expected: number }> = [ + //{ targets: 0, expected: 0 }, + { targets: 10, expected: 1 }, + { targets: 9, expected: 0 }, + { targets: 50, expected: 5 }, + { targets: 49, expected: 4 } + ]; + + for (const { targets, expected } of testCases) { + await newTestResources(); + for (let i = 0; i < targets; i++) { + await addNextTarget(); + } + const tenth = await calculateTargetCount(10); + expect(tenth).to.equal( + expected, + 'Expected 10% of ' + targets + ' to be ' + expected + ); + } + }); + + it('sequence number for no targets', async () => { + expect(await nthSequenceNumber(0)).to.equal(ListenSequence.INVALID); + }); + + it('sequence 
number for 50 targets', async () => { + // Add 50 queries sequentially, aim to collect 10 of them. + // The sequence number to collect should be 10 past the initial sequence number. + for (let i = 0; i < 50; i++) { + await addNextTarget(); + } + const expected = initialSequenceNumber + 10; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); + + it('sequence number for multiple targets in a transaction', async () => { + // 50 queries, 9 with one transaction, incrementing from there. Should get second sequence + // number. + await persistence.runTransaction('9 targets in a batch', false, txn => { + let p = PersistencePromise.resolve(); + for (let i = 0; i < 9; i++) { + p = p.next(() => addNextTargetInTransaction(txn)).next(); + } + return p; + }); + for (let i = 9; i < 50; i++) { + await addNextTarget(); + } + const expected = initialSequenceNumber + 2; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); + + it('all collected targets in a single transaction', async () => { + await persistence.runTransaction('11 targets in a batch', false, txn => { + let p = PersistencePromise.resolve(); + for (let i = 0; i < 11; i++) { + p = p.next(() => addNextTargetInTransaction(txn)).next(); + } + return p; + }); + for (let i = 11; i < 50; i++) { + await addNextTarget(); + } + const expected = initialSequenceNumber + 1; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); + + it('sequence number with mutation and sequential targets', async () => { + // Remove a mutated doc reference, marking it as eligible for GC. + // Then add 50 queries. Should get 10 past initial (9 queries). + await markADocumentEligibleForGC(); + for (let i = 0; i < 50; i++) { + await addNextTarget(); + } + + const expected = initialSequenceNumber + 10; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); + + it('sequence numbers with mutations in targets', async () => { + // Add mutated docs, then add one of them to a query target so it doesn't get GC'd. 
+ // Expect 3 past the initial value: the mutations not part of a query, and two queries + const docInTarget = nextTestDocumentKey(); + await persistence.runTransaction('mark mutations', false, txn => { + // Adding 9 doc keys in a transaction. If we remove one of them, we'll have room for two + // actual targets. + let p = markDocumentEligibleForGCInTransaction(txn, docInTarget); + for (let i = 0; i < 8; i++) { + p = p.next(() => markADocumentEligibleForGCInTransaction(txn)); + } + return p; + }); + + for (let i = 0; i < 49; i++) { + await addNextTarget(); + } + await persistence.runTransaction('target with a mutation', false, txn => { + return addNextTargetInTransaction(txn).next(queryData => { + const keySet = documentKeySet().add(docInTarget); + return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); + }); + }); + const expected = initialSequenceNumber + 3; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); + + it('remove targets up through sequence number', async () => { + const liveTargets: LiveTargets = {}; + for (let i = 0; i < 100; i++) { + const queryData = await addNextTarget(); + // Mark odd queries as live so we can test filtering out live queries. + const targetId = queryData.targetId; + if (targetId % 2 === 1) { + liveTargets[targetId] = queryData; + } + } + + // GC up through 20th query, which is 20%. + // Expect to have GC'd 10 targets, since every other target is live + const upperBound = 20 + initialSequenceNumber; + const removed = await removeTargets(upperBound, liveTargets); + expect(removed).to.equal(10); + // Make sure we removed the even targets with targetID <= 20. 
+ await persistence.runTransaction( + 'verify remaining targets > 20 or odd', + false, + txn => { + // TODO: change this once forEachTarget is added to QueryCache interface + return (queryCache as IndexedDbQueryCache).forEachTarget( + txn, + queryData => { + const targetId = queryData.targetId; + expect(targetId > 20 || targetId % 2 === 1).to.be.true; + } + ); + } + ); + }); + + it('remove orphaned documents', async () => { + // Track documents we expect to be retained so we can verify post-GC. + // This will contain documents associated with targets that survive GC, as well + // as any documents with pending mutations. + const expectedRetained = new Set(); + // we add two mutations later, for now track them in an array. + const mutations: Mutation[] = []; + + // Add two documents to first target, queue a mutation on the second document + await persistence.runTransaction( + 'add a target and add two documents to it', + false, + txn => { + return addNextTargetInTransaction(txn).next(queryData => { + let keySet = documentKeySet(); + return cacheADocumentInTransaction(txn) + .next(docKey1 => { + expectedRetained.add(docKey1); + keySet = keySet.add(docKey1); + }) + .next(() => cacheADocumentInTransaction(txn)) + .next(docKey2 => { + expectedRetained.add(docKey2); + keySet = keySet.add(docKey2); + mutations.push(mutation(docKey2)); + }) + .next(() => + queryCache.addMatchingKeys(txn, keySet, queryData.targetId) + ); + }); + } + ); + + // Add a second query and register a third document on it + await persistence.runTransaction('second target', false, txn => { + return addNextTargetInTransaction(txn).next(queryData => { + return cacheADocumentInTransaction(txn).next(docKey3 => { + expectedRetained.add(docKey3); + const keySet = documentKeySet().add(docKey3); + return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); + }); + }); + }); + + // cache another document and prepare a mutation on it. 
+ await persistence.runTransaction('queue a mutation', false, txn => { + return cacheADocumentInTransaction(txn).next(docKey4 => { + mutations.push(mutation(docKey4)); + expectedRetained.add(docKey4); + }); + }); + + // Insert the mutations. These operations don't have a sequence number, they just + // serve to keep the mutated documents from being GC'd while the mutations are outstanding. + await persistence.runTransaction( + 'actually register the mutations', + false, + txn => { + return mutationQueue.addMutationBatch( + txn, + Timestamp.fromMillis(2000), + mutations + ); + } + ); + + // Mark 5 documents eligible for GC. This simulates documents that were mutated then ack'd. + // Since they were ack'd, they are no longer in a mutation queue, and there is nothing keeping + // them alive. + const toBeRemoved = new Set(); + await persistence.runTransaction( + "add orphaned docs (previously mutated, then ack'd", + false, + txn => { + let p = PersistencePromise.resolve(); + for (let i = 0; i < 5; i++) { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + toBeRemoved.add(docKey); + return markDocumentEligibleForGCInTransaction(txn, docKey); + }); + }); + } + return p; + } + ); + + // We expect only the orphaned documents, those not in a mutation or a target, to be removed. 
+ // use a large sequence number to remove as much as possible + const removed = await removeOrphanedDocuments(1000); + expect(removed).to.equal(toBeRemoved.size); + await persistence.runTransaction('verify', false, txn => { + let p = PersistencePromise.resolve(); + toBeRemoved.forEach(docKey => { + p = p.next(() => { + return documentCache.getEntry(txn, docKey).next(maybeDoc => { + expect(maybeDoc).to.be.null; + }); + }); + }); + expectedRetained.forEach(docKey => { + p = p.next(() => { + return documentCache.getEntry(txn, docKey).next(maybeDoc => { + expect(maybeDoc).to.not.be.null; + }); + }); + }); + return p; + }); + }); + + it('remove targets then GC', async () => { + // Create 3 targets, add docs to all of them + // Leave oldest target alone, it is still live + // Remove newest target + // Blind write 2 documents + // Add one of the blind write docs to oldest target (preserves it) + // Remove some documents from middle target (bumps sequence number) + // Add some documents from newest target to oldest target (preserves them) + // Update a doc from middle target + // Remove middle target + // Do a blind write + // GC up to but not including the removal of the middle target + // + // Expect: + // All docs in oldest target are still around + // One blind write is gone, the first one not added to oldest target + // Documents removed from middle target are gone, except ones added to oldest target + // Documents from newest target are gone, except those added to the oldest target + + // Through the various steps, track which documents we expect to be removed vs + // documents we expect to be retained. + const expectedRetained = new Set(); + const expectedRemoved = new Set(); + + // Add oldest target, 5 documents, and add those documents to the target. + // This target will not be removed, so all documents that are part of it will + // be retained. 
+ const oldestTarget = await persistence.runTransaction( + 'Add oldest target and docs', + false, + txn => { + return addNextTargetInTransaction(txn).next(queryData => { + let p = PersistencePromise.resolve(); + let keySet = documentKeySet(); + for (let i = 0; i < 5; i++) { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + expectedRetained.add(docKey); + keySet = keySet.add(docKey); + }); + }); + } + return p + .next(() => + queryCache.addMatchingKeys(txn, keySet, queryData.targetId) + ) + .next(() => queryData); + }); + } + ); + + // Add middle target and docs. Some docs will be removed from this target later, + // which we track here. + let middleDocsToRemove = documentKeySet(); + let middleDocToUpdate: DocumentKey; + // This will be the document in this target that gets an update later. + const middleTarget = await persistence.runTransaction( + 'Add middle target and docs', + false, + txn => { + return addNextTargetInTransaction(txn).next(queryData => { + let p = PersistencePromise.resolve(); + let keySet = documentKeySet(); + + // these docs will be removed from this target later, triggering a bump + // to their sequence numbers. Since they will not be a part of the target, we + // expect them to be removed. + for (let i = 0; i < 2; i++) { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + expectedRemoved.add(docKey); + keySet = keySet.add(docKey); + middleDocsToRemove = middleDocsToRemove.add(docKey); + }); + }); + } + + // these docs stay in this target and only this target. Their presence in this + // target prevents them from being GC'd, so they are also expected to be retained. + for (let i = 2; i < 4; i++) { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + expectedRetained.add(docKey); + keySet = keySet.add(docKey); + }); + }); + } + + // This doc stays in this target, but gets updated. 
+ { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + expectedRetained.add(docKey); + keySet = keySet.add(docKey); + middleDocToUpdate = docKey; + }); + }); + } + + return p + .next(() => + queryCache.addMatchingKeys(txn, keySet, queryData.targetId) + ) + .next(() => queryData); + }); + } + ); + + // Add the newest target and add 5 documents to it. Some of those documents will + // additionally be added to the oldest target, which will cause those documents to + // be retained. The remaining documents are expected to be removed, since this target + // will be removed. + let newestDocsToAddToOldest = documentKeySet(); + await persistence.runTransaction( + 'Add newest target and docs', + false, + txn => { + return addNextTargetInTransaction(txn).next(queryData => { + let p = PersistencePromise.resolve(); + let keySet = documentKeySet(); + // These documents are only in this target. They are expected to be removed + // because this target will also be removed. + for (let i = 0; i < 3; i++) { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + expectedRemoved.add(docKey); + keySet = keySet.add(docKey); + }); + }); + } + + // docs to add to the oldest target in addition to this target. They will be retained + for (let i = 3; i < 5; i++) { + p = p.next(() => { + return cacheADocumentInTransaction(txn).next(docKey => { + expectedRetained.add(docKey); + keySet = keySet.add(docKey); + newestDocsToAddToOldest = newestDocsToAddToOldest.add(docKey); + }); + }); + } + + return p + .next(() => + queryCache.addMatchingKeys(txn, keySet, queryData.targetId) + ) + .next(() => queryData); + }); + } + ); + + // 2 doc writes, add one of them to the oldest target. 
+ await persistence.runTransaction( + '2 doc writes, add one of them to the oldest target', + false, + txn => { + let keySet = documentKeySet(); + return cacheADocumentInTransaction(txn) + .next(docKey1 => { + keySet = keySet.add(docKey1); + expectedRetained.add(docKey1); + return markDocumentEligibleForGCInTransaction(txn, docKey1); + }) + .next(() => { + return queryCache.addMatchingKeys( + txn, + keySet, + oldestTarget.targetId + ); + }) + .next(() => { + return updateTargetInTransaction(txn, oldestTarget); + }) + .next(() => { + return cacheADocumentInTransaction(txn); + }) + .next(docKey2 => { + expectedRemoved.add(docKey2); + return markDocumentEligibleForGCInTransaction(txn, docKey2); + }); + } + ); + + // Remove some documents from the middle target. + await persistence.runTransaction( + 'Remove some documents from the middle target', + false, + txn => { + return updateTargetInTransaction(txn, middleTarget).next(() => + queryCache.removeMatchingKeys( + txn, + middleDocsToRemove, + middleTarget.targetId + ) + ); + } + ); + + // Add a couple docs from the newest target to the oldest (preserves them past the point where + // newest was removed) + // upperBound is the sequence number right before middleTarget is updated, then removed. 
+ const upperBound = await persistence.runTransaction( + 'Add a couple docs from the newest target to the oldest', + false, + txn => { + return updateTargetInTransaction(txn, oldestTarget) + .next(() => { + return queryCache.addMatchingKeys( + txn, + newestDocsToAddToOldest, + oldestTarget.targetId + ); + }) + .next(() => txn.currentSequenceNumber); + } + ); + + // Update a doc in the middle target + await persistence.runTransaction( + 'Update a doc in the middle target', + false, + txn => { + const doc = new Document( + middleDocToUpdate, + SnapshotVersion.fromMicroseconds(2000), + wrapObject({ foo: 4, bar: true }), + {} + ); + return documentCache.addEntries(txn, [doc]).next(() => { + return updateTargetInTransaction(txn, middleTarget); + }); + } + ); + + // Remove the middle target + await persistence.runTransaction('remove middle target', false, txn => { + // TODO(gsoltis): fix this cast + return (persistence as IndexedDbPersistence).referenceDelegate.removeTarget( + txn, + middleTarget + ); + }); + + // Write a doc and get an ack, not part of a target + await persistence.runTransaction( + 'Write a doc and get an ack, not part of a target', + false, + txn => { + return cacheADocumentInTransaction(txn).next(docKey => { + // This should be retained, it's too new to get removed. + expectedRetained.add(docKey); + // Mark it as eligible for GC, but this is after our upper bound for what we will collect. 
+ return markDocumentEligibleForGCInTransaction(txn, docKey); + }); + } + ); + + // Finally, do the garbage collection, up to but not including the removal of middleTarget + const liveTargets: LiveTargets = {}; + liveTargets[oldestTarget.targetId] = {}; + + // Expect to remove newest target + const removed = await removeTargets(upperBound, liveTargets); + expect(removed).to.equal(1); + const docsRemoved = await removeOrphanedDocuments(upperBound); + expect(docsRemoved).to.equal(expectedRemoved.size); + await persistence.runTransaction('verify results', false, txn => { + let p = PersistencePromise.resolve(); + expectedRemoved.forEach(key => { + p = p.next(() => documentCache.getEntry(txn, key)).next(maybeDoc => { + expect(maybeDoc).to.be.null; + }); + }); + expectedRetained.forEach(key => { + p = p.next(() => documentCache.getEntry(txn, key)).next(maybeDoc => { + expect(maybeDoc).to.not.be.null; + }); + }); + return p; + }); + }); +} From 68185025a018ccf74744749cadd737a4dfa0d1c0 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 12 Sep 2018 12:14:59 -0700 Subject: [PATCH 02/23] Lint cleanup --- .../src/local/indexeddb_persistence.ts | 5 +- .../src/local/indexeddb_query_cache.ts | 3 +- .../unit/local/lru_garbage_collector.test.ts | 59 ++++++++++--------- 3 files changed, 32 insertions(+), 35 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 6138b7185a9..62c53a6c66d 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -42,9 +42,7 @@ import { DbTargetDocument, DbTargetDocumentKey, DbMutationQueueKey, - DbMutationQueue, - DbDocumentMutationKey, - DbDocumentMutation + DbMutationQueue } from './indexeddb_schema'; import { LocalSerializer } from './local_serializer'; import { MutationQueue } from './mutation_queue'; @@ -55,7 +53,6 @@ import { ReferenceDelegate } from './persistence'; import { 
PersistencePromise } from './persistence_promise'; -import { QueryCache } from './query_cache'; import { RemoteDocumentCache } from './remote_document_cache'; import { SimpleDb, SimpleDbStore, SimpleDbTransaction } from './simple_db'; import { Platform } from '../platform/platform'; diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index a73595f2715..4518c252842 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -42,8 +42,7 @@ import { TargetIdGenerator } from '../core/target_id_generator'; import { SimpleDbStore, SimpleDbTransaction, - SimpleDb, - IterationController + SimpleDb } from './simple_db'; import { IndexedDbPersistence, diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts index a2efeda8e8c..1aa36edf9ef 100644 --- a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -25,7 +25,7 @@ import { QueryData, QueryPurpose } from '../../../src/local/query_data'; import { PersistencePromise } from '../../../src/local/persistence_promise'; import { ListenSequenceNumber, TargetId } from '../../../src/core/types'; import { Query } from '../../../src/core/query'; -import { doc, path, queryData, wrapObject } from '../../util/helpers'; +import { path, wrapObject } from '../../util/helpers'; import { QueryCache } from '../../../src/local/query_cache'; import { LiveTargets, @@ -91,7 +91,7 @@ function genericLruGarbageCollectorTests( documentCache = persistence.getRemoteDocumentCache(); initialSequenceNumber = await persistence.runTransaction( 'highest sequence number', - false, + 'readwrite', txn => PersistencePromise.resolve(txn.currentSequenceNumber) ); // TODO(gsoltis): remove cast @@ -99,6 +99,7 @@ function genericLruGarbageCollectorTests( 
.referenceDelegate; referenceDelegate.setInMemoryPins(new ReferenceSet()); garbageCollector = new LruGarbageCollector( + // tslint:disable-next-line:no-any (referenceDelegate as any) as LruDelegate ); } @@ -125,7 +126,7 @@ function genericLruGarbageCollectorTests( } function addNextTarget(): Promise { - return persistence.runTransaction('add query', false, txn => { + return persistence.runTransaction('add query', 'readwrite', txn => { return addNextTargetInTransaction(txn); }); } @@ -158,7 +159,7 @@ function genericLruGarbageCollectorTests( function markDocumentEligibleForGC(key: DocumentKey): Promise { return persistence.runTransaction( 'mark document eligible for GC', - false, + 'readwrite', txn => { return markDocumentEligibleForGCInTransaction(txn, key); } @@ -178,13 +179,13 @@ function genericLruGarbageCollectorTests( } function calculateTargetCount(percentile: number): Promise { - return persistence.runTransaction('calculate target count', false, txn => { + return persistence.runTransaction('calculate target count', 'readwrite', txn => { return garbageCollector.calculateTargetCount(txn, percentile); }); } function nthSequenceNumber(n: number): Promise { - return persistence.runTransaction('nth sequence number', false, txn => { + return persistence.runTransaction('nth sequence number', 'readwrite', txn => { return garbageCollector.nthSequenceNumber(txn, n); }); } @@ -193,7 +194,7 @@ function genericLruGarbageCollectorTests( upperBound: ListenSequenceNumber, liveTargets: LiveTargets ): Promise { - return persistence.runTransaction('remove targets', false, txn => { + return persistence.runTransaction('remove targets', 'readwrite', txn => { return garbageCollector.removeTargets(txn, upperBound, liveTargets); }); } @@ -203,7 +204,7 @@ function genericLruGarbageCollectorTests( ): Promise { return persistence.runTransaction( 'remove orphaned documents', - false, + 'readwrite', txn => { return garbageCollector.removeOrphanedDocuments(txn, upperBound); } @@ -274,7 
+275,7 @@ function genericLruGarbageCollectorTests( it('sequence number for multiple targets in a transaction', async () => { // 50 queries, 9 with one transaction, incrementing from there. Should get second sequence // number. - await persistence.runTransaction('9 targets in a batch', false, txn => { + await persistence.runTransaction('9 targets in a batch', 'readwrite', txn => { let p = PersistencePromise.resolve(); for (let i = 0; i < 9; i++) { p = p.next(() => addNextTargetInTransaction(txn)).next(); @@ -289,7 +290,7 @@ function genericLruGarbageCollectorTests( }); it('all collected targets in a single transaction', async () => { - await persistence.runTransaction('11 targets in a batch', false, txn => { + await persistence.runTransaction('11 targets in a batch', 'readwrite', txn => { let p = PersistencePromise.resolve(); for (let i = 0; i < 11; i++) { p = p.next(() => addNextTargetInTransaction(txn)).next(); @@ -319,7 +320,7 @@ function genericLruGarbageCollectorTests( // Add mutated docs, then add one of them to a query target so it doesn't get GC'd. // Expect 3 past the initial value: the mutations not part of a query, and two queries const docInTarget = nextTestDocumentKey(); - await persistence.runTransaction('mark mutations', false, txn => { + await persistence.runTransaction('mark mutations', 'readwrite', txn => { // Adding 9 doc keys in a transaction. If we remove one of them, we'll have room for two // actual targets. 
let p = markDocumentEligibleForGCInTransaction(txn, docInTarget); @@ -332,7 +333,7 @@ function genericLruGarbageCollectorTests( for (let i = 0; i < 49; i++) { await addNextTarget(); } - await persistence.runTransaction('target with a mutation', false, txn => { + await persistence.runTransaction('target with a mutation', 'readwrite', txn => { return addNextTargetInTransaction(txn).next(queryData => { const keySet = documentKeySet().add(docInTarget); return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); @@ -361,7 +362,7 @@ function genericLruGarbageCollectorTests( // Make sure we removed the even targets with targetID <= 20. await persistence.runTransaction( 'verify remaining targets > 20 or odd', - false, + 'readwrite', txn => { // TODO: change this once forEachTarget is added to QueryCache interface return (queryCache as IndexedDbQueryCache).forEachTarget( @@ -386,7 +387,7 @@ function genericLruGarbageCollectorTests( // Add two documents to first target, queue a mutation on the second document await persistence.runTransaction( 'add a target and add two documents to it', - false, + 'readwrite', txn => { return addNextTargetInTransaction(txn).next(queryData => { let keySet = documentKeySet(); @@ -409,7 +410,7 @@ function genericLruGarbageCollectorTests( ); // Add a second query and register a third document on it - await persistence.runTransaction('second target', false, txn => { + await persistence.runTransaction('second target', 'readwrite', txn => { return addNextTargetInTransaction(txn).next(queryData => { return cacheADocumentInTransaction(txn).next(docKey3 => { expectedRetained.add(docKey3); @@ -420,7 +421,7 @@ function genericLruGarbageCollectorTests( }); // cache another document and prepare a mutation on it. 
- await persistence.runTransaction('queue a mutation', false, txn => { + await persistence.runTransaction('queue a mutation', 'readwrite', txn => { return cacheADocumentInTransaction(txn).next(docKey4 => { mutations.push(mutation(docKey4)); expectedRetained.add(docKey4); @@ -431,7 +432,7 @@ function genericLruGarbageCollectorTests( // serve to keep the mutated documents from being GC'd while the mutations are outstanding. await persistence.runTransaction( 'actually register the mutations', - false, + 'readwrite', txn => { return mutationQueue.addMutationBatch( txn, @@ -447,7 +448,7 @@ function genericLruGarbageCollectorTests( const toBeRemoved = new Set(); await persistence.runTransaction( "add orphaned docs (previously mutated, then ack'd", - false, + 'readwrite', txn => { let p = PersistencePromise.resolve(); for (let i = 0; i < 5; i++) { @@ -466,7 +467,7 @@ function genericLruGarbageCollectorTests( // use a large sequence number to remove as much as possible const removed = await removeOrphanedDocuments(1000); expect(removed).to.equal(toBeRemoved.size); - await persistence.runTransaction('verify', false, txn => { + await persistence.runTransaction('verify', 'readwrite', txn => { let p = PersistencePromise.resolve(); toBeRemoved.forEach(docKey => { p = p.next(() => { @@ -515,7 +516,7 @@ function genericLruGarbageCollectorTests( // be retained. const oldestTarget = await persistence.runTransaction( 'Add oldest target and docs', - false, + 'readwrite', txn => { return addNextTargetInTransaction(txn).next(queryData => { let p = PersistencePromise.resolve(); @@ -544,7 +545,7 @@ function genericLruGarbageCollectorTests( // This will be the document in this target that gets an update later. 
const middleTarget = await persistence.runTransaction( 'Add middle target and docs', - false, + 'readwrite', txn => { return addNextTargetInTransaction(txn).next(queryData => { let p = PersistencePromise.resolve(); @@ -601,7 +602,7 @@ function genericLruGarbageCollectorTests( let newestDocsToAddToOldest = documentKeySet(); await persistence.runTransaction( 'Add newest target and docs', - false, + 'readwrite', txn => { return addNextTargetInTransaction(txn).next(queryData => { let p = PersistencePromise.resolve(); @@ -640,7 +641,7 @@ function genericLruGarbageCollectorTests( // 2 doc writes, add one of them to the oldest target. await persistence.runTransaction( '2 doc writes, add one of them to the oldest target', - false, + 'readwrite', txn => { let keySet = documentKeySet(); return cacheADocumentInTransaction(txn) @@ -672,7 +673,7 @@ function genericLruGarbageCollectorTests( // Remove some documents from the middle target. await persistence.runTransaction( 'Remove some documents from the middle target', - false, + 'readwrite', txn => { return updateTargetInTransaction(txn, middleTarget).next(() => queryCache.removeMatchingKeys( @@ -689,7 +690,7 @@ function genericLruGarbageCollectorTests( // upperBound is the sequence number right before middleTarget is updated, then removed. 
const upperBound = await persistence.runTransaction( 'Add a couple docs from the newest target to the oldest', - false, + 'readwrite', txn => { return updateTargetInTransaction(txn, oldestTarget) .next(() => { @@ -706,7 +707,7 @@ function genericLruGarbageCollectorTests( // Update a doc in the middle target await persistence.runTransaction( 'Update a doc in the middle target', - false, + 'readwrite', txn => { const doc = new Document( middleDocToUpdate, @@ -721,7 +722,7 @@ function genericLruGarbageCollectorTests( ); // Remove the middle target - await persistence.runTransaction('remove middle target', false, txn => { + await persistence.runTransaction('remove middle target', 'readwrite', txn => { // TODO(gsoltis): fix this cast return (persistence as IndexedDbPersistence).referenceDelegate.removeTarget( txn, @@ -732,7 +733,7 @@ function genericLruGarbageCollectorTests( // Write a doc and get an ack, not part of a target await persistence.runTransaction( 'Write a doc and get an ack, not part of a target', - false, + 'readwrite', txn => { return cacheADocumentInTransaction(txn).next(docKey => { // This should be retained, it's too new to get removed. 
@@ -752,7 +753,7 @@ function genericLruGarbageCollectorTests( expect(removed).to.equal(1); const docsRemoved = await removeOrphanedDocuments(upperBound); expect(docsRemoved).to.equal(expectedRemoved.size); - await persistence.runTransaction('verify results', false, txn => { + await persistence.runTransaction('verify results', 'readwrite', txn => { let p = PersistencePromise.resolve(); expectedRemoved.forEach(key => { p = p.next(() => documentCache.getEntry(txn, key)).next(maybeDoc => { From d37c2c4c1649caee60e0ebfc69bff7a9141b0f18 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 12 Sep 2018 12:15:34 -0700 Subject: [PATCH 03/23] [AUTOMATED]: Prettier Code Styling --- .../src/local/indexeddb_query_cache.ts | 6 +- .../unit/local/lru_garbage_collector.test.ts | 86 ++++++++++++------- 2 files changed, 56 insertions(+), 36 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index 4518c252842..2a6b92a92b6 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -39,11 +39,7 @@ import { PersistencePromise } from './persistence_promise'; import { QueryCache } from './query_cache'; import { QueryData } from './query_data'; import { TargetIdGenerator } from '../core/target_id_generator'; -import { - SimpleDbStore, - SimpleDbTransaction, - SimpleDb -} from './simple_db'; +import { SimpleDbStore, SimpleDbTransaction, SimpleDb } from './simple_db'; import { IndexedDbPersistence, IndexedDbTransaction, diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts index 1aa36edf9ef..9736b5d4f04 100644 --- a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -179,15 +179,23 @@ function genericLruGarbageCollectorTests( } function 
calculateTargetCount(percentile: number): Promise { - return persistence.runTransaction('calculate target count', 'readwrite', txn => { - return garbageCollector.calculateTargetCount(txn, percentile); - }); + return persistence.runTransaction( + 'calculate target count', + 'readwrite', + txn => { + return garbageCollector.calculateTargetCount(txn, percentile); + } + ); } function nthSequenceNumber(n: number): Promise { - return persistence.runTransaction('nth sequence number', 'readwrite', txn => { - return garbageCollector.nthSequenceNumber(txn, n); - }); + return persistence.runTransaction( + 'nth sequence number', + 'readwrite', + txn => { + return garbageCollector.nthSequenceNumber(txn, n); + } + ); } function removeTargets( @@ -275,13 +283,17 @@ function genericLruGarbageCollectorTests( it('sequence number for multiple targets in a transaction', async () => { // 50 queries, 9 with one transaction, incrementing from there. Should get second sequence // number. - await persistence.runTransaction('9 targets in a batch', 'readwrite', txn => { - let p = PersistencePromise.resolve(); - for (let i = 0; i < 9; i++) { - p = p.next(() => addNextTargetInTransaction(txn)).next(); + await persistence.runTransaction( + '9 targets in a batch', + 'readwrite', + txn => { + let p = PersistencePromise.resolve(); + for (let i = 0; i < 9; i++) { + p = p.next(() => addNextTargetInTransaction(txn)).next(); + } + return p; } - return p; - }); + ); for (let i = 9; i < 50; i++) { await addNextTarget(); } @@ -290,13 +302,17 @@ function genericLruGarbageCollectorTests( }); it('all collected targets in a single transaction', async () => { - await persistence.runTransaction('11 targets in a batch', 'readwrite', txn => { - let p = PersistencePromise.resolve(); - for (let i = 0; i < 11; i++) { - p = p.next(() => addNextTargetInTransaction(txn)).next(); + await persistence.runTransaction( + '11 targets in a batch', + 'readwrite', + txn => { + let p = PersistencePromise.resolve(); + for (let i 
= 0; i < 11; i++) { + p = p.next(() => addNextTargetInTransaction(txn)).next(); + } + return p; } - return p; - }); + ); for (let i = 11; i < 50; i++) { await addNextTarget(); } @@ -333,12 +349,16 @@ function genericLruGarbageCollectorTests( for (let i = 0; i < 49; i++) { await addNextTarget(); } - await persistence.runTransaction('target with a mutation', 'readwrite', txn => { - return addNextTargetInTransaction(txn).next(queryData => { - const keySet = documentKeySet().add(docInTarget); - return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); - }); - }); + await persistence.runTransaction( + 'target with a mutation', + 'readwrite', + txn => { + return addNextTargetInTransaction(txn).next(queryData => { + const keySet = documentKeySet().add(docInTarget); + return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); + }); + } + ); const expected = initialSequenceNumber + 3; expect(await nthSequenceNumber(10)).to.equal(expected); }); @@ -722,13 +742,17 @@ function genericLruGarbageCollectorTests( ); // Remove the middle target - await persistence.runTransaction('remove middle target', 'readwrite', txn => { - // TODO(gsoltis): fix this cast - return (persistence as IndexedDbPersistence).referenceDelegate.removeTarget( - txn, - middleTarget - ); - }); + await persistence.runTransaction( + 'remove middle target', + 'readwrite', + txn => { + // TODO(gsoltis): fix this cast + return (persistence as IndexedDbPersistence).referenceDelegate.removeTarget( + txn, + middleTarget + ); + } + ); // Write a doc and get an ack, not part of a target await persistence.runTransaction( From 11cad0d0d0338fb6d3318d0ff73c8cc0666115cd Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 10:47:21 -0700 Subject: [PATCH 04/23] Add garbagecollector property, reorder indexeddb query cache constructor params --- .../firestore/src/local/indexeddb_persistence.ts | 16 ++++++++++++---- .../firestore/src/local/indexeddb_query_cache.ts | 4 ++-- 
.../firestore/src/local/lru_garbage_collector.ts | 2 ++ .../unit/local/lru_garbage_collector.test.ts | 5 +---- 4 files changed, 17 insertions(+), 10 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 62c53a6c66d..e3801459586 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -64,7 +64,11 @@ import { ReferenceSet } from './reference_set'; import { QueryData } from './query_data'; import { DocumentKey } from '../model/document_key'; import { encode, EncodedResourcePath } from './encoded_resource_path'; -import { LiveTargets, LruDelegate } from './lru_garbage_collector'; +import { + LiveTargets, + LruDelegate, + LruGarbageCollector +} from './lru_garbage_collector'; import { ListenSequenceNumber, TargetId } from '../core/types'; const LOG_TAG = 'IndexedDbPersistence'; @@ -260,7 +264,7 @@ export class IndexedDbPersistence implements Persistence { private remoteDocumentCache: IndexedDbRemoteDocumentCache; private readonly webStorage: Storage; private listenSequence: ListenSequence; - readonly referenceDelegate: ReferenceDelegate; + readonly referenceDelegate: IndexedDbLruDelegate; // Note that `multiClientParams` must be present to enable multi-client support while multi-tab // is still experimental. 
When multi-client is switched to always on, `multiClientParams` will @@ -284,7 +288,7 @@ export class IndexedDbPersistence implements Persistence { this.serializer = new LocalSerializer(serializer); this.document = platform.document; this.allowTabSynchronization = multiClientParams !== undefined; - this.queryCache = new IndexedDbQueryCache(this.serializer, this); + this.queryCache = new IndexedDbQueryCache(this, this.serializer); this.remoteDocumentCache = new IndexedDbRemoteDocumentCache( this.serializer, /*keepDocumentChangeLog=*/ this.allowTabSynchronization @@ -1046,7 +1050,11 @@ function clientMetadataStore( class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { private additionalReferences: ReferenceSet | null; - constructor(private readonly db: IndexedDbPersistence) {} + readonly garbageCollector: LruGarbageCollector; + + constructor(private readonly db: IndexedDbPersistence) { + this.garbageCollector = new LruGarbageCollector(this); + } setInMemoryPins(inMemoryPins: ReferenceSet): void { this.additionalReferences = inMemoryPins; diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index 2a6b92a92b6..6f780eeb579 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -50,8 +50,8 @@ import { LiveTargets } from './lru_garbage_collector'; export class IndexedDbQueryCache implements QueryCache { constructor( - private serializer: LocalSerializer, - private readonly db: IndexedDbPersistence + private readonly db: IndexedDbPersistence, + private serializer: LocalSerializer ) {} /** The garbage collector to notify about potential garbage keys. 
*/ diff --git a/packages/firestore/src/local/lru_garbage_collector.ts b/packages/firestore/src/local/lru_garbage_collector.ts index bbbe4f09ca3..178409f6fb3 100644 --- a/packages/firestore/src/local/lru_garbage_collector.ts +++ b/packages/firestore/src/local/lru_garbage_collector.ts @@ -27,6 +27,8 @@ import { AnyJs } from '../util/misc'; * needs from the persistence layer. */ export interface LruDelegate { + readonly garbageCollector: LruGarbageCollector; + getTargetCount(txn: PersistenceTransaction): PersistencePromise; /** Enumerates all the targets in the QueryCache. */ diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts index 9736b5d4f04..793ca3e54f1 100644 --- a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -98,10 +98,7 @@ function genericLruGarbageCollectorTests( const referenceDelegate = (persistence as IndexedDbPersistence) .referenceDelegate; referenceDelegate.setInMemoryPins(new ReferenceSet()); - garbageCollector = new LruGarbageCollector( - // tslint:disable-next-line:no-any - (referenceDelegate as any) as LruDelegate - ); + garbageCollector = referenceDelegate.garbageCollector; } function nextQueryData(sequenceNumber: ListenSequenceNumber): QueryData { From 3114ce7c602f2d900de8823ff953877312da3ce6 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 11:32:35 -0700 Subject: [PATCH 05/23] LiveTargets -> ActiveTargets, fix comments on ReferenceDelegate --- .../src/local/indexeddb_persistence.ts | 6 +++--- .../src/local/indexeddb_query_cache.ts | 4 ++-- .../src/local/lru_garbage_collector.ts | 8 ++++---- packages/firestore/src/local/persistence.ts | 9 +++++---- .../unit/local/lru_garbage_collector.test.ts | 18 +++++++++--------- 5 files changed, 23 insertions(+), 22 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts 
b/packages/firestore/src/local/indexeddb_persistence.ts index e3801459586..3daf83ddd37 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -65,7 +65,7 @@ import { QueryData } from './query_data'; import { DocumentKey } from '../model/document_key'; import { encode, EncodedResourcePath } from './encoded_resource_path'; import { - LiveTargets, + ActiveTargets, LruDelegate, LruGarbageCollector } from './lru_garbage_collector'; @@ -1123,9 +1123,9 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { removeTargets( txn: PersistenceTransaction, upperBound: ListenSequenceNumber, - liveTargets: LiveTargets + activeTargetIds: ActiveTargets ): PersistencePromise { - return this.db.getQueryCache().removeTargets(txn, upperBound, liveTargets); + return this.db.getQueryCache().removeTargets(txn, upperBound, activeTargetIds); } removeOrphanedDocuments( diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index 6f780eeb579..bb2b849eb9d 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -46,7 +46,7 @@ import { SentinelRow } from './indexeddb_persistence'; import { ListenSequence } from '../core/listen_sequence'; -import { LiveTargets } from './lru_garbage_collector'; +import { ActiveTargets } from './lru_garbage_collector'; export class IndexedDbQueryCache implements QueryCache { constructor( @@ -159,7 +159,7 @@ export class IndexedDbQueryCache implements QueryCache { removeTargets( txn: PersistenceTransaction, upperBound: ListenSequenceNumber, - activeTargetIds: LiveTargets + activeTargetIds: ActiveTargets ): PersistencePromise { let count = 0; const promises: Array> = []; diff --git a/packages/firestore/src/local/lru_garbage_collector.ts b/packages/firestore/src/local/lru_garbage_collector.ts index 178409f6fb3..3ab01620c97 100644 --- 
a/packages/firestore/src/local/lru_garbage_collector.ts +++ b/packages/firestore/src/local/lru_garbage_collector.ts @@ -52,7 +52,7 @@ export interface LruDelegate { removeTargets( txn: PersistenceTransaction, upperBound: ListenSequenceNumber, - activeTargetIds: LiveTargets + activeTargetIds: ActiveTargets ): PersistencePromise; /** @@ -70,7 +70,7 @@ export interface LruDelegate { /** * Describes an object whose keys are active target ids. We do not care about the type of the values. */ -export type LiveTargets = { +export type ActiveTargets = { [id: number]: AnyJs; }; @@ -252,9 +252,9 @@ export class LruGarbageCollector { removeTargets( txn: PersistenceTransaction, upperBound: ListenSequenceNumber, - liveTargets: LiveTargets + activeTargetIds: ActiveTargets ): PersistencePromise { - return this.delegate.removeTargets(txn, upperBound, liveTargets); + return this.delegate.removeTargets(txn, upperBound, activeTargetIds); } /** diff --git a/packages/firestore/src/local/persistence.ts b/packages/firestore/src/local/persistence.ts index 9c289183c28..986224e7af4 100644 --- a/packages/firestore/src/local/persistence.ts +++ b/packages/firestore/src/local/persistence.ts @@ -54,12 +54,13 @@ export type PrimaryStateListener = (isPrimary: boolean) => Promise; * includes being added to a target, being removed from a target, being subject to mutation, and * being mutated by the user. * - *

Different implementations may do different things with each of these events. Not every + * Different implementations may do different things with each of these events. Not every * implementation needs to do something with every lifecycle hook. * - * Porting note: since sequence numbers are attached to transactions in this client, the - * ReferenceDelegate does not need to deal in transactional semantics, nor does it need to - * track and generate sequence numbers. + * PORTING NOTE: since sequence numbers are attached to transactions in this + * client, the ReferenceDelegate does not need to deal in transactional + * semantics (onTransactionStarted/Committed()), nor does it need to track and + * generate sequence numbers (getCurrentSequenceNumber()). */ export interface ReferenceDelegate { /** diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts index 793ca3e54f1..d5f5e74a4fe 100644 --- a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -28,7 +28,7 @@ import { Query } from '../../../src/core/query'; import { path, wrapObject } from '../../util/helpers'; import { QueryCache } from '../../../src/local/query_cache'; import { - LiveTargets, + ActiveTargets, LruDelegate, LruGarbageCollector } from '../../../src/local/lru_garbage_collector'; @@ -197,10 +197,10 @@ function genericLruGarbageCollectorTests( function removeTargets( upperBound: ListenSequenceNumber, - liveTargets: LiveTargets + activeTargetIds: ActiveTargets ): Promise { return persistence.runTransaction('remove targets', 'readwrite', txn => { - return garbageCollector.removeTargets(txn, upperBound, liveTargets); + return garbageCollector.removeTargets(txn, upperBound, activeTargetIds); }); } @@ -361,20 +361,20 @@ function genericLruGarbageCollectorTests( }); it('remove targets up through sequence number', async () => { - const 
liveTargets: LiveTargets = {}; + const activeTargetIds: ActiveTargets = {}; for (let i = 0; i < 100; i++) { const queryData = await addNextTarget(); // Mark odd queries as live so we can test filtering out live queries. const targetId = queryData.targetId; if (targetId % 2 === 1) { - liveTargets[targetId] = queryData; + activeTargetIds[targetId] = queryData; } } // GC up through 20th query, which is 20%. // Expect to have GC'd 10 targets, since every other target is live const upperBound = 20 + initialSequenceNumber; - const removed = await removeTargets(upperBound, liveTargets); + const removed = await removeTargets(upperBound, activeTargetIds); expect(removed).to.equal(10); // Make sure we removed the even targets with targetID <= 20. await persistence.runTransaction( @@ -766,11 +766,11 @@ function genericLruGarbageCollectorTests( ); // Finally, do the garbage collection, up to but not including the removal of middleTarget - const liveTargets: LiveTargets = {}; - liveTargets[oldestTarget.targetId] = {}; + const activeTargetIds: ActiveTargets = {}; + activeTargetIds[oldestTarget.targetId] = {}; // Expect to remove newest target - const removed = await removeTargets(upperBound, liveTargets); + const removed = await removeTargets(upperBound, activeTargetIds); expect(removed).to.equal(1); const docsRemoved = await removeOrphanedDocuments(upperBound); expect(docsRemoved).to.equal(expectedRemoved.size); From 5718c24ade0e7cdaa76ca974b9cdce5cf13cd995 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 11:33:05 -0700 Subject: [PATCH 06/23] [AUTOMATED]: Prettier Code Styling --- packages/firestore/src/local/indexeddb_persistence.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 3daf83ddd37..d9db8e22f3e 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ 
b/packages/firestore/src/local/indexeddb_persistence.ts @@ -1125,7 +1125,9 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { upperBound: ListenSequenceNumber, activeTargetIds: ActiveTargets ): PersistencePromise { - return this.db.getQueryCache().removeTargets(txn, upperBound, activeTargetIds); + return this.db + .getQueryCache() + .removeTargets(txn, upperBound, activeTargetIds); } removeOrphanedDocuments( From baa9fa888c67607ce29a4f1109de7aeaf0f387a0 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 13:11:08 -0700 Subject: [PATCH 07/23] Methods reordered to match android --- .../src/local/indexeddb_persistence.ts | 151 +++++++++--------- 1 file changed, 76 insertions(+), 75 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index d9db8e22f3e..62a0c21cc19 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -1056,78 +1056,105 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { this.garbageCollector = new LruGarbageCollector(this); } - setInMemoryPins(inMemoryPins: ReferenceSet): void { - this.additionalReferences = inMemoryPins; + getTargetCount(txn: PersistenceTransaction): PersistencePromise { + return this.db.getQueryCache().getQueryCount(txn); } - removeTarget( + forEachTarget( txn: PersistenceTransaction, - queryData: QueryData + f: (q: QueryData) => void ): PersistencePromise { - const updated = queryData.copy({ - sequenceNumber: txn.currentSequenceNumber - }); - return this.db.getQueryCache().updateQueryData(txn, updated); + return this.db.getQueryCache().forEachTarget(txn, f); } - addReference( + forEachOrphanedDocumentSequenceNumber( txn: PersistenceTransaction, - key: DocumentKey + f: (sequenceNumber: ListenSequenceNumber) => void ): PersistencePromise { - return this.writeSentinelKey(txn, key); + return this.db + .getQueryCache() + 
.forEachOrphanedDocument(txn, (docKey, sequenceNumber) => + f(sequenceNumber) + ); } - removeReference( - txn: PersistenceTransaction, - key: DocumentKey - ): PersistencePromise { - return this.writeSentinelKey(txn, key); + setInMemoryPins(inMemoryPins: ReferenceSet): void { + this.additionalReferences = inMemoryPins; } - removeMutationReference( + addReference( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { return this.writeSentinelKey(txn, key); } - onLimboDocumentUpdated( + removeReference( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { return this.writeSentinelKey(txn, key); } - forEachTarget( + removeTargets( txn: PersistenceTransaction, - f: (q: QueryData) => void - ): PersistencePromise { - return this.db.getQueryCache().forEachTarget(txn, f); + upperBound: ListenSequenceNumber, + activeTargetIds: ActiveTargets + ): PersistencePromise { + return this.db + .getQueryCache() + .removeTargets(txn, upperBound, activeTargetIds); } - forEachOrphanedDocumentSequenceNumber( + removeMutationReference( txn: PersistenceTransaction, - f: (sequenceNumber: ListenSequenceNumber) => void + key: DocumentKey ): PersistencePromise { - return this.db - .getQueryCache() - .forEachOrphanedDocument(txn, (docKey, sequenceNumber) => - f(sequenceNumber) - ); + return this.writeSentinelKey(txn, key); } - getTargetCount(txn: PersistenceTransaction): PersistencePromise { - return this.db.getQueryCache().getQueryCount(txn); + /** Returns true if any mutation queue contains the given document. 
*/ + private mutationQueuesContainKey( + txn: PersistenceTransaction, + docKey: DocumentKey + ): PersistencePromise { + let found = false; + return IndexedDbPersistence.getStore( + txn, + DbMutationQueue.store + ) + .iterateAsync(userId => { + return mutationQueueContainsKey(txn, userId, docKey).next( + containsKey => { + if (containsKey) { + found = true; + } + return PersistencePromise.resolve(!containsKey); + } + ); + }) + .next(() => found); } - removeTargets( + /** + * Returns true if anything would prevent this document from being garbage + * collected, given that the document in question is not present in any + * targets and has a sequence number less than or equal to the upper bound for + * the collection run. + */ + private isPinned( txn: PersistenceTransaction, - upperBound: ListenSequenceNumber, - activeTargetIds: ActiveTargets - ): PersistencePromise { - return this.db - .getQueryCache() - .removeTargets(txn, upperBound, activeTargetIds); + docKey: DocumentKey + ): PersistencePromise { + return this.additionalReferences!.containsKey(txn, docKey).next( + isPinned => { + if (isPinned) { + return PersistencePromise.resolve(true); + } else { + return this.mutationQueuesContainKey(txn, docKey); + } + } + ); } removeOrphanedDocuments( @@ -1166,47 +1193,21 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { ]); } - /** - * Returns true if anything would prevent this document from being garbage collected, given that - * the document in question is not present in any targets and has a sequence number less than or - * equal to the upper bound for the collection run. 
- */ - private isPinned( + removeTarget( txn: PersistenceTransaction, - docKey: DocumentKey - ): PersistencePromise { - return this.additionalReferences!.containsKey(txn, docKey).next( - isPinned => { - if (isPinned) { - return PersistencePromise.resolve(true); - } else { - return this.mutationQueuesContainKey(txn, docKey); - } - } - ); + queryData: QueryData + ): PersistencePromise { + const updated = queryData.copy({ + sequenceNumber: txn.currentSequenceNumber + }); + return this.db.getQueryCache().updateQueryData(txn, updated); } - /** Returns true if any mutation queue contains the given document. */ - private mutationQueuesContainKey( + onLimboDocumentUpdated( txn: PersistenceTransaction, - docKey: DocumentKey - ): PersistencePromise { - let found = false; - return IndexedDbPersistence.getStore( - txn, - DbMutationQueue.store - ) - .iterateAsync(userId => { - return mutationQueueContainsKey(txn, userId, docKey).next( - containsKey => { - if (containsKey) { - found = true; - } - return PersistencePromise.resolve(!containsKey); - } - ); - }) - .next(() => found); + key: DocumentKey + ): PersistencePromise { + return this.writeSentinelKey(txn, key); } private writeSentinelKey( From ed3c84da4ec4c5c5bd3a5786cdb7f51b856cd8e0 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 14:19:26 -0700 Subject: [PATCH 08/23] Remove forEachOrphanedDocument from query cache, move to reference delegate --- .../src/local/indexeddb_persistence.ts | 90 ++++++++++++++----- .../src/local/indexeddb_query_cache.ts | 58 +----------- 2 files changed, 71 insertions(+), 77 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 62a0c21cc19..f1f62cc0986 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -63,7 +63,7 @@ import { ListenSequence, SequenceNumberSyncer } from '../core/listen_sequence'; import { 
ReferenceSet } from './reference_set'; import { QueryData } from './query_data'; import { DocumentKey } from '../model/document_key'; -import { encode, EncodedResourcePath } from './encoded_resource_path'; +import { decode, encode, EncodedResourcePath } from './encoded_resource_path'; import { ActiveTargets, LruDelegate, @@ -1071,9 +1071,7 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, f: (sequenceNumber: ListenSequenceNumber) => void ): PersistencePromise { - return this.db - .getQueryCache() - .forEachOrphanedDocument(txn, (docKey, sequenceNumber) => + return this.forEachOrphanedDocument(txn, (docKey, sequenceNumber) => f(sequenceNumber) ); } @@ -1086,14 +1084,14 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - return this.writeSentinelKey(txn, key); + return writeSentinelKey(txn, key); } removeReference( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - return this.writeSentinelKey(txn, key); + return writeSentinelKey(txn, key); } removeTargets( @@ -1110,7 +1108,7 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - return this.writeSentinelKey(txn, key); + return writeSentinelKey(txn, key); } /** Returns true if any mutation queue contains the given document. 
*/ @@ -1162,20 +1160,21 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { upperBound: ListenSequenceNumber ): PersistencePromise { let count = 0; - let p = PersistencePromise.resolve(); - const iteration = this.db - .getQueryCache() - .forEachOrphanedDocument(txn, (docKey, sequenceNumber) => { + const promises: Array> = []; + const iteration = this.forEachOrphanedDocument(txn, (docKey, sequenceNumber) => { if (sequenceNumber <= upperBound) { - p = p.next(() => this.isPinned(txn, docKey)).next(isPinned => { + const p = this.isPinned(txn, docKey).next(isPinned => { if (!isPinned) { count++; return this.removeOrphanedDocument(txn, docKey); } }); + promises.push(p); } }); - return iteration.next(() => p).next(() => count); + // Wait for iteration first to make sure we have a chance to add all of the + // removal promises to the array. + return iteration.next(() => PersistencePromise.waitFor(promises)).next(() => count); } /** @@ -1207,16 +1206,58 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - return this.writeSentinelKey(txn, key); + return writeSentinelKey(txn, key); } - private writeSentinelKey( + /** + * Call provided function for each document in the cache that is 'orphaned'. Orphaned + * means not a part of any target, so the only entry in the target-document index for + * that document will be the sentinel row (targetId 0), which will also have the sequence + * number for the last time the document was accessed. 
+ */ + private forEachOrphanedDocument( txn: PersistenceTransaction, - key: DocumentKey + f: (docKey: DocumentKey, sequenceNumber: ListenSequenceNumber) => void ): PersistencePromise { - return sentinelKeyStore(txn).put( - sentinelRow(key, txn.currentSequenceNumber) - ); + const store = sentinelKeyStore(txn); + let nextToReport: ListenSequenceNumber = ListenSequence.INVALID; + let nextPath: EncodedResourcePath; + return store + .iterate( + { + index: DbTargetDocument.documentTargetsIndex + }, + ([targetId, docKey], { path, sequenceNumber }) => { + if (targetId === 0) { + // if nextToReport is valid, report it, this is a new key so the last one + // must be not be a member of any targets. + if (nextToReport !== ListenSequence.INVALID) { + f( + new DocumentKey(decode(nextPath)), + nextToReport + ); + } + // set nextToReport to be this sequence number. It's the next one we might + // report, if we don't find any targets for this document. + nextToReport = sequenceNumber; + nextPath = path; + } else { + // set nextToReport to be invalid, we know we don't need to report this one since + // we found a target for it. + nextToReport = ListenSequence.INVALID; + } + } + ) + .next(() => { + // Since we report sequence numbers after getting to the next key, we need to check if + // the last key we iterated over was an orphaned document and report it. + if (nextToReport !== ListenSequence.INVALID) { + f( + new DocumentKey(decode(nextPath)), + nextToReport + ); + } + }); } } @@ -1225,7 +1266,7 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { * have TargetId === 0. The sequence number is an approximation of a last-used value * for the document identified by the path portion of the key. 
*/ -export type SentinelRow = { +type SentinelRow = { targetId: TargetId; path: EncodedResourcePath; sequenceNumber: ListenSequenceNumber; @@ -1261,3 +1302,12 @@ function sentinelRow( sequenceNumber }; } + +function writeSentinelKey( + txn: PersistenceTransaction, + key: DocumentKey +): PersistencePromise { + return sentinelKeyStore(txn).put( + sentinelRow(key, txn.currentSequenceNumber) + ); +} diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index bb2b849eb9d..70875751f8e 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -42,10 +42,8 @@ import { TargetIdGenerator } from '../core/target_id_generator'; import { SimpleDbStore, SimpleDbTransaction, SimpleDb } from './simple_db'; import { IndexedDbPersistence, - IndexedDbTransaction, - SentinelRow + IndexedDbTransaction } from './indexeddb_persistence'; -import { ListenSequence } from '../core/listen_sequence'; import { ActiveTargets } from './lru_garbage_collector'; export class IndexedDbQueryCache implements QueryCache { @@ -191,60 +189,6 @@ export class IndexedDbQueryCache implements QueryCache { }); } - /** - * Call provided function for each document in the cache that is 'orphaned'. Orphaned - * means not a part of any target, so the only entry in the target-document index for - * that document will be the sentinel row (targetId 0), which will also have the sequence - * number for the last time the document was accessed. 
- */ - forEachOrphanedDocument( - txn: PersistenceTransaction, - f: (docKey: DocumentKey, sequenceNumber: ListenSequenceNumber) => void - ): PersistencePromise { - const store = IndexedDbPersistence.getStore< - DbTargetDocumentKey, - SentinelRow - >(txn, DbTargetDocument.store); - let nextToReport: ListenSequenceNumber = ListenSequence.INVALID; - let nextPath: EncodedResourcePath.EncodedResourcePath; - return store - .iterate( - { - index: DbTargetDocument.documentTargetsIndex - }, - ([targetId, docKey], { path, sequenceNumber }) => { - if (targetId === 0) { - // if nextToReport is valid, report it, this is a new key so the last one - // must be not be a member of any targets. - if (nextToReport !== ListenSequence.INVALID) { - f( - new DocumentKey(EncodedResourcePath.decode(nextPath)), - nextToReport - ); - } - // set nextToReport to be this sequence number. It's the next one we might - // report, if we don't find any targets for this document. - nextToReport = sequenceNumber; - nextPath = path; - } else { - // set nextToReport to be invalid, we know we don't need to report this one since - // we found a target for it. - nextToReport = ListenSequence.INVALID; - } - } - ) - .next(() => { - // Since we report sequence numbers after getting to the next key, we need to check if - // the last key we iterated over was an orphaned document and report it. 
- if (nextToReport !== ListenSequence.INVALID) { - f( - new DocumentKey(EncodedResourcePath.decode(nextPath)), - nextToReport - ); - } - }); - } - private retrieveMetadata( transaction: PersistenceTransaction ): PersistencePromise { From 9d686101060da0275716b780a63fe2d3f6e0650f Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 14:50:22 -0700 Subject: [PATCH 09/23] Fix comment typo and reflow, move mutationQueuesContainKey to indexeddb_mutation_queue.ts --- .../src/local/indexeddb_mutation_queue.ts | 21 +++++++- .../src/local/indexeddb_persistence.ts | 48 +++++-------------- 2 files changed, 32 insertions(+), 37 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_mutation_queue.ts b/packages/firestore/src/local/indexeddb_mutation_queue.ts index 17defce5ff9..3506ee4387b 100644 --- a/packages/firestore/src/local/indexeddb_mutation_queue.ts +++ b/packages/firestore/src/local/indexeddb_mutation_queue.ts @@ -551,7 +551,7 @@ export class IndexedDbMutationQueue implements MutationQueue { /** * @return true if the mutation queue for the given user contains a pending mutation for the given key. */ -export function mutationQueueContainsKey( +function mutationQueueContainsKey( txn: PersistenceTransaction, userId: string, key: DocumentKey @@ -571,6 +571,25 @@ export function mutationQueueContainsKey( .next(() => containsKey); } +/** Returns true if any mutation queue contains the given document. */ +export function mutationQueuesContainKey( + txn: PersistenceTransaction, + docKey: DocumentKey +): PersistencePromise { + let found = false; + return mutationQueuesStore(txn).iterateAsync(userId => { + return mutationQueueContainsKey(txn, userId, docKey).next( + containsKey => { + if (containsKey) { + found = true; + } + return PersistencePromise.resolve(!containsKey); + } + ); + }) + .next(() => found); +} + /** * Delete a mutation batch and the associated document mutations. * @return A PersistencePromise of the document mutations that were removed. 
diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index f1f62cc0986..b108aba1477 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -23,7 +23,7 @@ import * as log from '../util/log'; import { IndexedDbMutationQueue, - mutationQueueContainsKey + mutationQueuesContainKey } from './indexeddb_mutation_queue'; import { IndexedDbQueryCache, @@ -40,9 +40,7 @@ import { DbTargetGlobal, SchemaConverter, DbTargetDocument, - DbTargetDocumentKey, - DbMutationQueueKey, - DbMutationQueue + DbTargetDocumentKey } from './indexeddb_schema'; import { LocalSerializer } from './local_serializer'; import { MutationQueue } from './mutation_queue'; @@ -1111,29 +1109,6 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { return writeSentinelKey(txn, key); } - /** Returns true if any mutation queue contains the given document. */ - private mutationQueuesContainKey( - txn: PersistenceTransaction, - docKey: DocumentKey - ): PersistencePromise { - let found = false; - return IndexedDbPersistence.getStore( - txn, - DbMutationQueue.store - ) - .iterateAsync(userId => { - return mutationQueueContainsKey(txn, userId, docKey).next( - containsKey => { - if (containsKey) { - found = true; - } - return PersistencePromise.resolve(!containsKey); - } - ); - }) - .next(() => found); - } - /** * Returns true if anything would prevent this document from being garbage * collected, given that the document in question is not present in any @@ -1149,7 +1124,7 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { if (isPinned) { return PersistencePromise.resolve(true); } else { - return this.mutationQueuesContainKey(txn, docKey); + return mutationQueuesContainKey(txn, docKey); } } ); @@ -1229,28 +1204,29 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { }, ([targetId, docKey], { path, sequenceNumber 
}) => { if (targetId === 0) { - // if nextToReport is valid, report it, this is a new key so the last one - // must be not be a member of any targets. + // if nextToReport is valid, report it, this is a new key so the + // last one must not be a member of any targets. if (nextToReport !== ListenSequence.INVALID) { f( new DocumentKey(decode(nextPath)), nextToReport ); } - // set nextToReport to be this sequence number. It's the next one we might - // report, if we don't find any targets for this document. + // set nextToReport to be this sequence number. It's the next one we + // might report, if we don't find any targets for this document. nextToReport = sequenceNumber; nextPath = path; } else { - // set nextToReport to be invalid, we know we don't need to report this one since - // we found a target for it. + // set nextToReport to be invalid, we know we don't need to report + // this one since we found a target for it. nextToReport = ListenSequence.INVALID; } } ) .next(() => { - // Since we report sequence numbers after getting to the next key, we need to check if - // the last key we iterated over was an orphaned document and report it. + // Since we report sequence numbers after getting to the next key, we + // need to check if the last key we iterated over was an orphaned + // document and report it. 
if (nextToReport !== ListenSequence.INVALID) { f( new DocumentKey(decode(nextPath)), From cba7bf7052d9fc06ebb9ba694669dc9927e1853d Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 14:51:03 -0700 Subject: [PATCH 10/23] [AUTOMATED]: Prettier Code Styling --- .../src/local/indexeddb_mutation_queue.ts | 13 ++++----- .../src/local/indexeddb_persistence.ts | 29 +++++++++---------- 2 files changed, 19 insertions(+), 23 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_mutation_queue.ts b/packages/firestore/src/local/indexeddb_mutation_queue.ts index 3506ee4387b..dc2df57e131 100644 --- a/packages/firestore/src/local/indexeddb_mutation_queue.ts +++ b/packages/firestore/src/local/indexeddb_mutation_queue.ts @@ -577,17 +577,16 @@ export function mutationQueuesContainKey( docKey: DocumentKey ): PersistencePromise { let found = false; - return mutationQueuesStore(txn).iterateAsync(userId => { - return mutationQueueContainsKey(txn, userId, docKey).next( - containsKey => { + return mutationQueuesStore(txn) + .iterateAsync(userId => { + return mutationQueueContainsKey(txn, userId, docKey).next(containsKey => { if (containsKey) { found = true; } return PersistencePromise.resolve(!containsKey); - } - ); - }) - .next(() => found); + }); + }) + .next(() => found); } /** diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index b108aba1477..f154cfb32c7 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -1070,8 +1070,8 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { f: (sequenceNumber: ListenSequenceNumber) => void ): PersistencePromise { return this.forEachOrphanedDocument(txn, (docKey, sequenceNumber) => - f(sequenceNumber) - ); + f(sequenceNumber) + ); } setInMemoryPins(inMemoryPins: ReferenceSet): void { @@ -1136,7 +1136,9 @@ class IndexedDbLruDelegate implements 
ReferenceDelegate, LruDelegate { ): PersistencePromise { let count = 0; const promises: Array> = []; - const iteration = this.forEachOrphanedDocument(txn, (docKey, sequenceNumber) => { + const iteration = this.forEachOrphanedDocument( + txn, + (docKey, sequenceNumber) => { if (sequenceNumber <= upperBound) { const p = this.isPinned(txn, docKey).next(isPinned => { if (!isPinned) { @@ -1146,10 +1148,13 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { }); promises.push(p); } - }); + } + ); // Wait for iteration first to make sure we have a chance to add all of the // removal promises to the array. - return iteration.next(() => PersistencePromise.waitFor(promises)).next(() => count); + return iteration + .next(() => PersistencePromise.waitFor(promises)) + .next(() => count); } /** @@ -1207,10 +1212,7 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { // if nextToReport is valid, report it, this is a new key so the // last one must not be a member of any targets. if (nextToReport !== ListenSequence.INVALID) { - f( - new DocumentKey(decode(nextPath)), - nextToReport - ); + f(new DocumentKey(decode(nextPath)), nextToReport); } // set nextToReport to be this sequence number. It's the next one we // might report, if we don't find any targets for this document. @@ -1228,10 +1230,7 @@ class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { // need to check if the last key we iterated over was an orphaned // document and report it. 
if (nextToReport !== ListenSequence.INVALID) { - f( - new DocumentKey(decode(nextPath)), - nextToReport - ); + f(new DocumentKey(decode(nextPath)), nextToReport); } }); } @@ -1283,7 +1282,5 @@ function writeSentinelKey( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - return sentinelKeyStore(txn).put( - sentinelRow(key, txn.currentSequenceNumber) - ); + return sentinelKeyStore(txn).put(sentinelRow(key, txn.currentSequenceNumber)); } From f88817a6ec4e603f0a07f32787f97e7ada1114c3 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 15:08:42 -0700 Subject: [PATCH 11/23] Only pass reference delegate to indexeddb query cache, not whole persistence object --- packages/firestore/src/local/indexeddb_persistence.ts | 4 ++-- packages/firestore/src/local/indexeddb_query_cache.ts | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index f154cfb32c7..57f1df7becf 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -286,7 +286,7 @@ export class IndexedDbPersistence implements Persistence { this.serializer = new LocalSerializer(serializer); this.document = platform.document; this.allowTabSynchronization = multiClientParams !== undefined; - this.queryCache = new IndexedDbQueryCache(this, this.serializer); + this.queryCache = new IndexedDbQueryCache(this.referenceDelegate, this.serializer); this.remoteDocumentCache = new IndexedDbRemoteDocumentCache( this.serializer, /*keepDocumentChangeLog=*/ this.allowTabSynchronization @@ -1045,7 +1045,7 @@ function clientMetadataStore( } /** Provides LRU functionality for IndexedDB persistence. 
*/ -class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { +export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { private additionalReferences: ReferenceSet | null; readonly garbageCollector: LruGarbageCollector; diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index 70875751f8e..b123e61e714 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -41,6 +41,7 @@ import { QueryData } from './query_data'; import { TargetIdGenerator } from '../core/target_id_generator'; import { SimpleDbStore, SimpleDbTransaction, SimpleDb } from './simple_db'; import { + IndexedDbLruDelegate, IndexedDbPersistence, IndexedDbTransaction } from './indexeddb_persistence'; @@ -48,7 +49,7 @@ import { ActiveTargets } from './lru_garbage_collector'; export class IndexedDbQueryCache implements QueryCache { constructor( - private readonly db: IndexedDbPersistence, + private readonly referenceDelegate: IndexedDbLruDelegate, private serializer: LocalSerializer ) {} @@ -282,7 +283,7 @@ export class IndexedDbQueryCache implements QueryCache { keys.forEach(key => { const path = EncodedResourcePath.encode(key.path); promises.push(store.put(new DbTargetDocument(targetId, path))); - promises.push(this.db.referenceDelegate.addReference(txn, key)); + promises.push(this.referenceDelegate.addReference(txn, key)); }); return PersistencePromise.waitFor(promises); } @@ -302,7 +303,7 @@ export class IndexedDbQueryCache implements QueryCache { if (this.garbageCollector !== null) { this.garbageCollector.addPotentialGarbageKey(key); } - promises.push(this.db.referenceDelegate.removeReference(txn, key)); + promises.push(this.referenceDelegate.removeReference(txn, key)); }); return PersistencePromise.waitFor(promises); } From a79a9410f2580f04e806093c97fec858c227aa50 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 
15:09:10 -0700 Subject: [PATCH 12/23] [AUTOMATED]: Prettier Code Styling --- packages/firestore/src/local/indexeddb_persistence.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 57f1df7becf..890041f26e6 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -286,7 +286,10 @@ export class IndexedDbPersistence implements Persistence { this.serializer = new LocalSerializer(serializer); this.document = platform.document; this.allowTabSynchronization = multiClientParams !== undefined; - this.queryCache = new IndexedDbQueryCache(this.referenceDelegate, this.serializer); + this.queryCache = new IndexedDbQueryCache( + this.referenceDelegate, + this.serializer + ); this.remoteDocumentCache = new IndexedDbRemoteDocumentCache( this.serializer, /*keepDocumentChangeLog=*/ this.allowTabSynchronization From be943c55d4095b93269c6222ca93dd493e4c888f Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 15:51:23 -0700 Subject: [PATCH 13/23] iterateAsync -> iterateSerial --- .../firestore/src/local/indexeddb_mutation_queue.ts | 2 +- packages/firestore/src/local/lru_garbage_collector.ts | 5 ++++- packages/firestore/src/local/simple_db.ts | 11 ++++++----- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_mutation_queue.ts b/packages/firestore/src/local/indexeddb_mutation_queue.ts index dc2df57e131..05845918e43 100644 --- a/packages/firestore/src/local/indexeddb_mutation_queue.ts +++ b/packages/firestore/src/local/indexeddb_mutation_queue.ts @@ -578,7 +578,7 @@ export function mutationQueuesContainKey( ): PersistencePromise { let found = false; return mutationQueuesStore(txn) - .iterateAsync(userId => { + .iterateSerial(userId => { return mutationQueueContainsKey(txn, userId, docKey).next(containsKey => { if 
(containsKey) { found = true; diff --git a/packages/firestore/src/local/lru_garbage_collector.ts b/packages/firestore/src/local/lru_garbage_collector.ts index 3ab01620c97..341e9800a83 100644 --- a/packages/firestore/src/local/lru_garbage_collector.ts +++ b/packages/firestore/src/local/lru_garbage_collector.ts @@ -37,7 +37,10 @@ export interface LruDelegate { f: (target: QueryData) => void ): PersistencePromise; - /** Enumerates sequence numbers for documents not associated with a target. */ + /** + * Enumerates sequence numbers for documents not associated with a target. + * Note that this may include duplicate sequence numbers. + */ forEachOrphanedDocumentSequenceNumber( txn: PersistenceTransaction, f: (sequenceNumber: ListenSequenceNumber) => void diff --git a/packages/firestore/src/local/simple_db.ts b/packages/firestore/src/local/simple_db.ts index 9ff5beb75be..a2a79f3e74d 100644 --- a/packages/firestore/src/local/simple_db.ts +++ b/packages/firestore/src/local/simple_db.ts @@ -499,13 +499,14 @@ export class SimpleDbStore< } /** - * Iterates over a store, but waits for the given callback to complete for each entry - * before iterating the next entry. This allows the callback to do asynchronous work - * to determine if this iteration should continue. + * Iterates over a store, but waits for the given callback to complete for + * each entry before iterating the next entry. This allows the callback to do + * asynchronous work to determine if this iteration should continue. * - * The provided callback should return `true` to continue iteration, and `false` otherwise. + * The provided callback should return `true` to continue iteration, and + * `false` otherwise. 
*/ - iterateAsync( + iterateSerial( callback: (k: KeyType, v: ValueType) => PersistencePromise ): PersistencePromise { const cursorRequest = this.cursor({}); From 16eaaca4d4726515c8b177eba46c6451b1b61b36 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 15:58:01 -0700 Subject: [PATCH 14/23] onLimboDocumentUpdated -> updateLimboDocument --- packages/firestore/src/local/indexeddb_persistence.ts | 2 +- packages/firestore/src/local/persistence.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 890041f26e6..3b3363d180d 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -1185,7 +1185,7 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { return this.db.getQueryCache().updateQueryData(txn, updated); } - onLimboDocumentUpdated( + updateLimboDocument( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { diff --git a/packages/firestore/src/local/persistence.ts b/packages/firestore/src/local/persistence.ts index 986224e7af4..c16db4eb3c7 100644 --- a/packages/firestore/src/local/persistence.ts +++ b/packages/firestore/src/local/persistence.ts @@ -97,7 +97,7 @@ export interface ReferenceDelegate { ): PersistencePromise; /** Notify the delegate that a limbo document was updated. 
*/ - onLimboDocumentUpdated( + updateLimboDocument( txn: PersistenceTransaction, doc: DocumentKey ): PersistencePromise; From e25162cd86fdd05ca2dfed3765132763c54cd266 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 16:08:05 -0700 Subject: [PATCH 15/23] Reorder lru delegate methods to match android --- packages/firestore/src/local/lru_garbage_collector.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/firestore/src/local/lru_garbage_collector.ts b/packages/firestore/src/local/lru_garbage_collector.ts index 341e9800a83..918eabbc4ac 100644 --- a/packages/firestore/src/local/lru_garbage_collector.ts +++ b/packages/firestore/src/local/lru_garbage_collector.ts @@ -29,14 +29,14 @@ import { AnyJs } from '../util/misc'; export interface LruDelegate { readonly garbageCollector: LruGarbageCollector; - getTargetCount(txn: PersistenceTransaction): PersistencePromise; - /** Enumerates all the targets in the QueryCache. */ forEachTarget( txn: PersistenceTransaction, f: (target: QueryData) => void ): PersistencePromise; + getTargetCount(txn: PersistenceTransaction): PersistencePromise; + /** * Enumerates sequence numbers for documents not associated with a target. * Note that this may include duplicate sequence numbers. 
From 279e14a9384460e18227d85ed176726bdb9b6811 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 16:35:14 -0700 Subject: [PATCH 16/23] Regroup tests a little --- .../unit/local/lru_garbage_collector.test.ts | 195 +++++++++--------- 1 file changed, 99 insertions(+), 96 deletions(-) diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts index d5f5e74a4fe..bcc659973e2 100644 --- a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -29,7 +29,6 @@ import { path, wrapObject } from '../../util/helpers'; import { QueryCache } from '../../../src/local/query_cache'; import { ActiveTargets, - LruDelegate, LruGarbageCollector } from '../../../src/local/lru_garbage_collector'; import { ListenSequence } from '../../../src/core/listen_sequence'; @@ -241,7 +240,7 @@ function genericLruGarbageCollectorTests( ); } - it('pick sequence number percentile', async () => { + it('picks sequence number percentile', async () => { const testCases: Array<{ targets: number; expected: number }> = [ //{ targets: 0, expected: 0 }, { targets: 10, expected: 1 }, @@ -263,104 +262,106 @@ function genericLruGarbageCollectorTests( } }); - it('sequence number for no targets', async () => { - expect(await nthSequenceNumber(0)).to.equal(ListenSequence.INVALID); - }); + describe('nthSequenceNumber()', () => { + it('sequence number for no targets', async () => { + expect(await nthSequenceNumber(0)).to.equal(ListenSequence.INVALID); + }); - it('sequence number for 50 targets', async () => { - // Add 50 queries sequentially, aim to collect 10 of them. - // The sequence number to collect should be 10 past the initial sequence number. 
- for (let i = 0; i < 50; i++) { - await addNextTarget(); - } - const expected = initialSequenceNumber + 10; - expect(await nthSequenceNumber(10)).to.equal(expected); - }); + it('with 50 targets', async () => { + // Add 50 queries sequentially, aim to collect 10 of them. + // The sequence number to collect should be 10 past the initial sequence number. + for (let i = 0; i < 50; i++) { + await addNextTarget(); + } + const expected = initialSequenceNumber + 10; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); - it('sequence number for multiple targets in a transaction', async () => { - // 50 queries, 9 with one transaction, incrementing from there. Should get second sequence - // number. - await persistence.runTransaction( - '9 targets in a batch', - 'readwrite', - txn => { - let p = PersistencePromise.resolve(); - for (let i = 0; i < 9; i++) { - p = p.next(() => addNextTargetInTransaction(txn)).next(); + it('with multiple targets in a transaction', async () => { + // 50 queries, 9 with one transaction, incrementing from there. Should get second sequence + // number. 
+ await persistence.runTransaction( + '9 targets in a batch', + 'readwrite', + txn => { + let p = PersistencePromise.resolve(); + for (let i = 0; i < 9; i++) { + p = p.next(() => addNextTargetInTransaction(txn)).next(); + } + return p; } - return p; + ); + for (let i = 9; i < 50; i++) { + await addNextTarget(); } - ); - for (let i = 9; i < 50; i++) { - await addNextTarget(); - } - const expected = initialSequenceNumber + 2; - expect(await nthSequenceNumber(10)).to.equal(expected); - }); + const expected = initialSequenceNumber + 2; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); - it('all collected targets in a single transaction', async () => { - await persistence.runTransaction( - '11 targets in a batch', - 'readwrite', - txn => { - let p = PersistencePromise.resolve(); - for (let i = 0; i < 11; i++) { - p = p.next(() => addNextTargetInTransaction(txn)).next(); + it('with all collected targets in a single transaction', async () => { + await persistence.runTransaction( + '11 targets in a batch', + 'readwrite', + txn => { + let p = PersistencePromise.resolve(); + for (let i = 0; i < 11; i++) { + p = p.next(() => addNextTargetInTransaction(txn)).next(); + } + return p; } - return p; + ); + for (let i = 11; i < 50; i++) { + await addNextTarget(); } - ); - for (let i = 11; i < 50; i++) { - await addNextTarget(); - } - const expected = initialSequenceNumber + 1; - expect(await nthSequenceNumber(10)).to.equal(expected); - }); - - it('sequence number with mutation and sequential targets', async () => { - // Remove a mutated doc reference, marking it as eligible for GC. - // Then add 50 queries. Should get 10 past initial (9 queries). 
- await markADocumentEligibleForGC(); - for (let i = 0; i < 50; i++) { - await addNextTarget(); - } - - const expected = initialSequenceNumber + 10; - expect(await nthSequenceNumber(10)).to.equal(expected); - }); + const expected = initialSequenceNumber + 1; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); - it('sequence numbers with mutations in targets', async () => { - // Add mutated docs, then add one of them to a query target so it doesn't get GC'd. - // Expect 3 past the initial value: the mutations not part of a query, and two queries - const docInTarget = nextTestDocumentKey(); - await persistence.runTransaction('mark mutations', 'readwrite', txn => { - // Adding 9 doc keys in a transaction. If we remove one of them, we'll have room for two - // actual targets. - let p = markDocumentEligibleForGCInTransaction(txn, docInTarget); - for (let i = 0; i < 8; i++) { - p = p.next(() => markADocumentEligibleForGCInTransaction(txn)); + it('with mutation and sequential targets', async () => { + // Remove a mutated doc reference, marking it as eligible for GC. + // Then add 50 queries. Should get 10 past initial (9 queries). + await markADocumentEligibleForGC(); + for (let i = 0; i < 50; i++) { + await addNextTarget(); } - return p; + + const expected = initialSequenceNumber + 10; + expect(await nthSequenceNumber(10)).to.equal(expected); }); - for (let i = 0; i < 49; i++) { - await addNextTarget(); - } - await persistence.runTransaction( - 'target with a mutation', - 'readwrite', - txn => { - return addNextTargetInTransaction(txn).next(queryData => { - const keySet = documentKeySet().add(docInTarget); - return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); - }); + it('with mutations in targets', async () => { + // Add mutated docs, then add one of them to a query target so it doesn't get GC'd. 
+ // Expect 3 past the initial value: the mutations not part of a query, and two queries + const docInTarget = nextTestDocumentKey(); + await persistence.runTransaction('mark mutations', 'readwrite', txn => { + // Adding 9 doc keys in a transaction. If we remove one of them, we'll have room for two + // actual targets. + let p = markDocumentEligibleForGCInTransaction(txn, docInTarget); + for (let i = 0; i < 8; i++) { + p = p.next(() => markADocumentEligibleForGCInTransaction(txn)); + } + return p; + }); + + for (let i = 0; i < 49; i++) { + await addNextTarget(); } - ); - const expected = initialSequenceNumber + 3; - expect(await nthSequenceNumber(10)).to.equal(expected); + await persistence.runTransaction( + 'target with a mutation', + 'readwrite', + txn => { + return addNextTargetInTransaction(txn).next(queryData => { + const keySet = documentKeySet().add(docInTarget); + return queryCache.addMatchingKeys(txn, keySet, queryData.targetId); + }); + } + ); + const expected = initialSequenceNumber + 3; + expect(await nthSequenceNumber(10)).to.equal(expected); + }); }); - it('remove targets up through sequence number', async () => { + it('removes targets up through sequence number', async () => { const activeTargetIds: ActiveTargets = {}; for (let i = 0; i < 100; i++) { const queryData = await addNextTarget(); @@ -393,7 +394,7 @@ function genericLruGarbageCollectorTests( ); }); - it('remove orphaned documents', async () => { + it('removes orphaned documents', async () => { // Track documents we expect to be retained so we can verify post-GC. // This will contain documents associated with targets that survive GC, as well // as any documents with pending mutations. 
@@ -504,7 +505,7 @@ function genericLruGarbageCollectorTests( }); }); - it('remove targets then GC', async () => { + it('removes targets then GCs', async () => { // Create 3 targets, add docs to all of them // Leave oldest target alone, it is still live // Remove newest target @@ -520,17 +521,19 @@ function genericLruGarbageCollectorTests( // Expect: // All docs in oldest target are still around // One blind write is gone, the first one not added to oldest target - // Documents removed from middle target are gone, except ones added to oldest target - // Documents from newest target are gone, except + // Documents removed from middle target are gone, except ones added to the + // oldest target + // Documents from newest target are gone, except those added to the oldest + // target - // Through the various steps, track which documents we expect to be removed vs - // documents we expect to be retained. + // Through the various steps, track which documents we expect to be removed + // vs documents we expect to be retained. const expectedRetained = new Set(); const expectedRemoved = new Set(); // Add oldest target, 5 documents, and add those documents to the target. - // This target will not be removed, so all documents that are part of it will - // be retained. + // This target will not be removed, so all documents that are part of it + // will be retained. const oldestTarget = await persistence.runTransaction( 'Add oldest target and docs', 'readwrite', @@ -555,8 +558,8 @@ function genericLruGarbageCollectorTests( } ); - // Add middle target and docs. Some docs will be removed from this target later, - // which we track here. + // Add middle target and docs. Some docs will be removed from this target + // later, which we track here. let middleDocsToRemove = documentKeySet(); let middleDocToUpdate: DocumentKey; // This will be the document in this target that gets an update later. 
From 34f7967d20852eabf384a37fa48606f87dfd8b00 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Mon, 17 Sep 2018 16:51:59 -0700 Subject: [PATCH 17/23] A little test cleanup --- .../test/unit/local/lru_garbage_collector.test.ts | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts index bcc659973e2..adbc4eb8091 100644 --- a/packages/firestore/test/unit/local/lru_garbage_collector.test.ts +++ b/packages/firestore/test/unit/local/lru_garbage_collector.test.ts @@ -62,12 +62,16 @@ describe('IndexedDbLruReferenceDelegate', () => { function genericLruGarbageCollectorTests( newPersistence: () => Promise ): void { + // We need to initialize a few counters so that we can use them when we + // auto-generate things like targets and documents. Pick arbitrary values + // such that sequences are unlikely to overlap as we increment them. let previousTargetId: TargetId; let previousDocNum: number; + beforeEach(async () => { previousTargetId = 500; previousDocNum = 10; - await newTestResources(); + await initializeTestResources(); }); afterEach(async () => { @@ -80,7 +84,8 @@ function genericLruGarbageCollectorTests( let initialSequenceNumber: ListenSequenceNumber; let mutationQueue: MutationQueue; let documentCache: RemoteDocumentCache; - async function newTestResources(): Promise { + + async function initializeTestResources(): Promise { if (persistence && persistence.started) { await persistence.shutdown(/* deleteData= */ true); } @@ -145,7 +150,7 @@ function genericLruGarbageCollectorTests( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - // TODO: change this once reference delegate is added to the persistence interface + // TODO(gsoltis): change this once reference delegate is added to the persistence interface return (persistence as IndexedDbPersistence).referenceDelegate.removeMutationReference( txn, 
key @@ -250,7 +255,7 @@ function genericLruGarbageCollectorTests( ]; for (const { targets, expected } of testCases) { - await newTestResources(); + await initializeTestResources(); for (let i = 0; i < targets; i++) { await addNextTarget(); } From 801694ce1829f91ed28f0eeec5cc84e36a547fba Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Tue, 18 Sep 2018 16:43:05 -0700 Subject: [PATCH 18/23] additionalReferences -> inMemoryPins --- packages/firestore/src/local/indexeddb_persistence.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index 3b3363d180d..bc9edef69f1 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -1049,7 +1049,7 @@ function clientMetadataStore( /** Provides LRU functionality for IndexedDB persistence. */ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { - private additionalReferences: ReferenceSet | null; + private inMemoryPins: ReferenceSet | null; readonly garbageCollector: LruGarbageCollector; @@ -1078,7 +1078,7 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { } setInMemoryPins(inMemoryPins: ReferenceSet): void { - this.additionalReferences = inMemoryPins; + this.inMemoryPins = inMemoryPins; } addReference( @@ -1122,7 +1122,7 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, docKey: DocumentKey ): PersistencePromise { - return this.additionalReferences!.containsKey(txn, docKey).next( + return this.inMemoryPins!.containsKey(txn, docKey).next( isPinned => { if (isPinned) { return PersistencePromise.resolve(true); From 34f62fb7e7d81bb710b3a1eb589e0829dc715522 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 19 Sep 2018 09:40:11 -0700 Subject: [PATCH 19/23] Using SortedSet instead of a PriorityQueue (#1233) Swap out 
priority queue for a sorted set --- .../src/local/lru_garbage_collector.ts | 158 +++++------------- 1 file changed, 40 insertions(+), 118 deletions(-) diff --git a/packages/firestore/src/local/lru_garbage_collector.ts b/packages/firestore/src/local/lru_garbage_collector.ts index 918eabbc4ac..83993da266c 100644 --- a/packages/firestore/src/local/lru_garbage_collector.ts +++ b/packages/firestore/src/local/lru_garbage_collector.ts @@ -18,8 +18,8 @@ import { PersistenceTransaction } from './persistence'; import { PersistencePromise } from './persistence_promise'; import { ListenSequenceNumber } from '../core/types'; import { ListenSequence } from '../core/listen_sequence'; -import { assert } from '../util/assert'; -import { AnyJs } from '../util/misc'; +import { AnyJs, primitiveComparator } from '../util/misc'; +import { SortedSet } from '../util/sorted_set'; /** * Persistence layers intending to use LRU Garbage collection should have reference delegates that @@ -77,139 +77,61 @@ export type ActiveTargets = { [id: number]: AnyJs; }; -/** - * A selective port of `java.util.PriorityQueue` - * {@see PriorityQueue.java} - * The queue does not grow and must have an initial capacity when it is constructed. Additionally, elements may only be - * `poll()`'d and cannot be removed from any other position in the queue. 
- */ -class PriorityQueue { - private _size = 0; - get size(): number { - return this._size; - } - private readonly queue: T[]; - constructor( - private readonly capacity: number, - private readonly comparator: (a: T, b: T) => number - ) { - assert(capacity > 0, 'Capacity must be greater than 0'); - this.queue = new Array(capacity); - } - - add(elem: T): void { - assert(this._size + 1 <= this.capacity, 'Queue is over capacity'); - if (this._size === 0) { - this.queue[0] = elem; - this._size = 1; - } else { - this.siftUp(elem); - } - } - - poll(): T | null { - if (this._size === 0) { - return null; - } - const result = this.queue[0]; - const newSize = --this._size; - const last = this.queue[newSize]; - delete this.queue[newSize]; - if (newSize !== 0) { - this.siftDown(last); - } - return result; - } - - peek(): T | null { - if (this._size > 0) { - return this.queue[0]; - } - return null; - } - - private siftUp(elem: T): void { - let k = this._size; - while (k > 0) { - const parent = (k - 1) >>> 1; - const toCheck = this.queue[parent]; - const comp = this.comparator(elem, toCheck); - if (comp >= 0) { - break; - } - this.queue[k] = toCheck; - k = parent; - } - this.queue[k] = elem; - this._size++; - } - - private siftDown(lastElem: T): void { - let k = 0; - const half = this._size >>> 1; - while (k < half) { - let child = (k << 1) + 1; - let toCheck = this.queue[child]; - const right = child + 1; - if ( - right < this._size && - this.comparator(toCheck, this.queue[right]) > 0 - ) { - toCheck = this.queue[right]; - child = right; - } - if (this.comparator(lastElem, toCheck) <= 0) { - break; - } - this.queue[k] = toCheck; - k = child; - } - this.queue[k] = lastElem; +// The type and comparator for the items contained in the SortedSet used in +// place of a priority queue for the RollingSequenceNumberBuffer. 
+type BufferEntry = [ListenSequenceNumber, number]; +function bufferEntryComparator( + [aSequence, aIndex]: BufferEntry, + [bSequence, bIndex]: BufferEntry +): number { + const seqCmp = primitiveComparator(aSequence, bSequence); + if (seqCmp === 0) { + // This order doesn't matter, but we can bias against churn by sorting + // entries created earlier as less than newer entries. + return primitiveComparator(aIndex, bIndex); + } else { + return seqCmp; } } /** - * Used to calculate the nth sequence number. Keeps a rolling buffer of the lowest n values passed - * to `addElement`, and finally reports the largest of them in `maxValue`. + * Used to calculate the nth sequence number. Keeps a rolling buffer of the + * lowest n values passed to `addElement`, and finally reports the largest of + * them in `maxValue`. */ class RollingSequenceNumberBuffer { - private queue: PriorityQueue; + private buffer: SortedSet = new SortedSet( + bufferEntryComparator + ); - // Invert the comparison because we want to keep the smallest values. 
- private static COMPARATOR: ( - a: ListenSequenceNumber, - b: ListenSequenceNumber - ) => number = (a, b) => { - if (b < a) { - return -1; - } else if (b === a) { - return 0; - } - return 1; - }; + private previousIndex = 0; + + constructor(private readonly maxElements: number) {} - constructor(private readonly maxElements: number) { - this.queue = new PriorityQueue( - maxElements, - RollingSequenceNumberBuffer.COMPARATOR - ); + private nextIndex(): number { + return ++this.previousIndex; } addElement(sequenceNumber: ListenSequenceNumber): void { - if (this.queue.size < this.maxElements) { - this.queue.add(sequenceNumber); + const entry: BufferEntry = [sequenceNumber, this.nextIndex()]; + if (this.buffer.size < this.maxElements) { + this.buffer = this.buffer.add(entry); } else { - // Note: use first because we have inverted the comparison - const highestValue = this.queue.peek()!; - if (sequenceNumber < highestValue) { - this.queue.poll(); - this.queue.add(sequenceNumber); + const highestValue = this.buffer.last()!; + if (bufferEntryComparator(entry, highestValue) < 0) { + this.buffer = this.buffer.delete(highestValue).add(entry); } } } get maxValue(): ListenSequenceNumber { - return this.queue.peek()!; + // Guaranteed to be non-empty. If we decide we are not collecting any + // sequence numbers, nthSequenceNumber below short-circuits. If we have + // decided that we are collecting n sequence numbers, it's because n is some + // percentage of the existing sequence numbers. That means we should never + // be in a situation where we are collecting sequence numbers but don't + // actually have any. + return this.buffer.last()![0]; } } From 3b748b15f59f37bd8b80a1ee7aa0f57e492ae886 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 19 Sep 2018 10:42:39 -0700 Subject: [PATCH 20/23] Drop sentinel row and sentinel key store. 
Add sequence number to target document store --- .../src/local/indexeddb_persistence.ts | 64 ++++++------------- .../src/local/indexeddb_query_cache.ts | 2 +- .../firestore/src/local/indexeddb_schema.ts | 28 ++++++-- 3 files changed, 42 insertions(+), 52 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index bc9edef69f1..cb20d0dba95 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -27,7 +27,8 @@ import { } from './indexeddb_mutation_queue'; import { IndexedDbQueryCache, - getHighestListenSequenceNumber + getHighestListenSequenceNumber, + documentTargetStore } from './indexeddb_query_cache'; import { IndexedDbRemoteDocumentCache } from './indexeddb_remote_document_cache'; import { @@ -1122,15 +1123,13 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, docKey: DocumentKey ): PersistencePromise { - return this.inMemoryPins!.containsKey(txn, docKey).next( - isPinned => { - if (isPinned) { - return PersistencePromise.resolve(true); - } else { - return mutationQueuesContainKey(txn, docKey); - } + return this.inMemoryPins!.containsKey(txn, docKey).next(isPinned => { + if (isPinned) { + return PersistencePromise.resolve(true); + } else { + return mutationQueuesContainKey(txn, docKey); } - ); + }); } removeOrphanedDocuments( @@ -1170,7 +1169,7 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { docKey: DocumentKey ): PersistencePromise { return PersistencePromise.waitFor([ - sentinelKeyStore(txn).delete(sentinelKey(docKey)), + documentTargetStore(txn).delete(sentinelKey(docKey)), this.db.getRemoteDocumentCache().removeEntry(txn, docKey) ]); } @@ -1202,7 +1201,7 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { txn: PersistenceTransaction, f: (docKey: DocumentKey, sequenceNumber: 
ListenSequenceNumber) => void ): PersistencePromise { - const store = sentinelKeyStore(txn); + const store = documentTargetStore(txn); let nextToReport: ListenSequenceNumber = ListenSequence.INVALID; let nextPath: EncodedResourcePath; return store @@ -1219,7 +1218,9 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { } // set nextToReport to be this sequence number. It's the next one we // might report, if we don't find any targets for this document. - nextToReport = sequenceNumber; + // Note that the sequence number must be defined when the targetId + // is 0. + nextToReport = sequenceNumber!; nextPath = path; } else { // set nextToReport to be invalid, we know we don't need to report @@ -1239,51 +1240,26 @@ export class IndexedDbLruDelegate implements ReferenceDelegate, LruDelegate { } } -/** - * `SentinelRow` describes the schema of rows in the DbTargetDocument store that - * have TargetId === 0. The sequence number is an approximation of a last-used value - * for the document identified by the path portion of the key. - */ -type SentinelRow = { - targetId: TargetId; - path: EncodedResourcePath; - sequenceNumber: ListenSequenceNumber; -}; - -/** - * The sentinel key store is the same as the DbTargetDocument store, but allows for - * reading and writing sequence numbers as part of the value stored. - */ -function sentinelKeyStore( - txn: PersistenceTransaction -): SimpleDbStore { - return IndexedDbPersistence.getStore( - txn, - DbTargetDocument.store - ); -} - function sentinelKey(key: DocumentKey): [TargetId, EncodedResourcePath] { return [0, encode(key.path)]; } /** - * @return A value suitable for writing in the sentinel key store. + * @return A value suitable for writing a sentinel row in the target-document + * store. 
*/ function sentinelRow( key: DocumentKey, sequenceNumber: ListenSequenceNumber -): SentinelRow { - return { - targetId: 0, - path: encode(key.path), - sequenceNumber - }; +): DbTargetDocument { + return new DbTargetDocument(0, encode(key.path), sequenceNumber); } function writeSentinelKey( txn: PersistenceTransaction, key: DocumentKey ): PersistencePromise { - return sentinelKeyStore(txn).put(sentinelRow(key, txn.currentSequenceNumber)); + return documentTargetStore(txn).put( + sentinelRow(key, txn.currentSequenceNumber) + ); } diff --git a/packages/firestore/src/local/indexeddb_query_cache.ts b/packages/firestore/src/local/indexeddb_query_cache.ts index b123e61e714..29a0637e542 100644 --- a/packages/firestore/src/local/indexeddb_query_cache.ts +++ b/packages/firestore/src/local/indexeddb_query_cache.ts @@ -476,7 +476,7 @@ export function getHighestListenSequenceNumber( /** * Helper to get a typed SimpleDbStore for the document target object store. */ -function documentTargetStore( +export function documentTargetStore( txn: PersistenceTransaction ): SimpleDbStore { return IndexedDbPersistence.getStore( diff --git a/packages/firestore/src/local/indexeddb_schema.ts b/packages/firestore/src/local/indexeddb_schema.ts index 422b6843f44..5953a51c49b 100644 --- a/packages/firestore/src/local/indexeddb_schema.ts +++ b/packages/firestore/src/local/indexeddb_schema.ts @@ -15,7 +15,7 @@ */ import * as api from '../protos/firestore_proto_api'; -import { BatchId } from '../core/types'; +import { BatchId, ListenSequenceNumber } from '../core/types'; import { TargetId } from '../core/types'; import { ResourcePath } from '../model/path'; import { assert } from '../util/assert'; @@ -573,9 +573,14 @@ export class DbTarget { export type DbTargetDocumentKey = [TargetId, EncodedResourcePath]; /** - * An object representing an association between a target and a document. - * Stored in the targetDocument object store to store the documents tracked by a - * particular target. 
+ * An object representing an association between a target and a document, or a + * sentinel row marking the last sequence number at which a document was used. + * Each document cached must have a corresponding sentinel row before lru + * garbage collection is enabled. + * + * The target associations and sentinel rows are co-located so that orphaned + * documents and their sequence numbers can be identified efficiently via a scan + * of this store. */ export class DbTargetDocument { /** Name of the IndexedDb object store. */ @@ -592,14 +597,23 @@ export class DbTargetDocument { constructor( /** - * The targetId identifying a target. + * The targetId identifying a target or 0 for a sentinel row. */ public targetId: TargetId, /** * The path to the document, as encoded in the key. */ - public path: EncodedResourcePath - ) {} + public path: EncodedResourcePath, + /** + * + */ + public sequenceNumber?: ListenSequenceNumber + ) { + assert( + (targetId !== 0) !== (typeof sequenceNumber !== 'undefined'), + 'A target-document row must either have targetId == 0 and a defined sequence number, or a non-zero targetId and no sequence number' + ); + } } /** From 366418e27dd7a634c5995e6fff59af38c5e43614 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 19 Sep 2018 11:08:29 -0700 Subject: [PATCH 21/23] Lint --- packages/firestore/src/local/indexeddb_persistence.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_persistence.ts b/packages/firestore/src/local/indexeddb_persistence.ts index cb20d0dba95..3df5afc033f 100644 --- a/packages/firestore/src/local/indexeddb_persistence.ts +++ b/packages/firestore/src/local/indexeddb_persistence.ts @@ -40,8 +40,7 @@ import { SCHEMA_VERSION, DbTargetGlobal, SchemaConverter, - DbTargetDocument, - DbTargetDocumentKey + DbTargetDocument } from './indexeddb_schema'; import { LocalSerializer } from './local_serializer'; import { MutationQueue } from './mutation_queue'; From 
fcbb366b9d2fc94b279b7276834ff24aaabb9f75 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 19 Sep 2018 11:16:00 -0700 Subject: [PATCH 22/23] Comments and flip conditional --- packages/firestore/src/local/indexeddb_schema.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/firestore/src/local/indexeddb_schema.ts b/packages/firestore/src/local/indexeddb_schema.ts index 5953a51c49b..3f08c035fc9 100644 --- a/packages/firestore/src/local/indexeddb_schema.ts +++ b/packages/firestore/src/local/indexeddb_schema.ts @@ -605,12 +605,14 @@ export class DbTargetDocument { */ public path: EncodedResourcePath, /** - * + * If this is a sentinel row, this should be the sequence number of the last + * time the document specified by `path` was used. Otherwise, it should be + * `undefined`. */ public sequenceNumber?: ListenSequenceNumber ) { assert( - (targetId !== 0) !== (typeof sequenceNumber !== 'undefined'), + (targetId === 0) === (typeof sequenceNumber !== undefined), 'A target-document row must either have targetId == 0 and a defined sequence number, or a non-zero targetId and no sequence number' ); } From c922dce28b606df3c1104020eada3a15bf317300 Mon Sep 17 00:00:00 2001 From: Greg Soltis Date: Wed, 19 Sep 2018 11:25:41 -0700 Subject: [PATCH 23/23] Compare to undefined, not typeof undefined --- packages/firestore/src/local/indexeddb_schema.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/firestore/src/local/indexeddb_schema.ts b/packages/firestore/src/local/indexeddb_schema.ts index 3f08c035fc9..8bff013e8d9 100644 --- a/packages/firestore/src/local/indexeddb_schema.ts +++ b/packages/firestore/src/local/indexeddb_schema.ts @@ -612,7 +612,7 @@ export class DbTargetDocument { public sequenceNumber?: ListenSequenceNumber ) { assert( - (targetId === 0) === (typeof sequenceNumber !== undefined), + (targetId === 0) === (sequenceNumber !== undefined), 'A target-document row must either have targetId == 0 and a defined 
sequence number, or a non-zero targetId and no sequence number' ); }