Skip to content

Parse Overlays in background #3420

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Feb 10, 2022
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import com.google.firebase.firestore.model.mutation.Mutation;
import com.google.firebase.firestore.model.mutation.Overlay;
import java.util.Map;
import java.util.SortedSet;

/**
* Provides methods to read and write document overlays.
Expand All @@ -38,6 +39,12 @@ public interface DocumentOverlayCache {
@Nullable
Overlay getOverlay(DocumentKey key);

/**
* Gets the saved overlay mutations for the given document keys. Skips keys for which there are no
* overlays.
*/
Map<DocumentKey, Overlay> getOverlays(SortedSet<DocumentKey> keys);

/**
* Saves the given document key to mutation map to persistence as overlays. All overlays will have
* their largest batch id set to {@code largestBatchId}.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,9 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

/**
* A readonly view of the local state of all documents we're tracking (i.e. we have a cached version
Expand Down Expand Up @@ -115,24 +117,20 @@ ImmutableSortedMap<DocumentKey, Document> getDocuments(Iterable<DocumentKey> key
*/
ImmutableSortedMap<DocumentKey, Document> getLocalViewOfDocuments(
Map<DocumentKey, MutableDocument> docs, Set<DocumentKey> existenceStateChanged) {
return computeViews(docs, Collections.emptyMap(), existenceStateChanged);
Map<DocumentKey, Overlay> overlays = new HashMap<>();
populateOverlays(overlays, docs.keySet());
return computeViews(docs, overlays, existenceStateChanged);
}

/**
* Computes the local view for doc, applying overlays from both {@code memoizedOverlays} and the
* overlay cache.
*/
/* Computes the local view for each document, applying the overlays from the given map. */
private ImmutableSortedMap<DocumentKey, Document> computeViews(
Map<DocumentKey, MutableDocument> docs,
Map<DocumentKey, Overlay> memoizedOverlays,
Map<DocumentKey, Overlay> overlays,
Set<DocumentKey> existenceStateChanged) {
ImmutableSortedMap<DocumentKey, Document> results = emptyDocumentMap();
Map<DocumentKey, MutableDocument> recalculateDocuments = new HashMap<>();
for (MutableDocument doc : docs.values()) {
Overlay overlay =
memoizedOverlays.containsKey(doc.getKey())
? memoizedOverlays.get(doc.getKey())
: documentOverlayCache.getOverlay(doc.getKey());
Overlay overlay = overlays.get(doc.getKey());
// Recalculate an overlay if the document's existence state is changed due to a remote
// event *and* the overlay is a PatchMutation. This is because document existence state
// can change if some patch mutation's preconditions are met.
Expand Down Expand Up @@ -290,11 +288,26 @@ LocalDocumentsResult getNextDocuments(String collectionGroup, IndexOffset offset
largestBatchId = Math.max(largestBatchId, overlay.getLargestBatchId());
}

populateOverlays(overlays, docs.keySet());
ImmutableSortedMap<DocumentKey, Document> localDocs =
computeViews(docs, overlays, Collections.emptySet());
return new LocalDocumentsResult(largestBatchId, localDocs);
}

/**
 * Fetches the overlays for {@code keys} from the overlay cache and merges them into the given
 * overlay map, leaving any entries the map already contains untouched.
 */
private void populateOverlays(Map<DocumentKey, Overlay> overlays, Set<DocumentKey> keys) {
  // Collect only the keys whose overlays have not been fetched yet. A TreeSet (natural
  // ordering) is used because DocumentOverlayCache.getOverlays() requires natural order.
  SortedSet<DocumentKey> keysToFetch = new TreeSet<>();
  for (DocumentKey key : keys) {
    if (overlays.containsKey(key)) {
      continue;
    }
    keysToFetch.add(key);
  }
  overlays.putAll(documentOverlayCache.getOverlays(keysToFetch));
}

private ImmutableSortedMap<DocumentKey, Document> getDocumentsMatchingCollectionQuery(
Query query, IndexOffset offset) {
Map<DocumentKey, MutableDocument> remoteDocuments =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;

public class MemoryDocumentOverlayCache implements DocumentOverlayCache {
Expand All @@ -38,6 +39,17 @@ public Overlay getOverlay(DocumentKey key) {
return overlays.get(key);
}

/**
 * Gets the saved overlay mutations for the given document keys. Keys for which no overlay is
 * stored are omitted from the result.
 */
@Override
public Map<DocumentKey, Overlay> getOverlays(SortedSet<DocumentKey> keys) {
  Map<DocumentKey, Overlay> result = new HashMap<>();
  for (DocumentKey key : keys) {
    // Skip keys with no saved overlay, per the DocumentOverlayCache contract.
    Overlay overlay = overlays.get(key);
    if (overlay != null) {
      result.put(key, overlay);
    }
  }
  return result;
}

private void saveOverlay(int largestBatchId, @Nullable Mutation mutation) {
if (mutation == null) {
return;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,26 @@
package com.google.firebase.firestore.local;

import static com.google.firebase.firestore.util.Assert.fail;
import static com.google.firebase.firestore.util.Assert.hardAssert;

import android.database.Cursor;
import androidx.annotation.Nullable;
import com.google.firebase.firestore.auth.User;
import com.google.firebase.firestore.model.DocumentKey;
import com.google.firebase.firestore.model.ResourcePath;
import com.google.firebase.firestore.model.mutation.Mutation;
import com.google.firebase.firestore.model.mutation.Overlay;
import com.google.firebase.firestore.util.BackgroundQueue;
import com.google.firebase.firestore.util.Executors;
import com.google.firestore.v1.Write;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.Executor;

public class SQLiteDocumentOverlayCache implements DocumentOverlayCache {
private final SQLitePersistence db;
Expand All @@ -47,7 +56,54 @@ public Overlay getOverlay(DocumentKey key) {
"SELECT overlay_mutation, largest_batch_id FROM document_overlays "
+ "WHERE uid = ? AND collection_path = ? AND document_id = ?")
.binding(uid, collectionPath, documentId)
.firstValue(this::decodeOverlay);
.firstValue(row -> this.decodeOverlay(row.getBlob(0), row.getInt(1)));
}

/**
 * Gets the saved overlay mutations for the given document keys, batching the SQL reads so that
 * all documents of the same collection are fetched with a single query. Keys for which no
 * overlay row exists are omitted from the result.
 */
@Override
public Map<DocumentKey, Overlay> getOverlays(SortedSet<DocumentKey> keys) {
  // Natural ordering guarantees that keys belonging to the same collection are contiguous,
  // which the grouping loop below relies on.
  hardAssert(keys.comparator() == null, "getOverlays() requires natural order");
  Map<DocumentKey, Overlay> result = new HashMap<>();

  BackgroundQueue backgroundQueue = new BackgroundQueue();
  ResourcePath currentCollection = ResourcePath.EMPTY;
  List<Object> accumulatedDocumentIds = new ArrayList<>();
  for (DocumentKey key : keys) {
    if (!currentCollection.equals(key.getCollectionPath())) {
      // A new collection starts: flush the document ids accumulated for the previous
      // collection, then begin a fresh group. The very first iteration flushes an empty
      // list for ResourcePath.EMPTY, which processSingleCollection() ignores.
      processSingleCollection(result, backgroundQueue, currentCollection, accumulatedDocumentIds);
      currentCollection = key.getCollectionPath();
      accumulatedDocumentIds = new ArrayList<>();
    }
    accumulatedDocumentIds.add(key.getDocumentId());
  }

  // Flush the final group, then wait for any decodes dispatched to the background queue to
  // finish before returning the (now fully populated) result.
  processSingleCollection(result, backgroundQueue, currentCollection, accumulatedDocumentIds);
  backgroundQueue.drain();
  return result;
}

/**
 * Reads the overlay rows for the given document ids, all of which belong to the collection at
 * {@code collectionPath}, and hands each row off for decoding into {@code result}.
 */
private void processSingleCollection(
    Map<DocumentKey, Overlay> result,
    BackgroundQueue backgroundQueue,
    ResourcePath collectionPath,
    List<Object> documentIds) {
  if (documentIds.isEmpty()) {
    return;
  }

  // LongQuery splits the id list across multiple IN(...) subqueries — presumably to stay
  // under SQLite's bind-variable limit; confirm in SQLitePersistence.LongQuery.
  SQLitePersistence.LongQuery query =
      new SQLitePersistence.LongQuery(
          db,
          "SELECT overlay_mutation, largest_batch_id FROM document_overlays "
              + "WHERE uid = ? AND collection_path = ? AND document_id IN (",
          Arrays.asList(uid, EncodedPath.encode(collectionPath)),
          documentIds,
          ")");
  while (query.hasMoreSubqueries()) {
    query
        .performNextSubquery()
        .forEach(r -> processOverlaysInBackground(backgroundQueue, result, r));
  }
}

private void saveOverlay(int largestBatchId, DocumentKey key, @Nullable Mutation mutation) {
Expand Down Expand Up @@ -83,49 +139,48 @@ public void removeOverlaysForBatchId(int batchId) {

@Override
public Map<DocumentKey, Overlay> getOverlays(ResourcePath collection, int sinceBatchId) {
String collectionPath = EncodedPath.encode(collection);

Map<DocumentKey, Overlay> result = new HashMap<>();
BackgroundQueue backgroundQueue = new BackgroundQueue();
db.query(
"SELECT overlay_mutation, largest_batch_id FROM document_overlays "
+ "WHERE uid = ? AND collection_path = ? AND largest_batch_id > ?")
.binding(uid, collectionPath, sinceBatchId)
.forEach(
row -> {
Overlay overlay = decodeOverlay(row);
result.put(overlay.getKey(), overlay);
});

.binding(uid, EncodedPath.encode(collection), sinceBatchId)
.forEach(row -> processOverlaysInBackground(backgroundQueue, result, row));
backgroundQueue.drain();
return result;
}

@Override
public Map<DocumentKey, Overlay> getOverlays(
String collectionGroup, int sinceBatchId, int count) {
Map<DocumentKey, Overlay> result = new HashMap<>();
Overlay[] lastOverlay = new Overlay[] {null};
String[] lastCollectionPath = new String[1];
String[] lastDocumentPath = new String[1];
int[] lastLargestBatchId = new int[1];

BackgroundQueue backgroundQueue = new BackgroundQueue();
db.query(
"SELECT overlay_mutation, largest_batch_id FROM document_overlays "
"SELECT overlay_mutation, largest_batch_id, collection_path, document_id "
+ " FROM document_overlays "
+ "WHERE uid = ? AND collection_group = ? AND largest_batch_id > ? "
+ "ORDER BY largest_batch_id, collection_path, document_id LIMIT ?")
.binding(uid, collectionGroup, sinceBatchId, count)
.forEach(
row -> {
lastOverlay[0] = decodeOverlay(row);
result.put(lastOverlay[0].getKey(), lastOverlay[0]);
lastLargestBatchId[0] = row.getInt(1);
lastCollectionPath[0] = row.getString(2);
lastDocumentPath[0] = row.getString(3);
processOverlaysInBackground(backgroundQueue, result, row);
});

if (lastOverlay[0] == null) {
if (lastCollectionPath[0] == null) {
return result;
}

// This function should not return partial batch overlays, even if the number of overlays in the
// result set exceeds the given `count` argument. Since the `LIMIT` in the above query might
// result in a partial batch, the following query appends any remaining overlays for the last
// batch.
DocumentKey key = lastOverlay[0].getKey();
String encodedCollectionPath = EncodedPath.encode(key.getCollectionPath());
db.query(
"SELECT overlay_mutation, largest_batch_id FROM document_overlays "
+ "WHERE uid = ? AND collection_group = ? "
Expand All @@ -134,23 +189,35 @@ public Map<DocumentKey, Overlay> getOverlays(
.binding(
uid,
collectionGroup,
encodedCollectionPath,
encodedCollectionPath,
key.getDocumentId(),
lastOverlay[0].getLargestBatchId())
.forEach(
row -> {
Overlay overlay = decodeOverlay(row);
result.put(overlay.getKey(), overlay);
});

lastCollectionPath[0],
lastCollectionPath[0],
lastDocumentPath[0],
lastLargestBatchId[0])
.forEach(row -> processOverlaysInBackground(backgroundQueue, result, row));
backgroundQueue.drain();
return result;
}

private Overlay decodeOverlay(android.database.Cursor row) {
/**
 * Decodes the overlay contained in {@code row} and adds it to {@code results}, offloading the
 * proto decode to {@code backgroundQueue} while more rows remain.
 */
private void processOverlaysInBackground(
    BackgroundQueue backgroundQueue, Map<DocumentKey, Overlay> results, Cursor row) {
  byte[] rawMutation = row.getBlob(0);
  int largestBatchId = row.getInt(1);

  // Dispatching to a background thread has scheduling overhead, so the final row is decoded
  // directly on the calling thread instead.
  Executor executor;
  if (row.isLast()) {
    executor = Executors.DIRECT_EXECUTOR;
  } else {
    executor = backgroundQueue;
  }
  executor.execute(
      () -> {
        Overlay overlay = decodeOverlay(rawMutation, largestBatchId);
        // Decodes may run concurrently, so guard the shared result map.
        synchronized (results) {
          results.put(overlay.getKey(), overlay);
        }
      });
}

private Overlay decodeOverlay(byte[] rawMutation, int largestBatchId) {
try {
Write write = Write.parseFrom(row.getBlob(0));
int largestBatchId = row.getInt(1);
Write write = Write.parseFrom(rawMutation);
Mutation mutation = serializer.decodeMutation(write);
return Overlay.create(largestBatchId, mutation);
} catch (InvalidProtocolBufferException e) {
Expand Down
Loading