Test large documents #9104

Open · wants to merge 1 commit into main
114 changes: 112 additions & 2 deletions packages/firestore/test/integration/api/database.test.ts
@@ -67,7 +67,8 @@ import {
FirestoreError,
QuerySnapshot,
vector,
getDocsFromServer
getDocsFromServer,
Bytes
} from '../util/firebase_export';
import {
apiDescribe,
@@ -80,7 +81,8 @@ import {
withNamedTestDbsOrSkipUnlessUsingEmulator,
toDataArray,
checkOnlineAndOfflineResultsMatch,
toIds
toIds,
withEmptyTestCollection
} from '../util/helpers';
import { DEFAULT_SETTINGS, DEFAULT_PROJECT_ID } from '../util/settings';

@@ -2691,4 +2693,112 @@ apiDescribe('Database', persistence => {
}
);
});

describe('Test large documents', () => {
/**
* Returns document content containing a Bytes blob sized so that the
* resulting document is as large as Firestore allows.
*/
function getLargestDocContent(): object {
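// Firestore's documented limit for a single field value: 1 MiB minus 89 bytes.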
const MAX_BYTES_PER_FIELD_VALUE = 1048487;
// Subtract 8 for '__name__', 20 for its value, and 4 for 'blob'.
const numBytesToUse = MAX_BYTES_PER_FIELD_VALUE - 8 - 20 - 4;
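// 1048487 - 8 - 20 - 4 = 1048455 bytes of blob content.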

const bytes = new Uint8Array(numBytesToUse);
for (let i = 0; i < bytes.length; i++) {
// Fill with random byte (0-255)
bytes[i] = Math.floor(Math.random() * 256);
}

return { blob: Bytes.fromUint8Array(bytes) };
}

it('can CRUD and query large documents', async () => {
return withEmptyTestCollection(persistence, async coll => {
const docRef = doc(coll);
const data = getLargestDocContent();

// Set
await setDoc(docRef, data);

// Get
let docSnap = await getDoc(docRef);
expect(docSnap.data()).to.deep.equal(data);

// Update
const newData = getLargestDocContent();
await updateDoc(docRef, newData);
docSnap = await getDoc(docRef);
expect(docSnap.data()).to.deep.equal(newData);

// Query
const querySnap = await getDocs(coll);
expect(querySnap.size).to.equal(1);
expect(toDataArray(querySnap)).to.deep.equal([newData]);

// Delete
await deleteDoc(docRef);
docSnap = await getDoc(docRef);
expect(docSnap.exists()).to.be.false;
});
});

it('can CRUD large documents inside transaction', async () => {
return withEmptyTestCollection(persistence, async (coll, db) => {
const data = getLargestDocContent();
const newData = getLargestDocContent();

const docRef1 = doc(coll, 'doc1');
const docRef2 = doc(coll, 'doc2');
const docRef3 = doc(coll, 'doc3');

await setDoc(docRef1, data);
await setDoc(docRef3, data);

await runTransaction(db, async tx => {
// Get and update
const docSnap = await tx.get(docRef1);
expect(docSnap.data()).to.deep.equal(data);
tx.update(docRef1, newData);

// Set
tx.set(docRef2, data);

// Delete
tx.delete(docRef3);
});

let docSnap = await getDoc(docRef1);
expect(docSnap.data()).to.deep.equal(newData);

docSnap = await getDoc(docRef2);
expect(docSnap.data()).to.deep.equal(data);

docSnap = await getDoc(docRef3);
expect(docSnap.exists()).to.be.false;
});
}).timeout(10000);

it('can listen to large query snapshots', async () => {
const testDoc = { doc1: getLargestDocContent() };
return withTestCollection(persistence, testDoc, async coll => {
const storeEvent = new EventsAccumulator<QuerySnapshot>();
const unsubscribe = onSnapshot(coll, storeEvent.storeEvent);
const watchSnapshot = await storeEvent.awaitEvent();
expect(toDataArray(watchSnapshot)).to.deep.equal([testDoc.doc1]);
unsubscribe();
});
});

it('can listen to large document snapshots', async () => {
const testDoc = getLargestDocContent();
return withTestDocAndInitialData(persistence, testDoc, async docRef => {
const storeEvent = new EventsAccumulator<DocumentSnapshot>();
const unsubscribe = onSnapshot(docRef, storeEvent.storeEvent);
const watchSnapshot = await storeEvent.awaitEvent();
expect(watchSnapshot.data()).to.deep.equal(testDoc);
unsubscribe();
});
});
});
});