Skip to content

Commit

Permalink
Remove most debugging messages. Change debugFetch param into fetchMode=chunkByChunk to test it
Browse files Browse the repository at this point in the history
  • Loading branch information
codedread committed Jul 4, 2024
1 parent 6e27a7f commit 8d30712
Show file tree
Hide file tree
Showing 5 changed files with 53 additions and 53 deletions.
2 changes: 0 additions & 2 deletions code/bitjs/archive/common.js
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ export async function getConnectedPort(implFilename) {
const messageChannel = new MessageChannel();
const hostPort = messageChannel.port1;
const implPort = messageChannel.port2;
console.log(`debugFetch: Connected host to implementation with ports`);

if (typeof Worker === 'undefined') {
const implModule = await import(`${implFilename}`);
Expand All @@ -51,7 +50,6 @@ export async function getConnectedPort(implFilename) {
const workerScriptPath = new URL(`./webworker-wrapper.js`, import.meta.url).href;
const worker = new Worker(workerScriptPath, { type: 'module' });
worker.onmessage = () => {
console.log(`debugFetch: Got the connected event from the worker`);
resolve({
hostPort,
disconnectFn: () => worker.postMessage({ disconnect: true }),
Expand Down
2 changes: 0 additions & 2 deletions code/bitjs/archive/unzip.js
Original file line number Diff line number Diff line change
Expand Up @@ -789,8 +789,6 @@ export function connect(port) {

hostPort = port;
port.onmessage = onmessage;
// TODO: kthoom change for debugging.
console.log(`debugFetch: Connected host to unzip implementation`);
}

export function disconnect() {
Expand Down
1 change: 0 additions & 1 deletion code/bitjs/archive/webworker-wrapper.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ let implPort;
let module;

onmessage = async (evt) => {
console.log(`debugFetch: Got a message inside webworker-wrapper with implSrc: ${evt.data.implSrc}`);
if (evt.data.implSrc) {
module = await import(evt.data.implSrc);
module.connect(evt.ports[0]);
Expand Down
88 changes: 48 additions & 40 deletions code/book.js
Original file line number Diff line number Diff line change
Expand Up @@ -342,48 +342,56 @@ export class Book extends EventTarget {
throw e;
}

// =============================================================================================
// Option 1: Readable code, fetching chunk by chunk using await.
// const reader = response.body.getReader();

// /**
// * Reads one chunk at a time.
// * @returns {Promise<ArrayBuffer | null>}
// */
// const getOneChunk = async () => {
// const { done, value } = await reader.read();
// if (!done) return value.buffer;
// return null;
// };

// const firstChunk = await getOneChunk();
// if (!firstChunk) {
// throw `Could not get one chunk from fetch()`;
// }
// bytesTotal = firstChunk.byteLength;

// // Asynchronously wait for the BookBinder and its implementation to be connected.
// await this.#startBookBinding(this.#name, firstChunk, this.#expectedSize);

// // Read out all subsequent chunks.
// /** @type {ArrayBuffer | null} */
// let nextChunk;
// while (nextChunk = await getOneChunk()) {
// bytesTotal += nextChunk.byteLength;
// this.appendBytes(nextChunk);
// this.#bookBinder.appendBytes(nextChunk);
// }

// =============================================================================================
// Option 2: The XHR way (grab all bytes and only then start book binding).
const ab = await response.arrayBuffer();
bytesTotal = ab.byteLength;
await this.#startBookBinding(this.#name, ab, this.#expectedSize);
if (Params['fetchMode'] === 'chunkByChunk') {
// =============================================================================================
// Option 1: Readable code, fetching chunk by chunk using await.
const reader = response.body.getReader();
let numChunks = 0;

/**
* Reads one chunk at a time.
* @returns {Promise<ArrayBuffer | null>}
*/
const getOneChunk = async () => {
const { done, value } = await reader.read();
if (!done) {
numChunks++;
console.log(`debugFetch: Received chunk #${numChunks} of ${value.byteLength} bytes`);
return value.buffer;
}
return null;
};

const firstChunk = await getOneChunk();
if (!firstChunk) {
throw `Could not get one chunk from fetch()`;
}
bytesTotal = firstChunk.byteLength;

// Asynchronously wait for the BookBinder and its implementation to be connected.
await this.#startBookBinding(this.#name, firstChunk, this.#expectedSize);
console.log(`debugFetch: Instantiated the BookBinder`);

// Read out all subsequent chunks.
/** @type {ArrayBuffer | null} */
let nextChunk;
while (nextChunk = await getOneChunk()) {
bytesTotal += nextChunk.byteLength;
this.appendBytes(nextChunk);
this.#bookBinder.appendBytes(nextChunk);
}
} else {
// =============================================================================================
// Option 2: The XHR way (grab all bytes and only then start book binding).
const ab = await response.arrayBuffer();
bytesTotal = ab.byteLength;
await this.#startBookBinding(this.#name, ab, this.#expectedSize);
}

// Send out BookLoadingComplete event and return this book.
this.#finishedLoading = true;
this.dispatchEvent(new BookLoadingCompleteEvent(this));
if (Params['debugFetch'] === 'true') {
if (Params['fetchMode']) {
console.log(`debugFetch: ArrayBuffers were total length ${bytesTotal}`);
}

Expand Down Expand Up @@ -553,7 +561,7 @@ export class Book extends EventTarget {
});

bookBinder.addEventListener(BookEventType.PAGE_EXTRACTED, evt => {
if (Params['debugFetch'] === 'true') {
if (Params['fetchMode']) {
console.log(`debugFetch: Page #${this.#pages.length+1} extracted`);
}
this.#pages.push(evt.page);
Expand All @@ -567,7 +575,7 @@ export class Book extends EventTarget {
this.dispatchEvent(new BookProgressEvent(this, evt.totalPages, evt.message));
});

if (Params['debugFetch'] === 'true') {
if (Params['fetchMode']) {
console.log(`debugFetch: Calling BookBinder.start()`);
}
// Wait for its decompressing implementation to be loaded and ports connected.
Expand Down
13 changes: 5 additions & 8 deletions code/epub/epub-book-binder.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ export class EPUBBookBinder extends BookBinder {
constructor(filenameOrUri, ab, totalExpectedSize) {
super(filenameOrUri, ab, totalExpectedSize);

if (Params['debugFetch'] === 'true') {
if (Params['fetchMode']) {
console.log(`debugFetch: Constructing a EPUBBookBinder for ${filenameOrUri}`);
}

Expand Down Expand Up @@ -85,24 +85,21 @@ export class EPUBBookBinder extends BookBinder {

/** @override */
beforeStart_() {
if (Params['debugFetch'] === 'true') {
console.log(`EPubBookBinder.beforeStart_()`);
}
let firstFile = true;
let numExtractions = 0;
this.unarchiver.onExtract(evt => {
numExtractions++;
/** @type {import('../bitjs/archive/decompress.js').UnarchivedFile} */
const theFile = evt.unarchivedFile;
if (Params['debugFetch'] === 'true' && this.fileMap_.has(theFile.filename)) {
if (this.fileMap_.has(theFile.filename)) {
// TODO: How does it get multiple extract events for the same file?
console.error(`debugFetch: Received an EXTRACT event for ${theFile.filename}, but already have that file!`);
console.error(`TODO: Received an EXTRACT event for ${theFile.filename}, but already have that file!`);
return;
}

// This is a new file. Add it to the map.
this.fileMap_.set(theFile.filename, theFile.fileData);
if (Params['debugFetch'] === 'true') {
if (Params['fetchMode']) {
console.log(`debugFetch: Extracted file ${theFile.filename} of size ${theFile.fileData.byteLength}`);
}

Expand All @@ -113,7 +110,7 @@ export class EPUBBookBinder extends BookBinder {
}
});
this.unarchiver.addEventListener(UnarchiveEventType.FINISH, evt => {
if (Params['debugFetch'] === 'true') {
if (Params['fetchMode']) {
console.log(`debugFetch: Received UnarchiveEventType.FINISH event with ${numExtractions} extractions`);
}

Expand Down

0 comments on commit 8d30712

Please sign in to comment.