Skip to content
This repository has been archived by the owner on Sep 30, 2023. It is now read-only.

Commit

Permalink
Remove unused LogIO.fromMultihash method
Browse files Browse the repository at this point in the history
  • Loading branch information
satazor committed Jan 15, 2019
1 parent f45f896 commit 4228b6f
Show file tree
Hide file tree
Showing 9 changed files with 998 additions and 535 deletions.
31 changes: 29 additions & 2 deletions dist/ipfslog.min.js

Large diffs are not rendered by default.

98 changes: 59 additions & 39 deletions lib/es5/entry-io.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,24 @@ function () {

(0, _createClass2.default)(EntryIO, null, [{
key: "fetchParallel",
// Fetch log graphs in parallel

/**
* Fetch log entries in parallel.
* @param {IPFS} ipfs An IPFS instance
* @param {string|Array<string>} cids CIDs of the entries to fetch
* @param {number} [amount=-1] How many entries to fetch
* @param {Array<Entry>} [exclude] Entries to not fetch
  * @param {number} [concurrency] Max concurrent fetch operations
* @param {number} [timeout] Maximum time to wait for each fetch operation, in ms
  * @param {function(cid, entry, count)} onProgressCallback Called with each fetched entry and the running total
* @returns {Promise<Array<Entry>>}
*/
value: function () {
var _fetchParallel = (0, _asyncToGenerator2.default)(
/*#__PURE__*/
_regenerator.default.mark(function _callee(ipfs, hashes, length) {
var exclude,
_regenerator.default.mark(function _callee(ipfs, cids) {
var amount,
exclude,
concurrency,
timeout,
onProgressCallback,
Expand All @@ -43,13 +55,14 @@ function () {
while (1) {
switch (_context.prev = _context.next) {
case 0:
amount = _args.length > 2 && _args[2] !== undefined ? _args[2] : -1;
exclude = _args.length > 3 && _args[3] !== undefined ? _args[3] : [];
concurrency = _args.length > 4 ? _args[4] : undefined;
concurrency = _args.length > 4 && _args[4] !== undefined ? _args[4] : null;
timeout = _args.length > 5 ? _args[5] : undefined;
onProgressCallback = _args.length > 6 ? _args[6] : undefined;

fetchOne = function fetchOne(hash) {
return EntryIO.fetchAll(ipfs, hash, length, exclude, timeout, onProgressCallback);
fetchOne = function fetchOne(cid) {
return EntryIO.fetchAll(ipfs, cid, amount, exclude, timeout, onProgressCallback);
};

concatArrays = function concatArrays(arr1, arr2) {
Expand All @@ -60,40 +73,39 @@ function () {
return arr.reduce(concatArrays, []);
};

concurrency = Math.max(concurrency || hashes.length, 1);
_context.next = 10;
return pMap(hashes, fetchOne, {
concurrency = Math.max(concurrency || cids.length, 1);
_context.next = 11;
return pMap(cids, fetchOne, {
concurrency: concurrency
});

case 10:
case 11:
entries = _context.sent;
return _context.abrupt("return", flatten(entries));

case 12:
case 13:
case "end":
return _context.stop();
}
}
}, _callee, this);
}));

function fetchParallel(_x, _x2, _x3) {
function fetchParallel(_x, _x2) {
return _fetchParallel.apply(this, arguments);
}

return fetchParallel;
}()
/**
* Fetch log entries sequentially
*
* @param {IPFS} [ipfs] An IPFS instance
* @param {string} [hash] Multihash of the entry to fetch
* @param {string} [parent] Parent of the node to be fetched
* @param {Object} [all] Entries to skip
* @param {Number} [amount=-1] How many entries to fetch
* @param {Number} [depth=0] Current depth of the recursion
* @param {function(hash, entry, parent, depth)} onProgressCallback
* Fetch log entries sequentially.
* @param {IPFS} ipfs An IPFS instance
* @param {string|Array<string>} cids CIDs of the entries to fetch
* @param {number} [amount=-1] How many entries to fetch
* @param {Array<Entry>} [exclude] Entries to not fetch
  * @param {number} [timeout] Maximum time to wait for each fetch operation, in ms
  * @param {function(cid, entry, count)} onProgressCallback Called with each fetched entry and the running total
* @returns {Promise<Array<Entry>>}
*/

Expand All @@ -102,8 +114,9 @@ function () {
value: function () {
var _fetchAll = (0, _asyncToGenerator2.default)(
/*#__PURE__*/
_regenerator.default.mark(function _callee3(ipfs, hashes, amount) {
var exclude,
_regenerator.default.mark(function _callee3(ipfs, cids) {
var amount,
exclude,
timeout,
onProgressCallback,
result,
Expand All @@ -118,12 +131,13 @@ function () {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
amount = _args3.length > 2 && _args3[2] !== undefined ? _args3[2] : -1;
exclude = _args3.length > 3 && _args3[3] !== undefined ? _args3[3] : [];
timeout = _args3.length > 4 && _args3[4] !== undefined ? _args3[4] : null;
onProgressCallback = _args3.length > 5 ? _args3[5] : undefined;
result = [];
cache = {};
loadingQueue = Array.isArray(hashes) ? hashes.slice() : [hashes]; // Add a multihash to the loading queue
loadingQueue = Array.isArray(cids) ? cids.slice() : [cids]; // Add a CID to the loading queue

addToLoadingQueue = function addToLoadingQueue(e) {
return loadingQueue.push(e);
Expand All @@ -133,7 +147,10 @@ function () {
exclude = exclude && Array.isArray(exclude) ? exclude : [];

addToExcludeCache = function addToExcludeCache(e) {
return cache[e.hash] = e;
if (Entry.isEntry(e)) {
result.push(e);
cache[e.cid] = e;
}
};

exclude.forEach(addToExcludeCache);
Expand All @@ -143,9 +160,9 @@ function () {
};

fetchEntry = function fetchEntry() {
var hash = loadingQueue.shift();
var cid = loadingQueue.shift();

if (cache[hash]) {
if (cache[cid]) {
return Promise.resolve();
}

Expand All @@ -163,28 +180,26 @@ function () {
// Resolve the promise after a timeout (if given) in order to
// not get stuck loading a block that is unreachable
timer = timeout ? setTimeout(function () {
console.warn("Warning: Couldn't fetch entry '".concat(hash, "', request timed out (").concat(timeout, "ms)"));
console.warn("Warning: Couldn't fetch entry '".concat(cid, "', request timed out (").concat(timeout, "ms)"));
resolve();
}, timeout) : null;

addToResults = function addToResults(entry) {
clearTimeout(timer);

if (Entry.isEntry(entry)) {
entry.next.forEach(addToLoadingQueue);
result.push(entry);
cache[hash] = entry;
cache[cid] = entry;

if (onProgressCallback) {
onProgressCallback(hash, entry, result.length);
onProgressCallback(cid, entry, result.length);
}
}
}; // Load the entry


_context2.prev = 2;
_context2.next = 5;
return Entry.fromMultihash(ipfs, hash);
return Entry.fromCID(ipfs, cid);

case 5:
entry = _context2.sent;
Expand All @@ -199,34 +214,39 @@ function () {
reject(_context2.t0);

case 13:
_context2.prev = 13;
clearTimeout(timer);
return _context2.finish(13);

case 16:
case "end":
return _context2.stop();
}
}
}, _callee2, this, [[2, 10]]);
}, _callee2, this, [[2, 10, 13, 16]]);
}));

return function (_x7, _x8) {
return function (_x5, _x6) {
return _ref.apply(this, arguments);
};
}());
};

_context3.next = 14;
_context3.next = 15;
return pWhilst(shouldFetchMore, fetchEntry);

case 14:
case 15:
return _context3.abrupt("return", result);

case 15:
case 16:
case "end":
return _context3.stop();
}
}
}, _callee3, this);
}));

function fetchAll(_x4, _x5, _x6) {
function fetchAll(_x3, _x4) {
return _fetchAll.apply(this, arguments);
}

Expand Down
Loading

0 comments on commit 4228b6f

Please sign in to comment.