Skip to content

Commit

Permalink
fix partialHash
Browse files — browse the repository at this point in the history
  • Loading branch information
glamperd committed Mar 31, 2023
1 parent 4cda5d4 commit 46e4953
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 16 deletions.
4 changes: 2 additions & 2 deletions src/misc.js
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ export function formatHash(b, title) {
export function hashToHex(b) {
const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
let S = "";
for (let i=0; i<16; i++) {
for (let i=0; i<a.byteLength / 4; i++) {
S += a.getUint32(i*4).toString(16).padStart(8, "0");
}
return S;
Expand Down Expand Up @@ -198,7 +198,7 @@ export function stringifyBigIntsWithField(Fr, o) {
});
return res;
} else if ((typeof(o) == "bigint") || o.eq !== undefined) {
return o.toString(16);
return o.toString(10);
} else {
return o;
}
Expand Down
2 changes: 1 addition & 1 deletion src/powersoftau_export_json.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ export default async function exportJson(pTauFilename, verbose) {
const pTau = {};
pTau.q = curve.q;
pTau.power = power;
pTau.contributions = await utils.readContributions(fd, curve, sections, true);
pTau.contributions = await utils.readContributions(fd, curve, sections);

if (sections[2]) {
pTau.tauG1 = await exportSection(2, "G1", (2 ** power)*2 -1, "tauG1");
Expand Down
5 changes: 2 additions & 3 deletions src/powersoftau_import.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,10 @@ export default async function importResponse(oldPtauFilename, contributionFilena
({contributions, power} = jsonObj);

// get curve from q
const qs = Scalar.fromArray(misc.hex2ByteArray(jsonObj.q), 256);
const qs = Scalar.e(jsonObj.q);
curve = await getCurveFromQ(qs);
// no points (sections !)
// Convert contribution hashes to Scalar
// Convert contribution hashes
for (const i in contributions) {
contributions[i].nextChallenge = misc.hex2ByteArray(contributions[i].nextChallenge);
contributions[i].partialHash = misc.hex2ByteArray(contributions[i].partialHash);
Expand All @@ -55,7 +55,6 @@ export default async function importResponse(oldPtauFilename, contributionFilena
contributions[i].betaG1 = misc.hex2ByteArray(contributions[i].betaG1);
contributions[i].betaG2 = misc.hex2ByteArray(contributions[i].betaG2);
contributions[i].key = deserialiseKey(contributions[i].key);

}

} else {
Expand Down
13 changes: 3 additions & 10 deletions src/powersoftau_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ export async function writePtauPubKey(fd, curve, key, montgomery) {
await fd.write(buff);
}

async function readContribution(fd, curve, formatted = false) {
async function readContribution(fd, curve) {
const c = {};

c.tauG1 = await readG1();
Expand All @@ -171,10 +171,6 @@ async function readContribution(fd, curve, formatted = false) {
c.key = await readPtauPubKey(fd, curve, true);
c.partialHash = await fd.read(216);
c.nextChallenge = await fd.read(64);
if (formatted) {
c.partialHash = misc.hashToHex(c.partialHash);
c.nextChallenge = misc.hashToHex(c.nextChallenge);
}
c.type = await fd.readULE32();

const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
Expand All @@ -184,9 +180,6 @@ async function readContribution(fd, curve, formatted = false) {
responseHasher.setPartialHash(c.partialHash);
responseHasher.update(buffV);
c.responseHash = responseHasher.digest();
if (formatted) {
c.responseHash = misc.hashToHex(c.responseHash);
}

const paramLength = await fd.readULE32();
const curPos = fd.pos;
Expand Down Expand Up @@ -231,15 +224,15 @@ async function readContribution(fd, curve, formatted = false) {
}
}

export async function readContributions(fd, curve, sections, formatted = false) {
export async function readContributions(fd, curve, sections) {
if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
if (sections[7][0].length>1) throw new Error(fd.fileName +": File has more than one contributions section");

fd.pos = sections[7][0].p;
const nContributions = await fd.readULE32();
const contributions = [];
for (let i=0; i<nContributions; i++) {
const c = await readContribution(fd, curve, formatted);
const c = await readContribution(fd, curve);
c.id = i+1;
contributions.push(c);
}
Expand Down

0 comments on commit 46e4953

Please sign in to comment.