diff --git a/buildkite/scripts/build-test-executive.sh b/buildkite/scripts/build-test-executive.sh deleted file mode 100755 index d41af619530..00000000000 --- a/buildkite/scripts/build-test-executive.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -echo "--- setup opam config environment" -eval `opam config env` -export PATH=/home/opam/.cargo/bin:/usr/lib/go/bin:$PATH -export GO=/usr/lib/go/bin/go - -echo "--- build test-executive" -dune build --verbose --profile=${DUNE_PROFILE} src/app/test_executive/test_executive.exe src/app/logproc/logproc.exe - -echo "--- build complete, preparing test-executive for caching" -# copy built binary to current location and adjust permissions -cp _build/default/src/app/test_executive/test_executive.exe . -chmod +rwx test_executive.exe - -cp _build/default/src/app/logproc/logproc.exe . -chmod +rwx logproc.exe diff --git a/buildkite/scripts/run-test-executive.sh b/buildkite/scripts/run-test-executive.sh index 4b65aef3c7b..4636bdcc5e2 100755 --- a/buildkite/scripts/run-test-executive.sh +++ b/buildkite/scripts/run-test-executive.sh @@ -10,9 +10,21 @@ if [[ "${TEST_NAME:0:15}" == "block-prod-prio" ]] && [[ "$RUN_OPT_TESTS" == "" ] exit 0 fi -./test_executive.exe cloud "$TEST_NAME" \ +# Don't prompt for answers during apt-get install +export DEBIAN_FRONTEND=noninteractive + +echo "deb [trusted=yes] https://apt.releases.hashicorp.com $MINA_DEB_CODENAME main" | tee /etc/apt/sources.list.d/hashicorp.list +apt-get update +apt-get install -y "terraform" + +echo "Installing mina daemon package: mina-test-executive=${MINA_DEB_VERSION}" +echo "deb [trusted=yes] http://packages.o1test.net $MINA_DEB_CODENAME $MINA_DEB_RELEASE" | tee /etc/apt/sources.list.d/mina.list +apt-get update +apt-get install --allow-downgrades -y "mina-test-executive=$MINA_DEB_VERSION" "mina-logproc=$MINA_DEB_VERSION" + +mina-test-executive cloud "$TEST_NAME" \ --mina-image "$MINA_IMAGE" \ --archive-image "$ARCHIVE_IMAGE" \ --mina-automation-location ./automation \ | tee "$TEST_NAME.test.log" \ - | ./logproc.exe -i inline -f '!(.level in ["Debug", "Spam"])' + | mina-logproc -i inline -f '!(.level in ["Debug", "Spam"])' diff --git a/buildkite/scripts/unit-test.sh b/buildkite/scripts/unit-test.sh index 1975eacfb60..5046e143ec0 100755 --- a/buildkite/scripts/unit-test.sh +++ b/buildkite/scripts/unit-test.sh @@ -22,5 +22,8 @@ time make build echo "--- Build all targets" dune build "${path}" --profile="${profile}" -j16 +echo "--- Check for changes to verification keys" +time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}" -j16 + echo "--- Run unit tests" time dune runtest "${path}" --profile="${profile}" -j16 || (./scripts/link-coredumps.sh) diff --git a/buildkite/src/Command/TestExecutive.dhall b/buildkite/src/Command/TestExecutive.dhall index 8e7eaa2b164..c8541dccd2d 100644 --- a/buildkite/src/Command/TestExecutive.dhall +++ b/buildkite/src/Command/TestExecutive.dhall @@ -15,36 +15,11 @@ let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type in { - build = \(duneProfile : Text) -> - Command.build - Command.Config::{ - commands = - -- Build test executive binary - RunInToolchain.runInToolchain [ - "DUNE_PROFILE=${duneProfile}" - ] "./buildkite/scripts/build-test-executive.sh" - - # - - [ - -- Cache test-executive binary - Cmd.run "artifact-cache-helper.sh test_executive.exe --upload", - Cmd.run "artifact-cache-helper.sh logproc.exe --upload" - ], - label = "Build test-executive", - key = "build-test-executive", - target = Size.XLarge - }, - 
execute = \(testName : Text) -> \(dependsOn : List Command.TaggedKey.Type) -> Command.build Command.Config::{ commands = [ - -- Download test dependencies - Cmd.run "artifact-cache-helper.sh test_executive.exe && chmod +x test_executive.exe", - Cmd.run "artifact-cache-helper.sh logproc.exe && chmod +x logproc.exe", - -- Execute test based on BUILD image Cmd.run "MINA_DEB_CODENAME=bullseye ; source ./buildkite/scripts/export-git-env-vars.sh && ./buildkite/scripts/run-test-executive.sh ${testName}" ], @@ -78,9 +53,6 @@ in Command.Config::{ commands = [ - -- Download test dependencies - Cmd.run "artifact-cache-helper.sh test_executive.exe && chmod +x test_executive.exe", - Cmd.run "artifact-cache-helper.sh logproc.exe && chmod +x logproc.exe", Cmd.run "artifact-cache-helper.sh snarkyjs_test.tar.gz && tar -xzf snarkyjs_test.tar.gz", -- Execute test based on BUILD image diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall index be2e42fee41..96f6ab5e835 100644 --- a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall @@ -6,12 +6,10 @@ let PipelineMode = ../../Pipeline/Mode.dhall let TestExecutive = ../../Command/TestExecutive.dhall let dependsOn = [ - { name = "TestnetIntegrationTests", key = "build-test-executive" }, { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" }, { name = "MinaArtifactBullseye", key = "archive-bullseye-docker-image" } ] let dependsOnJs = [ - { name = "TestnetIntegrationTests", key = "build-test-executive" }, { name = "TestnetIntegrationTests", key = "build-js-tests" }, { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" }, { name = "MinaArtifactBullseye", key = "archive-bullseye-docker-image" } @@ -33,7 +31,6 @@ in Pipeline.build Pipeline.Config::{ mode = PipelineMode.Type.Stable }, steps = [ - TestExecutive.build "integration_tests", TestExecutive.buildJs "integration_tests", TestExecutive.execute "peers-reliability" dependsOn, TestExecutive.execute "chain-reliability" dependsOn, diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall index feef489902b..04dc26cef28 100644 --- a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall +++ b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall @@ -6,7 +6,6 @@ let PipelineMode = ../../Pipeline/Mode.dhall let TestExecutive = ../../Command/TestExecutive.dhall let dependsOn = [ - { name = "TestnetIntegrationTests", key = "build-test-executive" }, { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" }, { name = "MinaArtifactBullseye", key = "archive-bullseye-docker-image" } ] diff --git a/nix/rust.nix b/nix/rust.nix index 4e8f9e24fad..7ac8a553694 100644 --- a/nix/rust.nix +++ b/nix/rust.nix @@ -9,7 +9,7 @@ let }; toolchainHashes = { "1.72" = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik="; - "nightly-2023-09-01" = "sha256-nfYc8EgbYl75yIIHmEEmpux4ZpwaIyuC+g6Hf4y1Hyk="; + "nightly-2023-09-01" = "sha256-zek9JAnRaoX8V0U2Y5ssXVe9tvoQ0ERGXfUCUGYdrMA="; # copy the placeholder line with the correct toolchain name when adding a new toolchain # That is, # 1. 
Put the correct version name; diff --git a/src/app/print_blockchain_snark_vk/.ocamlformat b/src/app/print_blockchain_snark_vk/.ocamlformat new file mode 120000 index 00000000000..8e0a87983aa --- /dev/null +++ b/src/app/print_blockchain_snark_vk/.ocamlformat @@ -0,0 +1 @@ +../../.ocamlformat \ No newline at end of file diff --git a/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json b/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json new file mode 100644 index 00000000000..1febab039e3 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json @@ -0,0 +1 @@ +{"commitments":{"sigma_comm":[["0x1A8B2FCF1A5D4F6D1902517B6BF31A4288186F5AA647804402E14E4DC63ABE1F","0x3224B1C182312B364F659896357A40D2EF566959E6C11AD0A347EA80D82CDBF6"],["0x22CFAD2E118F91BE8E133664585947E25A282017E007700D67F0D72999639BCC","0x31AA847B558743C3CAD651B085327F0B67E1C1BFEA2FF472976589ADD952E6D4"],["0x02FB6661EE11262AFC7C8B0389A9B1E4ED18263C3F2468AAF035A07ECC61C9EB","0x30705D77AB82D6BAAC8E23640BBAB40F7E7FB53826CD5011575CAE52C6D0517D"],["0x0FD615E77CF392BE8285848DF60D4F87350ABCD65200FC4932E309133D145D05","0x3E75EA0E3810A97D8F1D1F8A6B0F9AE5D0F68AE34A4D0EFC29F00AB3BF6D480E"],["0x20C89F37CF8E43831F077DFF9AB9A676907BF9A0F64E418A643667808AB72DAB","0x389F98A86437D8A7F667ABB9B0B0DEACBD7E420B583B795363EBCECDBF1C187B"],["0x2F5C56AA39D6FD87055E7CA6F5FA61E94FCF17336DA4476E328D2CC184F93D47","0x089B80235867482E24087360E161AC76A5249D826CFAC51AB537093D86EAA632"],["0x20216D064B7466C42626F66F3F141997E6F29D17CC6EBA8FF91E1E03D31FE555","0x0082380869AFA6A961738C93BCEEA3042F3ECEA143A585B565A2BF6BD78A1224"]],"coefficients_comm":[["0x2113503AE01039434E72D555369C460EE5C45260396DD4782CE0BE81B19F2F6C","0x2F72056BEC498916F4176C6410A31801F81CDE4D427F1C15566C018393751E2B"],["0x08060FD15238AE40E726A04EA5C1AE41D8A1D267C9AF7355A8D6C70607139922","0x13D36CAD9C78FD0AF514AB7ECAC433592389AE84283024B1D4C85D2E8C952B28"],["0x3AE007735587246066F71A1F09801C359E393CBF49DBDFEDD01E61AA88ECAF0F","0x15882EBC62C0E9BC93A14F2CB2ECE43402063B50C09CE735E56D51447689D2C6"],["0x3DCBFD7F258F598AEFB560A1B3BCE632B4AC20534FEFF3B35C82082FE708B236","0x254668B50AB44E074902E0DB981FDF045FC614C1E61EBDDA27C06DF9131FC71A"],["0x364F1609C5A72CC0A6F5966400AE1A5AA4B7684805B46D0EE09001F7618DD614","0x0F3E45A3DEC2B73BEC5E91ED78BB750B87C02A2339E5A40C2D51FAA0EE1D5B7E"],["0x2CD387D1747E5594DF7FBAE6B40C7A674875F6F9FBB4E3632FBAFD49D41E67A6","0x33C14453CA5F229C77B5259999798D42B36BE08F68C09F969937C2C13FE1C34D"],["0x3E32E6702761E653043644E9DC5D75FE7A351B764CD90D72DD1176DB1A541121","0x0454C684E407E066394B31CFCCBAF22844B7E17D162EFE860030960A38AD2B2E"],["0x1AE2CE814A97742F41565FEB5881767A2BCF55981A3C8ED33BAE069CBE652FCA","0x14DC3F97387117CCA4884F28DCC82C9CF1B7101623B481FD3D1588F568B3E26B"],["0x0A10FA40BB6C61E8185412EE2BAE86C51D19EA7276593DFA7FA3FABB0345521F","0x3A8ACF73B5EF4E52ED5DC96404A60C69A067B84FE77211C5407A48437BD5CF89"],["0x21B2C2D62891139A91056B9D298DA8713B7ADA51963B864A5A2376771A1AA766","0x1AC7782A588865397F219624D70D11A109283E82B9CD50FFEE23753461E755FE"],["0x2763E7A5B2C387147A0381B50D5C504A5012B7F7CA474C2B4A351011B9BBD036","0x13DEA6F4AEBDC606331746A886756C7EA266A52F60B45DE5544A04BFDB277455"],["0x32596E43A053571EE478A3106CABFE9ECB29437F78A978409B4DDE33FE637103","0x3D76AF5EE3EFF37E666087AC2827A8BD0D9690BF440FF24434DA3E6AFF7A2AF4"],["0x1D73FE7224F38A37B2C69E22FA6750BABAED17B0F9998877B33E488D9063CE8E","0x3E24CEADB1BDA92A0DBDA0F90DF3B8FBD7C6F7ABCC039E9D41AB6916A799F645"],["0x2FDF5D887BC70465AFAC06B7A43632732B5AF0B933EA045D35E99D532BD44CAF","0x211A76FD7B76DF3E693CAA0BBB5C178
D5DDE827AB6A902AF04AB39D8040E29DF"],["0x0D29BA887D54D915CFB321573463A3CAF5C354848A51FFD6D4FFC0E0B4464D39","0x232829C5C662E87CD033AFB3E84E86EC342F4942AC9D75123A21172EE06CF567"]],"generic_comm":["0x363662743B4E693E18C0CF5FB651DF1205BB27FABCD93ADF54ECD973B21B921B","0x116FBA051A4A146C88BCB0F2B56309429CD63514EEEFBE6EA0B39927E72BB20C"],"psm_comm":["0x0A8B3EF5670C6367C721EDAA40AF511C18B1602A4732FEA01124D5D949304324","0x1DBE04516C4A33CDFBBD1F54F413B9F21B5D41B6CD668B249879A2688693E51B"],"complete_add_comm":["0x1E859218F11F787CE75C06FD5303457CBD307BDEEB693CC66A235CB85B314D4B","0x228167E190903072E8F34BD7AF61A0C02DE0BC3D54FF8760A2BCBFDD6A880688"],"mul_comm":["0x3EC97D3A8CD405A92B31B67184817925B99B1527065A28677AEAAEC37CC7B9C5","0x3844006206FF29A55DBB44A3D06E46610639E24E960B4BC32A663EEC4D04C689"],"emul_comm":["0x114772020FAF5E6660D7D75B666B7121829027A866A8214B42899E824D820CB9","0x01F7FC015E2F0C5E02E34F0FD6FBA0FCE01E40EA183F0F6F7C197553524A96B9"],"endomul_scalar_comm":["0x04C30A9B6412594ECD5EEFCA20D4B759BBC52B08868E01F74FDC82B557A76ADD","0x019413D8112950CB93D20BA29452DC281FFE1A692706C34BD148E331F844D244"]},"index":{"domain":{"log_size_of_group":14,"group_gen":"0x1E5587687024253BB079B38D9C5371594958E496C605D3BD898B34D068AFBEE7"},"max_poly_size":32768,"public":40,"prev_challenges":2,"srs":null,"evals":{"sigma_comm":[{"unshifted":[["Finite",["0x1A8B2FCF1A5D4F6D1902517B6BF31A4288186F5AA647804402E14E4DC63ABE1F","0x3224B1C182312B364F659896357A40D2EF566959E6C11AD0A347EA80D82CDBF6"]]],"shifted":null},{"unshifted":[["Finite",["0x22CFAD2E118F91BE8E133664585947E25A282017E007700D67F0D72999639BCC","0x31AA847B558743C3CAD651B085327F0B67E1C1BFEA2FF472976589ADD952E6D4"]]],"shifted":null},{"unshifted":[["Finite",["0x02FB6661EE11262AFC7C8B0389A9B1E4ED18263C3F2468AAF035A07ECC61C9EB","0x30705D77AB82D6BAAC8E23640BBAB40F7E7FB53826CD5011575CAE52C6D0517D"]]],"shifted":null},{"unshifted":[["Finite",["0x0FD615E77CF392BE8285848DF60D4F87350ABCD65200FC4932E309133D145D05","0x3E75EA0E3810A97D8F1D1F8A6B0F9AE5D0F68AE34A4D0EFC29F00AB3BF6D480E"]]],"shifted":null},{"unshifted":[["Finite",["0x20C89F37CF8E43831F077DFF9AB9A676907BF9A0F64E418A643667808AB72DAB","0x389F98A86437D8A7F667ABB9B0B0DEACBD7E420B583B795363EBCECDBF1C187B"]]],"shifted":null},{"unshifted":[["Finite",["0x2F5C56AA39D6FD87055E7CA6F5FA61E94FCF17336DA4476E328D2CC184F93D47","0x089B80235867482E24087360E161AC76A5249D826CFAC51AB537093D86EAA632"]]],"shifted":null},{"unshifted":[["Finite",["0x20216D064B7466C42626F66F3F141997E6F29D17CC6EBA8FF91E1E03D31FE555","0x0082380869AFA6A961738C93BCEEA3042F3ECEA143A585B565A2BF6BD78A1224"]]],"shifted":null}],"coefficients_comm":[{"unshifted":[["Finite",["0x2113503AE01039434E72D555369C460EE5C45260396DD4782CE0BE81B19F2F6C","0x2F72056BEC498916F4176C6410A31801F81CDE4D427F1C15566C018393751E2B"]]],"shifted":null},{"unshifted":[["Finite",["0x08060FD15238AE40E726A04EA5C1AE41D8A1D267C9AF7355A8D6C70607139922","0x13D36CAD9C78FD0AF514AB7ECAC433592389AE84283024B1D4C85D2E8C952B28"]]],"shifted":null},{"unshifted":[["Finite",["0x3AE007735587246066F71A1F09801C359E393CBF49DBDFEDD01E61AA88ECAF0F","0x15882EBC62C0E9BC93A14F2CB2ECE43402063B50C09CE735E56D51447689D2C6"]]],"shifted":null},{"unshifted":[["Finite",["0x3DCBFD7F258F598AEFB560A1B3BCE632B4AC20534FEFF3B35C82082FE708B236","0x254668B50AB44E074902E0DB981FDF045FC614C1E61EBDDA27C06DF9131FC71A"]]],"shifted":null},{"unshifted":[["Finite",["0x364F1609C5A72CC0A6F5966400AE1A5AA4B7684805B46D0EE09001F7618DD614","0x0F3E45A3DEC2B73BEC5E91ED78BB750B87C02A2339E5A40C2D51FAA0EE1D5B7E"]]],"shifted":null},{"unshifted":[["Finite",["0x
2CD387D1747E5594DF7FBAE6B40C7A674875F6F9FBB4E3632FBAFD49D41E67A6","0x33C14453CA5F229C77B5259999798D42B36BE08F68C09F969937C2C13FE1C34D"]]],"shifted":null},{"unshifted":[["Finite",["0x3E32E6702761E653043644E9DC5D75FE7A351B764CD90D72DD1176DB1A541121","0x0454C684E407E066394B31CFCCBAF22844B7E17D162EFE860030960A38AD2B2E"]]],"shifted":null},{"unshifted":[["Finite",["0x1AE2CE814A97742F41565FEB5881767A2BCF55981A3C8ED33BAE069CBE652FCA","0x14DC3F97387117CCA4884F28DCC82C9CF1B7101623B481FD3D1588F568B3E26B"]]],"shifted":null},{"unshifted":[["Finite",["0x0A10FA40BB6C61E8185412EE2BAE86C51D19EA7276593DFA7FA3FABB0345521F","0x3A8ACF73B5EF4E52ED5DC96404A60C69A067B84FE77211C5407A48437BD5CF89"]]],"shifted":null},{"unshifted":[["Finite",["0x21B2C2D62891139A91056B9D298DA8713B7ADA51963B864A5A2376771A1AA766","0x1AC7782A588865397F219624D70D11A109283E82B9CD50FFEE23753461E755FE"]]],"shifted":null},{"unshifted":[["Finite",["0x2763E7A5B2C387147A0381B50D5C504A5012B7F7CA474C2B4A351011B9BBD036","0x13DEA6F4AEBDC606331746A886756C7EA266A52F60B45DE5544A04BFDB277455"]]],"shifted":null},{"unshifted":[["Finite",["0x32596E43A053571EE478A3106CABFE9ECB29437F78A978409B4DDE33FE637103","0x3D76AF5EE3EFF37E666087AC2827A8BD0D9690BF440FF24434DA3E6AFF7A2AF4"]]],"shifted":null},{"unshifted":[["Finite",["0x1D73FE7224F38A37B2C69E22FA6750BABAED17B0F9998877B33E488D9063CE8E","0x3E24CEADB1BDA92A0DBDA0F90DF3B8FBD7C6F7ABCC039E9D41AB6916A799F645"]]],"shifted":null},{"unshifted":[["Finite",["0x2FDF5D887BC70465AFAC06B7A43632732B5AF0B933EA045D35E99D532BD44CAF","0x211A76FD7B76DF3E693CAA0BBB5C178D5DDE827AB6A902AF04AB39D8040E29DF"]]],"shifted":null},{"unshifted":[["Finite",["0x0D29BA887D54D915CFB321573463A3CAF5C354848A51FFD6D4FFC0E0B4464D39","0x232829C5C662E87CD033AFB3E84E86EC342F4942AC9D75123A21172EE06CF567"]]],"shifted":null}],"generic_comm":{"unshifted":[["Finite",["0x363662743B4E693E18C0CF5FB651DF1205BB27FABCD93ADF54ECD973B21B921B","0x116FBA051A4A146C88BCB0F2B56309429CD63514EEEFBE6EA0B39927E72BB20C"]]],"shifted":null},"psm_comm":{"unshifted":[["Finite",["0x0A8B3EF5670C6367C721EDAA40AF511C18B1602A4732FEA01124D5D949304324","0x1DBE04516C4A33CDFBBD1F54F413B9F21B5D41B6CD668B249879A2688693E51B"]]],"shifted":null},"complete_add_comm":{"unshifted":[["Finite",["0x1E859218F11F787CE75C06FD5303457CBD307BDEEB693CC66A235CB85B314D4B","0x228167E190903072E8F34BD7AF61A0C02DE0BC3D54FF8760A2BCBFDD6A880688"]]],"shifted":null},"mul_comm":{"unshifted":[["Finite",["0x3EC97D3A8CD405A92B31B67184817925B99B1527065A28677AEAAEC37CC7B9C5","0x3844006206FF29A55DBB44A3D06E46610639E24E960B4BC32A663EEC4D04C689"]]],"shifted":null},"emul_comm":{"unshifted":[["Finite",["0x114772020FAF5E6660D7D75B666B7121829027A866A8214B42899E824D820CB9","0x01F7FC015E2F0C5E02E34F0FD6FBA0FCE01E40EA183F0F6F7C197553524A96B9"]]],"shifted":null},"endomul_scalar_comm":{"unshifted":[["Finite",["0x04C30A9B6412594ECD5EEFCA20D4B759BBC52B08868E01F74FDC82B557A76ADD","0x019413D8112950CB93D20BA29452DC281FFE1A692706C34BD148E331F844D244"]]],"shifted":null},"xor_comm":null,"range_check0_comm":null,"range_check1_comm":null,"foreign_field_add_comm":null,"foreign_field_mul_comm":null,"rot_comm":null},"shifts":["0x0000000000000000000000000000000000000000000000000000000000000001","0x00B9CDC8FD0BD4B27E2A74AF7AEBD5734D52D75BDF85EBF1CAD03413E914A2E3","0x007CF68160D84012626E0046A932AD12E68B3394D6E2A001A537FFB40D3527C6","0x0077D45AECB939AE97A3952B48189964AA209609F19BE4A4B89F339A33440F6D","0x0077C7E54505D4771F6AF1FED2195500481EF1F3C0397B0AC819E678BD2309B4","0x00B3AF68ECC6AE7A4727F0708EDF4736BE1C99281FA380846E42264C62407484","0x00381CA45
36FC0ED935D50A74A87136F1A0675B618898DBCE67E564AB20174A1"],"lookup_index":null},"data":{"constraints":16384}} \ No newline at end of file diff --git a/src/app/print_blockchain_snark_vk/dune b/src/app/print_blockchain_snark_vk/dune new file mode 100644 index 00000000000..3b1137fd543 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/dune @@ -0,0 +1,18 @@ +(executable + (name print_blockchain_snark_vk) + (libraries + blockchain_snark) + (instrumentation (backend bisect_ppx)) + (preprocess (pps ppx_version))) + +(rule + (deps print_blockchain_snark_vk.exe) + (targets blockchain_snark_vk.json.corrected) + (action + (with-stdout-to %{targets} + (run %{deps})))) + +(rule + (alias runtest) + (action + (diff blockchain_snark_vk.json blockchain_snark_vk.json.corrected))) diff --git a/src/app/print_blockchain_snark_vk/dune-project b/src/app/print_blockchain_snark_vk/dune-project new file mode 100644 index 00000000000..7b17fb2d308 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/dune-project @@ -0,0 +1 @@ +(lang dune 3.3) diff --git a/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml b/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml new file mode 100644 index 00000000000..c111eacabd3 --- /dev/null +++ b/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml @@ -0,0 +1,41 @@ +open Core_kernel + +module Config = struct + let constraint_constants = Genesis_constants.Constraint_constants.compiled + + let proof_level = Genesis_constants.Proof_level.Full +end + +let () = Format.eprintf "Generating transaction snark circuit..@." + +let before = Time.now () + +module Transaction_snark_instance = Transaction_snark.Make (Config) + +let after = Time.now () + +let () = + Format.eprintf "Generated transaction snark circuit in %s.@." + (Time.Span.to_string_hum (Time.diff after before)) + +let () = Format.eprintf "Generating blockchain snark circuit..@." + +let before = Time.now () + +module Blockchain_snark_instance = +Blockchain_snark.Blockchain_snark_state.Make (struct + let tag = Transaction_snark_instance.tag + + include Config +end) + +let after = Time.now () + +let () = + Format.eprintf "Generated blockchain snark circuit in %s.@." 
+ (Time.Span.to_string_hum (Time.diff after before)) + +let () = + Lazy.force Blockchain_snark_instance.Proof.verification_key + |> Pickles.Verification_key.to_yojson |> Yojson.Safe.to_string + |> Format.print_string diff --git a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml b/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml index 5a527071512..84d79803e2c 100644 --- a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml +++ b/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml @@ -225,13 +225,13 @@ module Make (Inputs : Inputs_intf) = struct } let full_vk_commitments (t : Inputs.Verifier_index.t) : - ( Inputs.Curve.Affine.t - , Inputs.Curve.Affine.t option ) + ( Inputs.Curve.Affine.t array + , Inputs.Curve.Affine.t array option ) Pickles_types.Plonk_verification_key_evals.Step.t = - let g c : Inputs.Curve.Affine.t = + let g c : Inputs.Curve.Affine.t array = match Inputs.Poly_comm.of_backend_without_degree_bound c with | `Without_degree_bound x -> - x.(0) + x | `With_degree_bound _ -> assert false in diff --git a/src/lib/pickles/common.ml b/src/lib/pickles/common.ml index ce75b44a45d..abb3106417a 100644 --- a/src/lib/pickles/common.ml +++ b/src/lib/pickles/common.ml @@ -49,7 +49,8 @@ let hash_messages_for_next_step_proof ~app_state let g (x, y) = [ x; y ] in Tick_field_sponge.digest Tick_field_sponge.params (Types.Step.Proof_state.Messages_for_next_step_proof.to_field_elements t ~g - ~comm:(fun (x : Tock.Curve.Affine.t) -> Array.of_list (g x)) + ~comm:(fun (x : Tock.Curve.Affine.t array) -> + Array.concat_map x ~f:(fun x -> Array.of_list (g x)) ) ~app_state ) let dlog_pcs_batch (type nat proofs_verified total) @@ -230,22 +231,23 @@ let tick_public_input_of_statement ~max_proofs_verified ~f:(Backend.Tick.Field.Vector.get input) let ft_comm ~add:( + ) ~scale ~endoscale:_ ~negate - ~verification_key:(m : _ Plonk_verification_key_evals.t) ~alpha:_ + ~verification_key:(m : _ array Plonk_verification_key_evals.t) ~alpha:_ ~(plonk : _ Types.Wrap.Proof_state.Deferred_values.Plonk.In_circuit.t) ~t_comm = - let ( * ) x g = scale g x in - let _, [ sigma_comm_last ] = - Vector.split m.sigma_comm (snd (Plonk_types.Permuts_minus_1.add Nat.N1.n)) - in - let f_comm = List.reduce_exn ~f:( + ) [ plonk.perm * sigma_comm_last ] in - let chunked_t_comm = - let n = Array.length t_comm in - let res = ref t_comm.(n - 1) in + let reduce_chunks comm = + let n = Array.length comm in + let res = ref comm.(n - 1) in for i = n - 2 downto 0 do - res := t_comm.(i) + scale !res plonk.zeta_to_srs_length + res := comm.(i) + scale !res plonk.zeta_to_srs_length done ; !res in + let _, [ sigma_comm_last ] = + Vector.split m.sigma_comm (snd (Plonk_types.Permuts_minus_1.add Nat.N1.n)) + in + let sigma_comm_last = reduce_chunks sigma_comm_last in + let f_comm = List.reduce_exn ~f:( + ) [ scale sigma_comm_last plonk.perm ] in + let chunked_t_comm = reduce_chunks t_comm in f_comm + chunked_t_comm + negate (scale chunked_t_comm plonk.zeta_to_domain_size) diff --git a/src/lib/pickles/common.mli b/src/lib/pickles/common.mli index 1c7833c5ea8..08aed4604ef 100644 --- a/src/lib/pickles/common.mli +++ b/src/lib/pickles/common.mli @@ -40,7 +40,7 @@ val ft_comm : -> scale:('comm -> 'scalar -> 'comm) -> endoscale:('comm -> 'c -> 'comm) -> negate:('comm -> 'comm) - -> verification_key:'comm Pickles_types.Plonk_verification_key_evals.t + -> verification_key:'comm array Pickles_types.Plonk_verification_key_evals.t -> alpha:'c -> plonk: ( 'd @@ -145,7 +145,7 @@ end val 
hash_messages_for_next_step_proof : app_state:('a -> Kimchi_pasta.Basic.Fp.Stable.Latest.t Core_kernel.Array.t) - -> ( Backend.Tock.Curve.Affine.t + -> ( Backend.Tock.Curve.Affine.t array , 'a , ( Kimchi_pasta.Basic.Fp.Stable.Latest.t * Kimchi_pasta.Basic.Fp.Stable.Latest.t diff --git a/src/lib/pickles/compile.ml b/src/lib/pickles/compile.ml index 478401383ae..f85f4b271a6 100644 --- a/src/lib/pickles/compile.ml +++ b/src/lib/pickles/compile.ml @@ -117,8 +117,8 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Hlist.Length.t - -> ( ( Wrap_main_inputs.Inner_curve.Constant.t - , Wrap_main_inputs.Inner_curve.Constant.t option ) + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) Wrap_verifier.index' , 'branches ) Vector.t @@ -727,7 +727,11 @@ struct let step handler next_state = let wrap_vk = Lazy.force wrap_vk in S.f ?handler branch_data next_state ~prevs_length:prev_vars_length - ~self ~step_domains ~self_dlog_plonk_index:wrap_vk.commitments + ~self ~step_domains + ~self_dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) ~public_input ~auxiliary_typ ~feature_flags (Impls.Step.Keypair.pk (fst (Lazy.force step_pk))) wrap_vk.index @@ -771,8 +775,12 @@ struct Wrap.wrap ~proof_cache ~max_proofs_verified:Max_proofs_verified.n ~feature_flags ~actual_feature_flags:b.feature_flags full_signature.maxes wrap_requests ?tweak_statement - ~dlog_plonk_index:wrap_vk.commitments wrap_main ~typ ~step_vk - ~step_plonk_indices:(Lazy.force step_vks) ~actual_wrap_domains + ~dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) + wrap_main ~typ ~step_vk ~step_plonk_indices:(Lazy.force step_vks) + ~actual_wrap_domains (Impls.Wrap.Keypair.pk (fst (Lazy.force wrap_pk))) proof in @@ -819,7 +827,10 @@ struct ; proofs_verifieds ; max_proofs_verified ; public_input = typ - ; wrap_key = Lazy.map wrap_vk ~f:Verification_key.commitments + ; wrap_key = + Lazy.map wrap_vk ~f:(fun x -> + Plonk_verification_key_evals.map (Verification_key.commitments x) + ~f:(fun x -> [| x |]) ) ; wrap_vk = Lazy.map wrap_vk ~f:Verification_key.index ; wrap_domains ; step_domains @@ -847,7 +858,9 @@ module Side_loaded = struct ~log_2_domain_size:(Lazy.force d.wrap_vk).domain.log_size_of_group in { wrap_vk = Some (Lazy.force d.wrap_vk) - ; wrap_index = Lazy.force d.wrap_key + ; wrap_index = + Plonk_verification_key_evals.map (Lazy.force d.wrap_key) ~f:(fun x -> + x.(0) ) ; max_proofs_verified = Pickles_base.Proofs_verified.of_nat (Nat.Add.n d.max_proofs_verified) ; actual_wrap_domain_size diff --git a/src/lib/pickles/compile.mli b/src/lib/pickles/compile.mli index f54ebfe93ed..8adb2de2955 100644 --- a/src/lib/pickles/compile.mli +++ b/src/lib/pickles/compile.mli @@ -154,8 +154,8 @@ type ('max_proofs_verified, 'branches, 'prev_varss) wrap_main_generic = , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Hlist.Length.t - -> ( ( Wrap_main_inputs.Inner_curve.Constant.t - , Wrap_main_inputs.Inner_curve.Constant.t option ) + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) Wrap_verifier.index' , 'branches ) Vector.t diff --git a/src/lib/pickles/pickles.ml b/src/lib/pickles/pickles.ml index 55fc0828357..8d5690b17ff 100644 --- a/src/lib/pickles/pickles.ml +++ 
b/src/lib/pickles/pickles.ml @@ -224,7 +224,9 @@ module Make_str (_ : Wire_types.Concrete) = struct ~log_2_domain_size:(Lazy.force d.wrap_vk).domain.log_size_of_group in { wrap_vk = Some (Lazy.force d.wrap_vk) - ; wrap_index = Lazy.force d.wrap_key + ; wrap_index = + Plonk_verification_key_evals.map (Lazy.force d.wrap_key) + ~f:(fun x -> x.(0)) ; max_proofs_verified = Pickles_base.Proofs_verified.of_nat (Nat.Add.n d.max_proofs_verified) @@ -1291,7 +1293,10 @@ module Make_str (_ : Wire_types.Concrete) = struct S.f branch_data () ~feature_flags ~prevs_length:prev_vars_length ~self ~public_input:(Input typ) ~auxiliary_typ:Impls.Step.Typ.unit ~step_domains - ~self_dlog_plonk_index:wrap_vk.commitments + ~self_dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) (Impls.Step.Keypair.pk (fst (Lazy.force step_pk))) wrap_vk.index in @@ -1772,7 +1777,11 @@ module Make_str (_ : Wire_types.Concrete) = struct : _ P.Base.Wrap.t ) in wrap ~max_proofs_verified:Max_proofs_verified.n - full_signature.maxes ~dlog_plonk_index:wrap_vk.commitments + full_signature.maxes + ~dlog_plonk_index: + ((* TODO *) Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + wrap_vk.commitments ) wrap_main A_value.to_field_elements ~pairing_vk ~step_domains:b.domains ~pairing_plonk_indices:(Lazy.force step_vks) ~wrap_domains @@ -1800,7 +1809,12 @@ module Make_str (_ : Wire_types.Concrete) = struct ; proofs_verifieds ; max_proofs_verified = (module Max_proofs_verified) ; public_input = typ - ; wrap_key = Lazy.map wrap_vk ~f:Verification_key.commitments + ; wrap_key = + Lazy.map wrap_vk ~f:(fun x -> + (* TODO *) + Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + (Verification_key.commitments x) ) ; wrap_vk = Lazy.map wrap_vk ~f:Verification_key.index ; wrap_domains ; step_domains diff --git a/src/lib/pickles/plonk_checks/plonk_checks.ml b/src/lib/pickles/plonk_checks/plonk_checks.ml index f49cc642ac1..900970cfb2d 100644 --- a/src/lib/pickles/plonk_checks/plonk_checks.ml +++ b/src/lib/pickles/plonk_checks/plonk_checks.ml @@ -108,8 +108,9 @@ let expand_feature_flags (type boolean) (features : boolean Plonk_types.Features.t) : boolean all_feature_flags = features |> Plonk_types.Features.map ~f:(fun x -> lazy x) - |> Plonk_types.Features.to_full ~or_:(fun x y -> - lazy B.(Lazy.force x ||| Lazy.force y) ) + |> Plonk_types.Features.to_full + ~or_:(fun x y -> lazy B.(Lazy.force x ||| Lazy.force y)) + ~any:(fun x -> lazy (B.any (List.map ~f:Lazy.force x))) let lookup_tables_used feature_flags = let module Bool = struct diff --git a/src/lib/pickles/requests.ml b/src/lib/pickles/requests.ml index 4ad0875d1c1..352f4a22eaa 100644 --- a/src/lib/pickles/requests.ml +++ b/src/lib/pickles/requests.ml @@ -129,7 +129,7 @@ module Step = struct , local_branches ) H3.T(Per_proof_witness.Constant.No_app_state).t t - | Wrap_index : Tock.Curve.Affine.t Plonk_verification_key_evals.t t + | Wrap_index : Tock.Curve.Affine.t array Plonk_verification_key_evals.t t | App_state : statement t | Return_value : return_value -> unit t | Auxiliary_value : auxiliary_value -> unit t @@ -183,7 +183,8 @@ module Step = struct , local_branches ) H3.T(Per_proof_witness.Constant.No_app_state).t t - | Wrap_index : Tock.Curve.Affine.t Plonk_verification_key_evals.t t + | Wrap_index : + Tock.Curve.Affine.t array Plonk_verification_key_evals.t t | App_state : statement t | Return_value : return_value -> unit t | Auxiliary_value : auxiliary_value -> unit t diff --git a/src/lib/pickles/requests.mli 
b/src/lib/pickles/requests.mli index 9eef00a1c78..6f9439de8ff 100644 --- a/src/lib/pickles/requests.mli +++ b/src/lib/pickles/requests.mli @@ -35,7 +35,7 @@ module Step : sig Hlist.H3.T(Per_proof_witness.Constant.No_app_state).t Snarky_backendless.Request.t | Wrap_index : - Backend.Tock.Curve.Affine.t Plonk_verification_key_evals.t + Backend.Tock.Curve.Affine.t array Plonk_verification_key_evals.t Snarky_backendless.Request.t | App_state : statement Snarky_backendless.Request.t | Return_value : return_value -> unit Snarky_backendless.Request.t diff --git a/src/lib/pickles/step.ml b/src/lib/pickles/step.ml index 48df56feaf5..ed38eb776ab 100644 --- a/src/lib/pickles/step.ml +++ b/src/lib/pickles/step.ml @@ -121,7 +121,7 @@ struct let expand_proof : type var value local_max_proofs_verified m. Impls.Wrap.Verification_key.t - -> 'a + -> _ array Plonk_verification_key_evals.t -> value -> (local_max_proofs_verified, local_max_proofs_verified) Proof.t -> (var, value, local_max_proofs_verified, m) Tag.t diff --git a/src/lib/pickles/step.mli b/src/lib/pickles/step.mli index e07411fab6d..54a2271ed40 100644 --- a/src/lib/pickles/step.mli +++ b/src/lib/pickles/step.mli @@ -33,7 +33,7 @@ module Make -> step_domains:(Import.Domains.t, 'self_branches) Pickles_types.Vector.t -> feature_flags:Opt.Flag.t Plonk_types.Features.Full.t -> self_dlog_plonk_index: - Backend.Tick.Inner_curve.Affine.t + Backend.Tick.Inner_curve.Affine.t array Pickles_types.Plonk_verification_key_evals.t -> public_input: ( 'var diff --git a/src/lib/pickles/step_main.ml b/src/lib/pickles/step_main.ml index 59079d407db..d5313005dca 100644 --- a/src/lib/pickles/step_main.ml +++ b/src/lib/pickles/step_main.ml @@ -339,9 +339,11 @@ let step_main : in Req.Compute_prev_proof_parts previous_proof_statements ) ; let dlog_plonk_index = + let num_chunks = (* TODO *) 1 in exists ~request:(fun () -> Req.Wrap_index) - (Plonk_verification_key_evals.typ Inner_curve.typ) + (Plonk_verification_key_evals.typ + (Typ.array ~length:num_chunks Inner_curve.typ) ) and prevs = exists (Prev_typ.f prev_proof_typs) ~request:(fun () -> Req.Proof_with_datas ) diff --git a/src/lib/pickles/step_verifier.ml b/src/lib/pickles/step_verifier.ml index 2008c39b4cc..702e9804b71 100644 --- a/src/lib/pickles/step_verifier.ml +++ b/src/lib/pickles/step_verifier.ml @@ -241,6 +241,13 @@ struct let combined_polynomial (* Corresponds to xi in figure 7 of WTS *) = with_label "combined_polynomial" (fun () -> Pcs_batch.combine_split_commitments pcs_batch + ~reduce_without_degree_bound:Array.to_list + ~reduce_with_degree_bound:(fun { Plonk_types.Poly_comm + .With_degree_bound + .unshifted + ; shifted + } -> + Array.to_list unshifted @ [ shifted ] ) ~scale_and_add:(fun ~(acc : [ `Maybe_finite of Boolean.var * Inner_curve.t @@ -572,14 +579,11 @@ struct let without_degree_bound = Vector.append (Vector.map sg_old ~f:(fun g -> [| g |])) - ( [| x_hat |] :: [| ft_comm |] :: z_comm :: [| m.generic_comm |] - :: [| m.psm_comm |] :: [| m.complete_add_comm |] - :: [| m.mul_comm |] :: [| m.emul_comm |] - :: [| m.endomul_scalar_comm |] + ( [| x_hat |] :: [| ft_comm |] :: z_comm :: m.generic_comm + :: m.psm_comm :: m.complete_add_comm :: m.mul_comm :: m.emul_comm + :: m.endomul_scalar_comm :: Vector.append w_comm - (Vector.append - (Vector.map m.coefficients_comm ~f:(fun g -> [| g |])) - (Vector.map sigma_comm_init ~f:(fun g -> [| g |])) + (Vector.append m.coefficients_comm sigma_comm_init (snd Plonk_types.(Columns.add Permuts_minus_1.n)) ) (snd Plonk_types.( @@ -1107,8 +1111,9 @@ struct let sponge = 
Sponge.create sponge_params in Array.iter (Types.index_to_field_elements - ~g:(fun (z : Inputs.Inner_curve.t) -> - List.to_array (Inner_curve.to_field_elements z) ) + ~g: + (Array.concat_map ~f:(fun (z : Inputs.Inner_curve.t) -> + List.to_array (Inner_curve.to_field_elements z) ) ) index ) ~f:(fun x -> Sponge.absorb sponge (`Field x)) ; sponge diff --git a/src/lib/pickles/step_verifier.mli b/src/lib/pickles/step_verifier.mli index 3feaa3b075d..1bbc74d88b1 100644 --- a/src/lib/pickles/step_verifier.mli +++ b/src/lib/pickles/step_verifier.mli @@ -76,7 +76,7 @@ val finalize_other_proof : val hash_messages_for_next_step_proof : index: - Step_main_inputs.Inner_curve.t + Step_main_inputs.Inner_curve.t array Pickles_types.Plonk_verification_key_evals.t -> ('s -> Step_main_inputs.Impl.Field.t array) -> ( ( 'a @@ -91,7 +91,7 @@ val hash_messages_for_next_step_proof : val hash_messages_for_next_step_proof_opt : index: - Step_main_inputs.Inner_curve.t + Step_main_inputs.Inner_curve.t array Pickles_types.Plonk_verification_key_evals.t -> ('s -> Step_main_inputs.Impl.Field.t array) -> Step_main_inputs.Sponge.t @@ -135,7 +135,7 @@ val verify : Step_main_inputs.Impl.field Composition_types.Branch_data.Proofs_verified.One_hot.Checked.t ] -> wrap_verification_key: - Step_main_inputs.Inner_curve.t + Step_main_inputs.Inner_curve.t array Pickles_types.Plonk_verification_key_evals.t -> ( Step_main_inputs.Impl.field Limb_vector.Challenge.t , Step_main_inputs.Impl.field Limb_vector.Challenge.t diff --git a/src/lib/pickles/types_map.ml b/src/lib/pickles/types_map.ml index a6ce9ba9d3f..3496edd08f3 100644 --- a/src/lib/pickles/types_map.ml +++ b/src/lib/pickles/types_map.ml @@ -16,7 +16,7 @@ module Basic = struct ; public_input : ('var, 'value) Impls.Step.Typ.t ; branches : 'n2 Nat.t ; wrap_domains : Domains.t - ; wrap_key : Tick.Inner_curve.Affine.t Plonk_verification_key_evals.t + ; wrap_key : Tick.Inner_curve.Affine.t array Plonk_verification_key_evals.t ; wrap_vk : Impls.Wrap.Verification_key.t ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t } @@ -59,7 +59,10 @@ module Side_loaded = struct let wrap_key, wrap_vk = match ephemeral with | Some { index = `In_prover i | `In_both (i, _) } -> - (i.wrap_index, i.wrap_vk) + let wrap_index = + Plonk_verification_key_evals.map i.wrap_index ~f:(fun x -> [| x |]) + in + (wrap_index, i.wrap_vk) | _ -> failwithf "Side_loaded.to_basic: Expected `In_prover (%s)" __LOC__ () in @@ -95,7 +98,8 @@ module Compiled = struct ; proofs_verifieds : (int, 'branches) Vector.t (* For each branch in this rule, how many predecessor proofs does it have? 
*) ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t - ; wrap_key : Tick.Inner_curve.Affine.t Plonk_verification_key_evals.t Lazy.t + ; wrap_key : + Tick.Inner_curve.Affine.t array Plonk_verification_key_evals.t Lazy.t ; wrap_vk : Impls.Wrap.Verification_key.t Lazy.t ; wrap_domains : Domains.t ; step_domains : (Domains.t, 'branches) Vector.t @@ -134,7 +138,7 @@ module For_step = struct ; proofs_verifieds : [ `Known of (Impls.Step.Field.t, 'branches) Vector.t | `Side_loaded ] ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t - ; wrap_key : inner_curve_var Plonk_verification_key_evals.t + ; wrap_key : inner_curve_var array Plonk_verification_key_evals.t ; wrap_domain : [ `Known of Domain.t | `Side_loaded of @@ -157,11 +161,14 @@ module For_step = struct failwithf "For_step.side_loaded: Expected `In_circuit (%s)" __LOC__ () in let T = Nat.eq_exn branches Side_loaded_verification_key.Max_branches.n in + let wrap_key = + Plonk_verification_key_evals.map index.wrap_index ~f:(fun x -> [| x |]) + in { branches ; max_proofs_verified ; public_input ; proofs_verifieds = `Side_loaded - ; wrap_key = index.wrap_index + ; wrap_key ; wrap_domain = `Side_loaded index.actual_wrap_domain_size ; step_domains = `Side_loaded ; feature_flags @@ -186,7 +193,7 @@ module For_step = struct ; public_input ; wrap_key = Plonk_verification_key_evals.map (Lazy.force wrap_key) - ~f:Step_main_inputs.Inner_curve.constant + ~f:(Array.map ~f:Step_main_inputs.Inner_curve.constant) ; wrap_domain = `Known wrap_domains.h ; step_domains = `Known step_domains ; feature_flags diff --git a/src/lib/pickles/types_map.mli b/src/lib/pickles/types_map.mli index 991be8ff433..7377fce3206 100644 --- a/src/lib/pickles/types_map.mli +++ b/src/lib/pickles/types_map.mli @@ -11,7 +11,7 @@ module Basic : sig ; branches : 'n2 Pickles_types.Nat.t ; wrap_domains : Import.Domains.t ; wrap_key : - Backend.Tick.Inner_curve.Affine.t + Backend.Tick.Inner_curve.Affine.t array Pickles_types.Plonk_verification_key_evals.t ; wrap_vk : Impls.Wrap.Verification_key.t ; feature_flags : Opt.Flag.t Plonk_types.Features.Full.t @@ -69,7 +69,7 @@ module Compiled : sig (* For each branch in this rule, how many predecessor proofs does it have? 
*) ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t ; wrap_key : - Backend.Tick.Inner_curve.Affine.t + Backend.Tick.Inner_curve.Affine.t array Pickles_types.Plonk_verification_key_evals.t Lazy.t ; wrap_vk : Impls.Wrap.Verification_key.t Lazy.t @@ -88,7 +88,8 @@ module For_step : sig [ `Known of (Impls.Step.Field.t, 'branches) Pickles_types.Vector.t | `Side_loaded ] ; public_input : ('a_var, 'a_value) Impls.Step.Typ.t - ; wrap_key : inner_curve_var Pickles_types.Plonk_verification_key_evals.t + ; wrap_key : + inner_curve_var array Pickles_types.Plonk_verification_key_evals.t ; wrap_domain : [ `Known of Import.Domain.t | `Side_loaded of diff --git a/src/lib/pickles/verify.ml b/src/lib/pickles/verify.ml index b18a252b068..32f2b3464b0 100644 --- a/src/lib/pickles/verify.ml +++ b/src/lib/pickles/verify.ml @@ -157,7 +157,10 @@ let verify_heterogenous (ts : Instance.t list) = Common.hash_messages_for_next_step_proof ~app_state:A_value.to_field_elements (Reduced_messages_for_next_proof_over_same_field.Step.prepare - ~dlog_plonk_index:key.commitments + ~dlog_plonk_index: + (Plonk_verification_key_evals.map + ~f:(fun x -> [| x |]) + key.commitments ) { t.statement.messages_for_next_step_proof with app_state } ) ; proof_state = { deferred_values = diff --git a/src/lib/pickles/wrap.mli b/src/lib/pickles/wrap.mli index 71d58c592b6..c29d587a459 100644 --- a/src/lib/pickles/wrap.mli +++ b/src/lib/pickles/wrap.mli @@ -8,7 +8,8 @@ val wrap : and type ns = 'max_local_max_proofs_verifieds ) -> ('max_proofs_verified, 'max_local_max_proofs_verifieds) Requests.Wrap.t -> dlog_plonk_index: - Backend.Tock.Curve.Affine.t Pickles_types.Plonk_verification_key_evals.t + Backend.Tock.Curve.Affine.t array + Pickles_types.Plonk_verification_key_evals.t -> ( ( Impls.Wrap.Impl.Field.t , Impls.Wrap.Impl.Field.t Composition_types.Scalar_challenge.t , Impls.Wrap.Impl.Field.t Pickles_types.Shifted_value.Type1.t diff --git a/src/lib/pickles/wrap_domains.ml b/src/lib/pickles/wrap_domains.ml index 920b942b0cf..e8d27e2c20c 100644 --- a/src/lib/pickles/wrap_domains.ml +++ b/src/lib/pickles/wrap_domains.ml @@ -28,7 +28,11 @@ struct let dummy_step_keys = lazy (Vector.init num_choices ~f:(fun _ -> - let g = Backend.Tock.Inner_curve.(to_affine_exn one) in + let num_chunks = (* TODO *) 1 in + let g = + Array.init num_chunks ~f:(fun _ -> + Backend.Tock.Inner_curve.(to_affine_exn one) ) + in Verification_key.dummy_step_commitments g ) ) in Timer.clock __LOC__ ; diff --git a/src/lib/pickles/wrap_main.ml b/src/lib/pickles/wrap_main.ml index b751b3a2db5..ccdfbe2b8d2 100644 --- a/src/lib/pickles/wrap_main.ml +++ b/src/lib/pickles/wrap_main.ml @@ -90,8 +90,8 @@ let wrap_main , max_local_max_proofs_verifieds ) Full_signature.t ) (pi_branches : (prev_varss, branches) Hlist.Length.t) (step_keys : - ( ( Wrap_main_inputs.Inner_curve.Constant.t - , Wrap_main_inputs.Inner_curve.Constant.t option ) + ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) Wrap_verifier.index' , branches ) Vector.t @@ -214,11 +214,11 @@ let wrap_main (Vector.map (Lazy.force step_keys) ~f: (Plonk_verification_key_evals.Step.map - ~f:Inner_curve.constant ~f_opt:(function + ~f:(Array.map ~f:Inner_curve.constant) ~f_opt:(function | None -> Opt.nothing | Some x -> - Opt.just (Inner_curve.constant x) ) ) ) ) + Opt.just (Array.map ~f:Inner_curve.constant x) ) ) ) ) in let prev_step_accs = with_label __LOC__ (fun () -> diff --git a/src/lib/pickles/wrap_main.mli b/src/lib/pickles/wrap_main.mli index 
bb834f15b11..04d903002bc 100644 --- a/src/lib/pickles/wrap_main.mli +++ b/src/lib/pickles/wrap_main.mli @@ -9,8 +9,8 @@ val wrap_main : , 'max_local_max_proofs_verifieds ) Full_signature.t -> ('prev_varss, 'branches) Pickles_types.Hlist.Length.t - -> ( ( Wrap_main_inputs.Inner_curve.Constant.t - , Wrap_main_inputs.Inner_curve.Constant.t option ) + -> ( ( Wrap_main_inputs.Inner_curve.Constant.t array + , Wrap_main_inputs.Inner_curve.Constant.t array option ) Wrap_verifier.index' , 'branches ) Pickles_types.Vector.t diff --git a/src/lib/pickles/wrap_verifier.ml b/src/lib/pickles/wrap_verifier.ml index 049c4e22f45..b89710f2f19 100644 --- a/src/lib/pickles/wrap_verifier.ml +++ b/src/lib/pickles/wrap_verifier.ml @@ -192,10 +192,11 @@ struct let choose_key : type n. n One_hot_vector.t - -> ( (Inner_curve.t, (Inner_curve.t, Boolean.var) Opt.t) index' + -> ( (Inner_curve.t array, (Inner_curve.t array, Boolean.var) Opt.t) index' , n ) Vector.t - -> (Inner_curve.t, (Inner_curve.t, Boolean.var) Opt.t) index' = + -> (Inner_curve.t array, (Inner_curve.t array, Boolean.var) Opt.t) index' + = let open Tuple_lib in fun bs keys -> let open Field in @@ -204,16 +205,16 @@ struct keys ~f:(fun b key -> Plonk_verification_key_evals.Step.map key - ~f:(fun g -> Double.map g ~f:(( * ) (b :> t))) + ~f:(Array.map ~f:(fun g -> Double.map g ~f:(( * ) (b :> t)))) ~f_opt:(function (* Here, we split the 3 variants into 3 separate accumulators. This allows us to only compute the 'maybe' flag when we need to, and - allows us to fall back to the basically-free `None` when a - feature is entirely unused, or to the less expensive `Some` if + allows us to fall back to the basically-free `Nothing` when a + feature is entirely unused, or to the less expensive `Just` if it is used for every circuit. - In particular, it is important that we generate exactly `None` - when none of the optional gates are used, otherwise we will - change the serialization of the protocol circuits. + In particular, it is important that we generate exactly + `Nothing` when none of the optional gates are used, otherwise + we will change the serialization of the protocol circuits. *) | Opt.Nothing -> ([], [], [ b ]) @@ -223,45 +224,49 @@ struct ([ (b, x) ], [], []) ) ) |> Vector.reduce_exn ~f: - (Plonk_verification_key_evals.Step.map2 ~f:(Double.map2 ~f:( + )) + (Plonk_verification_key_evals.Step.map2 + ~f:(Array.map2_exn ~f:(Double.map2 ~f:( + ))) ~f_opt:(fun (yes_1, maybe_1, no_1) (yes_2, maybe_2, no_2) -> (yes_1 @ yes_2, maybe_1 @ maybe_2, no_1 @ no_2) ) ) |> Plonk_verification_key_evals.Step.map ~f:Fn.id ~f_opt:(function | [], [], _nones -> - (* We only have `None`s, so we can emit exactly `None` without - further computation. + (* We only have `Nothing`s, so we can emit exactly `Nothing` + without further computation. *) Opt.Nothing - | somes, [], [] -> + | justs, [], [] -> (* Special case: we don't need to compute the 'maybe' bool - because we know statically that all entries are `Some`. + because we know statically that all entries are `Just`. 
*) let sum = - somes + justs |> List.map ~f:(fun ((b : Boolean.var), g) -> - Double.map g ~f:(( * ) (b :> t)) ) - |> List.reduce_exn ~f:(Double.map2 ~f:( + )) + Array.map g ~f:(Double.map ~f:(( * ) (b :> t))) ) + |> List.reduce_exn + ~f:(Array.map2_exn ~f:(Double.map2 ~f:( + ))) in Opt.just sum - | somes, maybes, nones -> + | justs, maybes, nones -> let is_none = List.reduce nones ~f:(fun (b1 : Boolean.var) (b2 : Boolean.var) -> Boolean.Unsafe.of_cvar Field.(add (b1 :> t) (b2 :> t)) ) in let none_sum = + let num_chunks = (* TODO *) 1 in Option.map is_none ~f:(fun (b : Boolean.var) -> - Double.map Inner_curve.one ~f:(( * ) (b :> t)) ) + Array.init num_chunks ~f:(fun _ -> + Double.map Inner_curve.one ~f:(( * ) (b :> t)) ) ) in - let some_is_yes, some_sum = - somes + let just_is_yes, just_sum = + justs |> List.map ~f:(fun ((b : Boolean.var), g) -> - (b, Double.map g ~f:(( * ) (b :> t))) ) + (b, Array.map g ~f:(Double.map ~f:(( * ) (b :> t)))) ) |> List.reduce ~f:(fun ((b1 : Boolean.var), g1) ((b2 : Boolean.var), g2) -> ( Boolean.Unsafe.of_cvar Field.(add (b1 :> t) (b2 :> t)) - , Double.map2 ~f:( + ) g1 g2 ) ) + , Array.map2_exn ~f:(Double.map2 ~f:( + )) g1 g2 ) ) |> fun x -> (Option.map ~f:fst x, Option.map ~f:snd x) in let maybe_is_yes, maybe_sum = @@ -269,27 +274,40 @@ struct |> List.map ~f:(fun ((b : Boolean.var), (b_g : Boolean.var), g) -> ( Boolean.Unsafe.of_cvar Field.(mul (b :> t) (b_g :> t)) - , Double.map g ~f:(( * ) (b :> t)) ) ) + , Array.map g ~f:(Double.map ~f:(( * ) (b :> t))) ) ) |> List.reduce ~f:(fun ((b1 : Boolean.var), g1) ((b2 : Boolean.var), g2) -> ( Boolean.Unsafe.of_cvar Field.(add (b1 :> t) (b2 :> t)) - , Double.map2 ~f:( + ) g1 g2 ) ) + , Array.map2_exn ~f:(Double.map2 ~f:( + )) g1 g2 ) ) |> fun x -> (Option.map ~f:fst x, Option.map ~f:snd x) in let is_yes = - [| some_is_yes; maybe_is_yes |] + [| just_is_yes; maybe_is_yes |] |> Array.filter_map ~f:Fn.id |> Array.reduce_exn ~f:(fun (b1 : Boolean.var) (b2 : Boolean.var) -> Boolean.Unsafe.of_cvar ((b1 :> t) + (b2 :> t)) ) in let sum = - [| none_sum; maybe_sum; some_sum |] + [| none_sum; maybe_sum; just_sum |] |> Array.filter_map ~f:Fn.id - |> Array.reduce_exn ~f:(Double.map2 ~f:( + )) + |> Array.reduce_exn + ~f:(Array.map2_exn ~f:(Double.map2 ~f:( + ))) in Opt.Maybe (is_yes, sum) ) + |> Plonk_verification_key_evals.Step.map + ~f:(fun g -> Array.map ~f:(Double.map ~f:(Util.seal (module Impl))) g) + ~f_opt:(function + | Opt.Nothing -> + Opt.Nothing + | Opt.Maybe (b, x) -> + Opt.Maybe + ( Boolean.Unsafe.of_cvar (Util.seal (module Impl) (b :> t)) + , Array.map ~f:(Double.map ~f:(Util.seal (module Impl))) x ) + | Opt.Just x -> + Opt.Just + (Array.map ~f:(Double.map ~f:(Util.seal (module Impl))) x) ) (* TODO: Unify with the code in step_verifier *) let lagrange (type n) @@ -411,12 +429,18 @@ struct [ `Finite of Inner_curve.t | `Maybe_finite of Boolean.var * Inner_curve.t ] - let finite : t -> Boolean.var = function + let _finite : t -> Boolean.var = function | `Finite _ -> Boolean.true_ | `Maybe_finite (b, _) -> b + let assert_finite : t -> unit = function + | `Finite _ -> + () + | `Maybe_finite _ -> + failwith "Not finite" + let add (p : t) (q : Inner_curve.t) = match p with | `Finite p -> @@ -432,10 +456,19 @@ struct end let combine batch ~xi without_bound with_bound = + let reduce_point p = + let point = ref (Point.underlying p.(Array.length p - 1)) in + for i = Array.length p - 2 downto 0 do + point := Point.add p.(i) (Scalar_challenge.endo !point xi) + done ; + !point + in let { Curve_opt.non_zero; point } = 
Pcs_batch.combine_split_commitments batch + ~reduce_with_degree_bound:(fun _ -> assert false) + ~reduce_without_degree_bound:(fun x -> [ x ]) ~scale_and_add:(fun ~(acc : Curve_opt.t) ~xi - (p : (Point.t, Boolean.var) Opt.t) -> + (p : (Point.t array, Boolean.var) Opt.t) -> (* match acc.non_zero, keep with | false, false -> acc | true, false -> acc @@ -443,22 +476,25 @@ struct | true, true -> { point= p + xi * acc; non_zero= true } *) let point keep p = - let point = + let base_point = + let p = p.(Array.length p - 1) in Inner_curve.( - if_ keep - ~then_: - (if_ acc.non_zero - ~then_: - (Point.add p (Scalar_challenge.endo acc.point xi)) - ~else_: - ((* In this branch, the accumulator was zero, so there is no harm in - putting the potentially junk underlying point here. *) - Point.underlying p ) ) - ~else_:acc.point) + if_ acc.non_zero + ~then_:(Point.add p (Scalar_challenge.endo acc.point xi)) + ~else_: + ((* In this branch, the accumulator was zero, so there is no harm in + putting the potentially junk underlying point here. *) + Point.underlying p )) in - let non_zero = - Boolean.(keep &&& Point.finite p ||| acc.non_zero) + let point = ref base_point in + for i = Array.length p - 2 downto 0 do + point := Point.add p.(i) (Scalar_challenge.endo !point xi) + done ; + let point = + Inner_curve.(if_ keep ~then_:!point ~else_:acc.point) in + Array.iter ~f:Point.assert_finite p ; + let non_zero = Boolean.(keep &&& true_ ||| acc.non_zero) in { Curve_opt.non_zero; point } in match p with @@ -473,14 +509,16 @@ struct | Opt.Nothing -> None | Opt.Maybe (keep, p) -> + Array.iter ~f:Point.assert_finite p ; Some - { non_zero = Boolean.(keep &&& Point.finite p) - ; point = Point.underlying p + { non_zero = Boolean.(keep &&& true_) + ; point = reduce_point p } | Opt.Just p -> + Array.iter ~f:Point.assert_finite p ; Some - { non_zero = Boolean.(true_ &&& Point.finite p) - ; point = Point.underlying p + { non_zero = Boolean.(true_ &&& true_) + ; point = reduce_point p } ) without_bound with_bound in @@ -730,7 +768,8 @@ struct let incrementally_verify_proof (type b) (module Max_proofs_verified : Nat.Add.Intf with type n = b) ~actual_proofs_verified_mask ~step_domains ~srs - ~verification_key:(m : _ Plonk_verification_key_evals.Step.t) ~xi ~sponge + ~verification_key:(m : (_ array, _) Plonk_verification_key_evals.Step.t) + ~xi ~sponge ~(public_input : [ `Field of Field.t * Boolean.var | `Packed_bits of Field.t * int ] array ) ~(sg_old : (_, Max_proofs_verified.n) Vector.t) ~advice @@ -740,7 +779,7 @@ struct let sg_old = with_label __LOC__ (fun () -> Vector.map2 actual_proofs_verified_mask sg_old ~f:(fun keep sg -> - [| (keep, sg) |] ) ) + (keep, sg) ) ) in with_label __LOC__ (fun () -> let sample () = Opt.challenge sponge in @@ -752,8 +791,9 @@ struct let index_sponge = Sponge.create sponge_params in List.iter (index_to_field_elements - ~g:(fun (z : Inputs.Inner_curve.t) -> - List.to_array (Inner_curve.to_field_elements z) ) + ~g: + (Array.concat_map ~f:(fun (z : Inputs.Inner_curve.t) -> + List.to_array (Inner_curve.to_field_elements z) ) ) m ) ~f:(fun x -> let (_ : (unit, _) Pickles_types.Opt.t) = @@ -769,7 +809,7 @@ struct absorb sponge without (Array.map gs ~f:(fun g -> (Boolean.true_, g))) in absorb sponge Field (Boolean.true_, index_digest) ; - Vector.iter ~f:(Array.iter ~f:(absorb sponge PC)) sg_old ; + Vector.iter ~f:(absorb sponge PC) sg_old ; let x_hat = let domain = (which_branch, step_domains) in let public_input = @@ -946,11 +986,7 @@ struct let lookup_table_comm = let compute_lookup_table_comm (l 
: _ Messages.Lookup.In_circuit.t) joint_combiner = - let (first_column :: second_column :: rest) = - Vector.map - ~f:(Types.Opt.map ~f:(fun x -> [| x |])) - m.lookup_table_comm - in + let (first_column :: second_column :: rest) = m.lookup_table_comm in let second_column_with_runtime = match (second_column, l.runtime) with | Types.Opt.Nothing, comm | comm, Types.Opt.Nothing -> @@ -1004,10 +1040,7 @@ struct let rest_rev = Vector.rev (first_column :: second_column_with_runtime :: rest) in - let table_ids = - Types.Opt.map m.lookup_table_ids ~f:(fun x -> [| x |]) - in - Vector.fold ~init:table_ids rest_rev ~f:(fun acc comm -> + Vector.fold ~init:m.lookup_table_ids rest_rev ~f:(fun acc comm -> match acc with | Types.Opt.Nothing -> comm @@ -1200,11 +1233,6 @@ struct let append_chain len second first = Vector.append first second len in - let undo_chunking = - Types.Opt.map ~f:(fun x -> - assert (Array.length x = 1) ; - x.(0) ) - in (* sg_old x_hat ft_comm @@ -1214,31 +1242,26 @@ struct w_comms all but last sigma_comm *) - Vector.map sg_old - ~f: - (Array.map ~f:(fun (keep, p) -> - Pickles_types.Opt.Maybe (keep, p) ) ) + Vector.map sg_old ~f:(fun (keep, p) -> + Pickles_types.Opt.Maybe (keep, [| p |]) ) |> append_chain (snd (Max_proofs_verified.add len_6)) ( [ [| x_hat |] ; [| ft_comm |] ; z_comm - ; [| m.generic_comm |] - ; [| m.psm_comm |] - ; [| m.complete_add_comm |] - ; [| m.mul_comm |] - ; [| m.emul_comm |] - ; [| m.endomul_scalar_comm |] + ; m.generic_comm + ; m.psm_comm + ; m.complete_add_comm + ; m.mul_comm + ; m.emul_comm + ; m.endomul_scalar_comm ] |> append_chain len_3_add (Vector.append w_comm - (Vector.append - (Vector.map m.coefficients_comm ~f:(fun g -> - [| g |] ) ) - (Vector.map sigma_comm_init ~f:(fun g -> [| g |])) + (Vector.append m.coefficients_comm sigma_comm_init len_1_add ) len_2_add ) - |> Vector.map ~f:(Array.map ~f:Pickles_types.Opt.just) + |> Vector.map ~f:Pickles_types.Opt.just |> append_chain len_6_add ( [ m.range_check0_comm ; m.range_check1_comm @@ -1247,20 +1270,17 @@ struct ; m.xor_comm ; m.rot_comm ] - |> append_chain len_4_add - (Vector.map ~f:undo_chunking lookup_sorted) + |> append_chain len_4_add lookup_sorted |> append_chain len_5_add - [ undo_chunking - @@ Pickles_types.Opt.map messages.lookup - ~f:(fun l -> l.aggreg) - ; undo_chunking lookup_table_comm + [ Pickles_types.Opt.map messages.lookup ~f:(fun l -> + l.aggreg ) + ; lookup_table_comm ; m.runtime_tables_selector ; m.lookup_selector_xor ; m.lookup_selector_lookup ; m.lookup_selector_range_check ; m.lookup_selector_ffmul - ] - |> Vector.map ~f:(fun x -> [| x |]) ) ) + ] ) ) in check_bulletproof ~pcs_batch: @@ -1270,8 +1290,8 @@ struct ~polynomials: ( Vector.map without_degree_bound ~f: - (Array.map - ~f:(Pickles_types.Opt.map ~f:(fun x -> `Finite x)) ) + (Pickles_types.Opt.map + ~f:(Array.map ~f:(fun x -> `Finite x)) ) , [] ) in assert_eq_plonk diff --git a/src/lib/pickles/wrap_verifier.mli b/src/lib/pickles/wrap_verifier.mli index 8a537cd3ab0..baffd665ecd 100644 --- a/src/lib/pickles/wrap_verifier.mli +++ b/src/lib/pickles/wrap_verifier.mli @@ -62,8 +62,8 @@ val incrementally_verify_proof : -> step_domains:(Import.Domains.t, 'a) Pickles_types.Vector.t -> srs:Kimchi_bindings.Protocol.SRS.Fp.t -> verification_key: - ( Wrap_main_inputs.Inner_curve.t - , ( Wrap_main_inputs.Inner_curve.t + ( Wrap_main_inputs.Inner_curve.t array + , ( Wrap_main_inputs.Inner_curve.t array , Impls.Wrap.Boolean.var ) Pickles_types.Opt.t ) Pickles_types.Plonk_verification_key_evals.Step.t @@ -144,15 +144,15 @@ val 
finalize_other_proof : val choose_key : 'n. 'n One_hot_vector.t - -> ( ( Wrap_main_inputs.Inner_curve.t - , ( Wrap_main_inputs.Inner_curve.t + -> ( ( Wrap_main_inputs.Inner_curve.t array + , ( Wrap_main_inputs.Inner_curve.t array , Impls.Wrap.Boolean.var ) Pickles_types.Opt.t ) index' , 'n ) Pickles_types.Vector.t - -> ( Wrap_main_inputs.Inner_curve.t - , ( Wrap_main_inputs.Inner_curve.t + -> ( Wrap_main_inputs.Inner_curve.t array + , ( Wrap_main_inputs.Inner_curve.t array , Impls.Wrap.Boolean.var ) Pickles_types.Opt.t ) index' diff --git a/src/lib/pickles_types/pcs_batch.ml b/src/lib/pickles_types/pcs_batch.ml index e4a37ef5d20..e115635c46e 100644 --- a/src/lib/pickles_types/pcs_batch.ml +++ b/src/lib/pickles_types/pcs_batch.ml @@ -65,15 +65,16 @@ let combine_evaluations (type f) t ~crs_max_degree ~(mul : f -> f -> f) ~add ~shifted_pow:(fun deg x -> pow x (crs_max_degree - deg)) ~mul ~add ~one ~evaluation_point ~xi -open Plonk_types.Poly_comm - -let combine_split_commitments _t ~scale_and_add ~init:i ~xi (type n) +let combine_split_commitments _t ~scale_and_add ~init:i ~xi + ~reduce_without_degree_bound ~reduce_with_degree_bound (type n) (without_degree_bound : (_, n) Vector.t) with_degree_bound = let flat = - List.concat_map (Vector.to_list without_degree_bound) ~f:Array.to_list - @ List.concat_map (Vector.to_list with_degree_bound) - ~f:(fun { With_degree_bound.unshifted; shifted } -> - Array.to_list unshifted @ [ shifted ] ) + List.concat_map + (Vector.to_list without_degree_bound) + ~f:reduce_without_degree_bound + @ List.concat_map + (Vector.to_list with_degree_bound) + ~f:reduce_with_degree_bound in let rec go = function | [] -> diff --git a/src/lib/pickles_types/pcs_batch.mli b/src/lib/pickles_types/pcs_batch.mli index 7317ae097ec..dee36ea90b4 100644 --- a/src/lib/pickles_types/pcs_batch.mli +++ b/src/lib/pickles_types/pcs_batch.mli @@ -46,15 +46,15 @@ val combine_evaluations' : -> ('f, 'm) Vector.t -> 'f -open Plonk_types.Poly_comm - val combine_split_commitments : (_, 'n, 'm) t -> scale_and_add:(acc:'g_acc -> xi:'f -> 'g -> 'g_acc) -> init:('g -> 'g_acc option) -> xi:'f - -> ('g Without_degree_bound.t, 'n) Vector.t - -> ('g With_degree_bound.t, 'm) Vector.t + -> reduce_without_degree_bound:('without_degree_bound -> 'g list) + -> reduce_with_degree_bound:('with_degree_bound -> 'g list) + -> ('without_degree_bound, 'n) Vector.t + -> ('with_degree_bound, 'm) Vector.t -> 'g_acc val combine_split_evaluations : diff --git a/src/lib/pickles_types/plonk_types.ml b/src/lib/pickles_types/plonk_types.ml index 74a5d259638..452e6d81ff2 100644 --- a/src/lib/pickles_types/plonk_types.ml +++ b/src/lib/pickles_types/plonk_types.ml @@ -248,7 +248,7 @@ module Features = struct ; runtime_tables } - let to_full ~or_:( ||| ) + let to_full ~or_:( ||| ) ?(any = List.reduce_exn ~f:( ||| )) { range_check0 ; range_check1 ; foreign_field_add @@ -277,12 +277,15 @@ module Features = struct in let table_width_at_least_1 = (* RangeCheck, ForeignFieldMul have max_joint_size = 1 *) - table_width_at_least_2 ||| lookup_pattern_range_check - ||| foreign_field_mul + any + [ table_width_at_least_2 + ; lookup_pattern_range_check + ; foreign_field_mul + ] in let lookups_per_row_4 = (* Xor, RangeCheckGate, ForeignFieldMul, have max_lookups_per_row = 4 *) - lookup_pattern_xor ||| lookup_pattern_range_check ||| foreign_field_mul + any [ lookup_pattern_xor; lookup_pattern_range_check; foreign_field_mul ] in let lookups_per_row_3 = (* Lookup has max_lookups_per_row = 3 *) diff --git a/src/lib/pickles_types/plonk_types.mli 
b/src/lib/pickles_types/plonk_types.mli index 769ce96ae61..7efe9b0d096 100644 --- a/src/lib/pickles_types/plonk_types.mli +++ b/src/lib/pickles_types/plonk_types.mli @@ -55,7 +55,11 @@ module Features : sig end end] - val to_full : or_:('bool -> 'bool -> 'bool) -> 'bool t -> 'bool Full.t + val to_full : + or_:('bool -> 'bool -> 'bool) + -> ?any:('bool list -> 'bool) + -> 'bool t + -> 'bool Full.t val of_full : 'a Full.t -> 'a t
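
The chunk handling that recurs through this patch (reduce_chunks in Common.ft_comm, reduce_point and the per-array loops in Wrap_verifier.combine) collapses an array of polynomial-commitment chunks into a single point with a Horner-style fold: scale the running accumulator by zeta^max_poly_size (plonk.zeta_to_srs_length), or by the endoscaling of xi on the wrap side, then add the next chunk. A minimal standalone sketch of that fold follows; the helper and the integer usage are illustrative, not code from this patch.

(* Horner-style reduction of chunked commitments:
   [| c0; c1; ...; c(n-1) |]  |->  c0 + z * (c1 + z * (... + z * c(n-1))),
   where z is zeta^max_poly_size (zeta_to_srs_length in the circuit). *)
let reduce_chunks ~add ~scale ~z (chunks : 'g array) : 'g =
  let n = Array.length chunks in
  let acc = ref chunks.(n - 1) in
  for i = n - 2 downto 0 do
    acc := add chunks.(i) (scale !acc z)
  done ;
  !acc

let () =
  (* Integers stand in for curve points so the fold order is easy to check:
     [| 2; 3; 4 |] with z = 10 gives 2 + 10 * (3 + 10 * 4) = 432. *)
  assert (reduce_chunks ~add:( + ) ~scale:( * ) ~z:10 [| 2; 3; 4 |] = 432)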
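
Until the wrap verification key itself stores chunked commitments, the (* TODO *) call sites above bridge the old and new representations by wrapping each commitment in a one-element array (fun x -> [| x |]) and projecting back out with fun x -> x.(0). A sketch of that round trip, with map standing in for Plonk_verification_key_evals.map and a plain list standing in for the key record.

(* Lift an unchunked key (one commitment per column) into the chunked
   representation used by the rest of the pipeline, and project it back.
   Sound only while num_chunks = 1, which is what the TODOs record. *)
let lift_to_chunks ~map vk = map vk ~f:(fun c -> [| c |])

let project_single_chunk ~map vk = map vk ~f:(fun chunks -> chunks.(0))

let () =
  let map l ~f = List.map f l in
  assert (
    project_single_chunk ~map (lift_to_chunks ~map [ 1; 2; 3 ]) = [ 1; 2; 3 ] )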
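
Pcs_batch.combine_split_commitments no longer assumes the Poly_comm.Without_degree_bound.t and With_degree_bound.t layouts; each caller now supplies reduce_without_degree_bound and reduce_with_degree_bound to flatten its own commitment representation into a list of group elements, and the xi-weighted fold runs over the concatenation. A simplified sketch of that shape, with init stripped of its option and a stand-in record for Plonk_types.Poly_comm.With_degree_bound.

type 'g with_degree_bound = { unshifted : 'g array; shifted : 'g }

(* Simplified skeleton of the new combine: flatten with the caller-supplied
   reducers, then Horner-fold with the challenge xi from the last commitment
   back to the first, so the result is sum_i xi^i * c_i. *)
let combine ~scale_and_add ~init ~xi ~reduce_without_degree_bound
    ~reduce_with_degree_bound without_degree_bound with_degree_bound =
  let flat =
    List.concat_map reduce_without_degree_bound without_degree_bound
    @ List.concat_map reduce_with_degree_bound with_degree_bound
  in
  match List.rev flat with
  | [] ->
      failwith "combine: nothing to combine"
  | last :: rest ->
      List.fold_left (fun acc c -> scale_and_add ~acc ~xi c) (init last) rest

(* The step verifier passes [Array.to_list] for chunked commitments and
   [unshifted @ [shifted]] for degree-bounded ones; the wrap verifier keeps
   each array as a single element and reduces it itself (see the wrap hunk
   above).  Integer stand-ins again: the result here is 4321. *)
let _example =
  combine
    ~scale_and_add:(fun ~acc ~xi c -> c + (xi * acc))
    ~init:(fun c -> c) ~xi:10
    ~reduce_without_degree_bound:Array.to_list
    ~reduce_with_degree_bound:(fun { unshifted; shifted } ->
      Array.to_list unshifted @ [ shifted ] )
    [ [| 1; 2 |] ] [ { unshifted = [| 3 |]; shifted = 4 } ]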
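
The optional ?any argument added to Features.to_full defaults to folding the binary or_, so existing callers are unchanged, while callers working with lazy or in-circuit booleans can supply an n-ary disjunction evaluated in one go, as plonk_checks.ml does with B.any above. A small sketch of the two calling styles using plain and lazy booleans; the lazy helpers are illustrative.

(* Default: the binary [or_] is folded over the list internally. *)
let full_of_bools (features : bool Plonk_types.Features.t) =
  Plonk_types.Features.to_full ~or_:( || ) features

(* Lazy booleans, mirroring the plonk_checks call site: a dedicated [any]
   forces the whole list once instead of building a chain of binary ors. *)
let or_lazy x y = lazy (Lazy.force x || Lazy.force y)

let any_lazy xs = lazy (List.exists Lazy.force xs)

let full_of_lazy_bools (features : bool lazy_t Plonk_types.Features.t) =
  Plonk_types.Features.to_full ~or_:or_lazy ~any:any_lazy features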