Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

steps feature to extend execution beyond N in debug mode #46

Merged
merged 3 commits into from
Sep 28, 2022
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 1 addition & 6 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
"verify": ". ./pre.sh && $PILSTARK/main_verifier.js $PIL -s $BDIR/zkevm.starkstruct.json -o $BDIR/zkevm.proof.json -b $BDIR/zkevm.public.json -v $BDIR/zkevm.verkey.json",
"gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js $PIL -s $BDIR/zkevm.starkstruct.json -v $BDIR/zkevm.verkey.json -o $BDIR/zkevm.verifier.circom",
"compilecircom": ". ./pre.sh && circom --O1 --prime goldilocks --r1cs --sym --wasm --c --verbose $BDIR/zkevm.verifier.circom -o $BDIR -l node_modules/pil-stark/circuits.gl",

"c12a_setup": ". ./pre.sh && $PILSTARK/compressor12/main_compressor12_setup.js -r $BDIR/zkevm.verifier.r1cs -p $BDIR/zkevm.c12a.pil -c $BDIR/zkevm.c12a.const -e $BDIR/zkevm.c12a.exec",
"c12a_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/zkevm.c12a.pil -s $BDIR/zkevm.c12a.starkstruct.json -i $BDIR/zkevm.c12a.starkinfo.json",
"c12a_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $BDIR/zkevm.c12a.pil -s $BDIR/zkevm.c12a.starkstruct.json -c $BDIR/zkevm.c12a.chelpers/zkevm.c12a.chelpers.cpp",
Expand All @@ -31,7 +30,6 @@
"c12a_verify": ". ./pre.sh && $PILSTARK/main_verifier.js -p $BDIR/zkevm.c12a.pil -s $BDIR/zkevm.c12a.starkstruct.json -o $BDIR/zkevm.c12a.proof.json -b $BDIR/zkevm.c12a.public.json -v $BDIR/zkevm.c12a.verkey.json",
"c12a_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js -p $BDIR/zkevm.c12a.pil -s $BDIR/zkevm.c12a.starkstruct.json -v $BDIR/zkevm.c12a.verkey.json -o $BDIR/zkevm.c12a.verifier.circom",
"c12a_compilecircom": ". ./pre.sh && circom --r1cs --sym --wasm --c --verbose --O1 --prime goldilocks $BDIR/zkevm.c12a.verifier.circom -o $BDIR -l node_modules/pil-stark/circuits.gl",

"c12b_setup": ". ./pre.sh && $PILSTARK/compressor12/main_compressor12_setup.js -r $BDIR/zkevm.c12a.verifier.r1cs -p $BDIR/zkevm.c12b.pil -c $BDIR/zkevm.c12b.const -e $BDIR/zkevm.c12b.exec",
"c12b_buildstarkinfo": ". ./pre.sh && $PILSTARK/main_genstarkinfo.js -p $BDIR/zkevm.c12b.pil -s $BDIR/zkevm.c12b.starkstruct.json -i $BDIR/zkevm.c12b.starkinfo.json",
"c12b_buildchelpers": ". ./pre.sh && $PILSTARK/main_buildchelpers.js -m -p $BDIR/zkevm.c12b.pil -s $BDIR/zkevm.c12b.starkstruct.json -c $BDIR/zkevm.c12b.chelpers/zkevm.c12b.chelpers.cpp",
Expand All @@ -42,7 +40,6 @@
"c12b_verify": ". ./pre.sh && $PILSTARK/main_verifier.js -p $BDIR/zkevm.c12b.pil -s $BDIR/zkevm.c12b.starkstruct.json -o $BDIR/zkevm.c12b.proof.json -b $BDIR/zkevm.c12b.public.json -v $BDIR/zkevm.c12b.verkey.json",
"c12b_gencircom": ". ./pre.sh && $PILSTARK/main_pil2circom.js -p $BDIR/zkevm.c12b.pil -s $BDIR/zkevm.c12b.starkstruct.json -v $BDIR/zkevm.c12b.verkey.json -o $BDIR/zkevm.c12b.verifier.circom",
"c12b_compilecircom": ". ./pre.sh && circom --r1cs --sym --wasm --c --verbose $BDIR/zkevm.c12b.verifier.circom -o $BDIR -l node_modules/pil-stark/circuits.bn128 -l node_modules/circomlib/circuits -l node_modules/pil-stark/circuits.gl",

"downloadptaw": "wget -P build https://hermez.s3-eu-west-1.amazonaws.com/powersOfTau28_hez_final.ptau",
"g16setup": ". ./pre.sh && $SNARKJS g16s $BDIR/zkevm.c12b.verifier.r1cs build/powersOfTau28_hez_final.ptau $BDIR/zkevm.g16.0000.zkey",
"g16contribute": ". ./pre.sh && $SNARKJS zkc $BDIR/zkevm.g16.0000.zkey $BDIR/zkevm.g16.0001.zkey -e=\"$(dd if=/dev/random bs=64 count=1 | base64 -w0)\"",
Expand All @@ -55,11 +52,9 @@
"prebuildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL $PIL -s $BDIR/zkevm.starkstruct.json; else cp src/zkevm.starkstruct.json $BDIR; fi",
"prec12a_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t GL -p $BDIR/zkevm.c12a.pil -s $BDIR/zkevm.c12a.starkstruct.json; else cp src/zkevm.c12a.starkstruct.json $BDIR; fi",
"prec12b_buildstarkinfo": ". ./pre.sh && if [ \"$npm_config_starkstruct\" = \"debug\" ]; then node tools/gen_debug_starkstruct.js -t BN128 -p $BDIR/zkevm.c12b.pil -s $BDIR/zkevm.c12b.starkstruct.json; else cp src/zkevm.c12b.starkstruct.json $BDIR; fi",

"buildsetup:basic": "npm run buildsetup --pil=pil/basic_main.pil --build=build/basic_proof --starkstruct=debug",
"buildall:basic": "npm run buildall --pil=pil/basic_main.pil --build=build/basic_proof --starkstruct=debug",
"buildproof:basic": "npm run buildproof --pil=pil/basic_main.pil --build=build/basic_proof --starkstruct=debug",

"test": "mocha",
"test:storage": "mocha test/sm_storage_test.js",
"test:mem_align": "mocha test/sm_mem_align_test.js",
Expand Down Expand Up @@ -92,8 +87,8 @@
},
"devDependencies": {
"chai": "^4.3.4",
"chai-as-promised": "^7.1.1",
"circom_tester": "^0.0.9",
"mocha": "^9.1.3"
}
}

73 changes: 39 additions & 34 deletions src/sm/sm_main/sm_main_exec.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {

const debug = config && config.debug;
const N = pols.zkPC.length;
const stepsN = (debug && config.stepsN) ? config.stepsN : N;

if (config && config.unsigned){
if (typeof input.from === 'undefined'){
Expand Down Expand Up @@ -78,22 +79,24 @@ module.exports = async function execute(pols, input, rom, config = {}) {
sto: input.keys,
rom: rom,
outLogs: {},
N: N
N,
stepsN
}

preprocessTxs(ctx);

if (debug) {
if (debug && config.debugInfo) {
iTracer = new Tracer(config.debugInfo.inputName);
fullTracer = new FullTracer(config.debugInfo.inputName)
} else {
iTracer = null
}
const iPrint = new Prints(ctx, smt);

for (i=0; i<N; i++) {
for (step=0; step < stepsN; step++) {
const i = step % N;
ctx.ln = Fr.toObject(pols.zkPC[i]);
ctx.step=i;
ctx.step = step;
ctx.A = [pols.A0[i], pols.A1[i], pols.A2[i], pols.A3[i], pols.A4[i], pols.A5[i], pols.A6[i], pols.A7[i]];
ctx.B = [pols.B0[i], pols.B1[i], pols.B2[i], pols.B3[i], pols.B4[i], pols.B5[i], pols.B6[i], pols.B7[i]];
ctx.C = [pols.C0[i], pols.C1[i], pols.C2[i], pols.C3[i], pols.C4[i], pols.C5[i], pols.C6[i], pols.C7[i]];
Expand Down Expand Up @@ -128,9 +131,9 @@ module.exports = async function execute(pols, input, rom, config = {}) {
let incHashPos = 0;
let incCounter = 0;

// if (i%1000==0) console.log(`Step: ${i}`);
// if (step%1000==0) console.log(`Step: ${step}`);

if (i==330) {
if (step==330) {
// console.log("### > "+l.fileName + ':' + l.line);
}

Expand Down Expand Up @@ -572,7 +575,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {

ctx.lastSWrite.newRoot = res.newRoot;
ctx.lastSWrite.res = res;
ctx.lastSWrite.step=i;
ctx.lastSWrite.step = step;

fi = sr4to8(ctx.Fr, ctx.lastSWrite.newRoot);
nHits++;
Expand All @@ -585,9 +588,9 @@ module.exports = async function execute(pols, input, rom, config = {}) {
if ((size<0) || (size>32)) throw new Error(`Invalid size for hash: ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
if (pos+size > ctx.hashK[addr].data.length) throw new Error(`Accessing hashK out of bounds ${addr}, ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
let s = Scalar.zero;
for (let i=0; i<size; i++) {
if (typeof ctx.hashK[addr].data[pos + i] === "undefined") throw new Error(`Accessing hashK not defined place ${addr}, ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
s = Scalar.add(Scalar.mul(s, 256), Scalar.e(ctx.hashK[addr].data[pos + i]));
for (let k=0; k<size; k++) {
if (typeof ctx.hashK[addr].data[pos + k] === "undefined") throw new Error(`Accessing hashK not defined place ${addr}, ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
s = Scalar.add(Scalar.mul(s, 256), Scalar.e(ctx.hashK[addr].data[pos + k]));
}
fi = scalar2fea(Fr, s);
nHits++;
Expand All @@ -610,9 +613,9 @@ module.exports = async function execute(pols, input, rom, config = {}) {
if ((size<0) || (size>32)) throw new Error(`Invalid size for hash: ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
if (pos+size > ctx.hashP[addr].data.length) throw new Error(`Accessing hashP out of bounds ${addr}, ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
let s = Scalar.zero;
for (let i=0; i<size; i++) {
if (typeof ctx.hashP[addr].data[pos + i] === "undefined") throw new Error(`Accessing hashP not defined place ${addr}, ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
s = Scalar.add(Scalar.mul(s, 256), Scalar.e(ctx.hashP[addr].data[pos + i]));
for (let k=0; k<size; k++) {
if (typeof ctx.hashP[addr].data[pos + k] === "undefined") throw new Error(`Accessing hashP not defined place ${addr}, ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
s = Scalar.add(Scalar.mul(s, 256), Scalar.e(ctx.hashP[addr].data[pos + k]));
}
fi = scalar2fea(Fr, s);
nHits++;
Expand Down Expand Up @@ -763,15 +766,15 @@ module.exports = async function execute(pols, input, rom, config = {}) {
required.Mem.push({
bIsWrite: true,
address: addr,
pc: i,
pc: step,
fe0:op0, fe1:op1, fe2:op2, fe3:op3, fe4:op4, fe5:op5, fe6:op6, fe7:op7
});
} else {
pols.mWR[i] = 0n;
required.Mem.push({
bIsWrite: false,
address: addr,
pc: i,
pc: step,
fe0:op0, fe1:op1, fe2:op2, fe3:op3, fe4:op4, fe5:op5, fe6:op6, fe7:op7
});
if (ctx.mem[addr]) {
Expand Down Expand Up @@ -866,7 +869,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {
if (l.sWR) {
pols.sWR[i] = 1n;

if ((!ctx.lastSWrite)||(ctx.lastSWrite.step != i)) {
if ((!ctx.lastSWrite)||(ctx.lastSWrite.step != step)) {
ctx.lastSWrite = {};

const Kin0 = [
Expand Down Expand Up @@ -903,7 +906,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {

ctx.lastSWrite.res = res;
ctx.lastSWrite.newRoot = res.newRoot;
ctx.lastSWrite.step=i;
ctx.lastSWrite.step = step;
}

required.Storage.push({
Expand Down Expand Up @@ -951,19 +954,19 @@ module.exports = async function execute(pols, input, rom, config = {}) {
if ((size<0) || (size>32)) throw new Error(`Invalid size for hashK: ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
const a = fea2scalar(Fr, [op0, op1, op2, op3, op4, op5, op6, op7]);
const maskByte = Scalar.e("0xFF");
for (let i=0; i<size; i++) {
const bm = Scalar.toNumber(Scalar.band( Scalar.shr( a, (size-i -1)*8 ) , maskByte));
const bh = ctx.hashK[addr].data[pos + i];
for (let k=0; k<size; k++) {
const bm = Scalar.toNumber(Scalar.band( Scalar.shr( a, (size-k -1)*8 ) , maskByte));
const bh = ctx.hashK[addr].data[pos + k];
if (typeof bh === "undefined") {
ctx.hashK[addr].data[pos + i] = bm;
ctx.hashK[addr].data[pos + k] = bm;
} else if (bm != bh) {
throw new Error(`HashK do not match ${addr}:${pos+i} is ${bm} and should be ${bh}: ${ctx.ln} at ${ctx.fileName}:${ctx.line}`)
throw new Error(`HashK do not match ${addr}:${pos+k} is ${bm} and should be ${bh}: ${ctx.ln} at ${ctx.fileName}:${ctx.line}`)
}
}

const paddingA = Scalar.shr(a, size * 8);
if (!Scalar.isZero(paddingA)) {
throw new Error(`Incoherent size (${size}) and data (0x${a.toString(16)}) padding (0x${paddingA.toString(16)}) for hashK (w=${i}): ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
throw new Error(`Incoherent size (${size}) and data (0x${a.toString(16)}) padding (0x${paddingA.toString(16)}) for hashK (w=${step}): ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
}

if ((typeof ctx.hashK[addr].reads[pos] !== "undefined") &&
Expand Down Expand Up @@ -1018,18 +1021,18 @@ module.exports = async function execute(pols, input, rom, config = {}) {
if ((size<0) || (size>32)) throw new Error(`Invalid size for hash: ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
const a = fea2scalar(Fr, [op0, op1, op2, op3, op4, op5, op6, op7]);
const maskByte = Scalar.e("0xFF");
for (let i=0; i<size; i++) {
const bm = Scalar.toNumber(Scalar.band( Scalar.shr( a, (size-i -1)*8 ) , maskByte));
const bh = ctx.hashP[addr].data[pos + i];
for (let k=0; k<size; k++) {
const bm = Scalar.toNumber(Scalar.band( Scalar.shr( a, (size-k -1)*8 ) , maskByte));
const bh = ctx.hashP[addr].data[pos + k];
if (typeof bh === "undefined") {
ctx.hashP[addr].data[pos + i] = bm;
ctx.hashP[addr].data[pos + k] = bm;
} else if (bm != bh) {
throw new Error(`HashP do not match ${addr}:${pos+i} is ${bm} and should be ${bh}`)
throw new Error(`HashP do not match ${addr}:${pos+k} is ${bm} and should be ${bh}`)
}
}
const paddingA = Scalar.shr(a, size * 8);
if (!Scalar.isZero(paddingA)) {
throw new Error(`Incoherent size (${size}) and data (0x${a.toString(16)}) padding (0x${paddingA.toString(16)}) for hashP (w=${i}): ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
throw new Error(`Incoherent size (${size}) and data (0x${a.toString(16)}) padding (0x${paddingA.toString(16)}) for hashP (w=${step}): ${ctx.ln} at ${ctx.fileName}:${ctx.line}`);
}

if ((typeof ctx.hashP[addr].reads[pos] !== "undefined") &&
Expand Down Expand Up @@ -1332,7 +1335,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {
// SET NEXT REGISTERS
//////////

const nexti = (i+1)%N;
const nexti = (i+1) % N;

if (l.setA == 1) {
pols.setA[i]=1n;
Expand Down Expand Up @@ -1708,7 +1711,9 @@ module.exports = async function execute(pols, input, rom, config = {}) {
}
}

checkFinalState(Fr, pols);
if (!debug || !config.stepsN) {
checkFinalState(Fr, pols);
}

if (iTracer)
iTracer.exportTrace();
Expand All @@ -1729,7 +1734,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {
}
}
if (p!= ctx.hashK[i].data.length) {
throw new Error(`Reading hashK out of limits: ${i}`);
throw new Error(`Reading hashK out of limits: ${step}`);
}
required.PaddingKK.push(h);
}
Expand All @@ -1750,7 +1755,7 @@ module.exports = async function execute(pols, input, rom, config = {}) {
}
}
if (p!= ctx.hashP[i].data.length) {
throw new Error(`Reading hashP out of limits: ${i}`);
throw new Error(`Reading hashP out of limits: ${step}`);
}
required.PaddingPG.push(h);
}
Expand Down Expand Up @@ -2442,7 +2447,7 @@ function eval_bitwise(ctx, tag) {
}

function eval_beforeLast(ctx) {
if (ctx.step >= ctx.N-2) {
if (ctx.step >= ctx.stepsN-2) {
return [0n, 0n, 0n, 0n, 0n, 0n, 0n, 0n];
} else {
return [ctx.Fr.negone, 0n, 0n, 0n, 0n, 0n, 0n, 0n];
Expand Down
40 changes: 40 additions & 0 deletions test/tools/stepsN.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
// Test for the debug-mode `stepsN` option of the main SM executor:
// the stepsN.zkasm fixture needs more than 2**16 steps to finish, so
// running it with stepsN == 2**16 must fail, while stepsN == 2**17
// (i.e. execution extended beyond N) must succeed.
const chai = require("chai");
const expect = chai.expect;
chai.use(require('chai-as-promised'));

const {verifyZkasm} = require("../verify_zkasm");

// NOTE: mocha's describe callback must be synchronous — an async
// callback's promise is ignored and can hide registration errors.
describe("Test stepsN debug option", function () {
    this.timeout(10000000);

    it("rejects when stepsN is too small to complete execution", async () => {
        await expect(
            verifyZkasm("../zkasm/tools/stepsN.zkasm", false,
                { defines: {N: 2 ** 16},
                  namespaces: ['Global', 'Main'],
                  verbose: true,
                  color: true,
                  disableUnusedError: true},
                {
                  stepsN: 2**16,
                  debug: true
                })).to.be.rejectedWith(Error);
    });

    it("succeeds when stepsN extends execution beyond N", async () => {
        await verifyZkasm("../zkasm/tools/stepsN.zkasm", false,
            { defines: {N: 2 ** 16},
              namespaces: ['Global', 'Main'],
              verbose: true,
              color: true,
              disableUnusedError: true},
            {
              stepsN: 2**17,
              debug: true
            });
    });
});
5 changes: 3 additions & 2 deletions test/verify_zkasm.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,9 @@ const smPoseidonG = require("../src/sm/sm_poseidong.js");
const smRom = require("../src/sm/sm_rom.js");
const smStorage = require("../src/sm/sm_storage/sm_storage.js");
const { index } = require("../src/sm/sm_main/test_tools.js");
const { config } = require("yargs");

module.exports.verifyZkasm = async function (zkasmFile, verifyPilFlag = true, pilConfig = {}) {
module.exports.verifyZkasm = async function (zkasmFile, verifyPilFlag = true, pilConfig = {}, mainConfig = {}) {

const Fr = new F1Field("0xFFFFFFFF00000001");
const brief = false;
Expand Down Expand Up @@ -115,7 +116,7 @@ module.exports.verifyZkasm = async function (zkasmFile, verifyPilFlag = true, pi
await smBinary.buildConstants(constPols.Binary);
}

const requiredMain = await smMain.execute(cmPols.Main, input, rom);
const requiredMain = await smMain.execute(cmPols.Main, input, rom, mainConfig);

if (cmPols.Byte4) {
console.log("Exec Byte4...");
Expand Down
23 changes: 23 additions & 0 deletions test/zkasm/tools/stepsN.zkasm
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
start:

; Loop bound: 40000 iterations; at 2 instructions per iteration the
; program needs well over 2**16 steps, so it only completes when the
; executor is allowed to run past N (debug stepsN option).
CONST %UPTO = 40000

; At the first instruction the STEP register must be 0.
STEP => A
0 :ASSERT

; Count from 0 up to %UPTO; JMPN loops while A - %UPTO is negative.
0 => A
counterLoop:
A + 1 => A
A - %UPTO :JMPN(counterLoop)

counterEnds:
; Loop exits exactly when A reaches %UPTO.
%UPTO :ASSERT

end:
; Clear all registers before the final wait, as required by the
; executor's final-state check.
0 => A,B,C,D,E,CTX, SP, PC, GAS, MAXMEM, SR

finalWait:
; Spin until beforeLast() signals the end of the (possibly extended)
; execution window — presumably driven by ctx.stepsN; confirm in
; sm_main_exec.js eval_beforeLast.
${beforeLast()} : JMPN(finalWait)

: JMP(start)
opINVALID: