diff --git a/examples/notebooks/data_attest.ipynb b/examples/notebooks/data_attest.ipynb index 6cc3759b6..d361037f3 100644 --- a/examples/notebooks/data_attest.ipynb +++ b/examples/notebooks/data_attest.ipynb @@ -592,7 +592,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.12.5" }, "orig_nbformat": 4 }, diff --git a/examples/notebooks/data_attest_hashed.ipynb b/examples/notebooks/data_attest_hashed.ipynb index bd6979dd4..de8e58800 100644 --- a/examples/notebooks/data_attest_hashed.ipynb +++ b/examples/notebooks/data_attest_hashed.ipynb @@ -648,10 +648,10 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.15" + "version": "3.12.5" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/examples/notebooks/logistic_regression.ipynb b/examples/notebooks/logistic_regression.ipynb index 0bdd16820..b6c30c10b 100644 --- a/examples/notebooks/logistic_regression.ipynb +++ b/examples/notebooks/logistic_regression.ipynb @@ -271,7 +271,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.12.7" } }, "nbformat": 4, diff --git a/examples/onnx/log/gen.py b/examples/onnx/log/gen.py new file mode 100644 index 000000000..fba681496 --- /dev/null +++ b/examples/onnx/log/gen.py @@ -0,0 +1,42 @@ +from torch import nn +import torch +import json +import numpy as np + + +class MyModel(nn.Module): + def __init__(self): + super(MyModel, self).__init__() + + def forward(self, x): + m = torch.log(x) + + return m + + +circuit = MyModel() + +x = torch.empty(1, 8).uniform_(0, 3) + +out = circuit(x) + +print(out) + +torch.onnx.export(circuit, x, "network.onnx", + export_params=True, # store the trained parameter weights inside the model file + opset_version=17, # the ONNX version to export the model to + do_constant_folding=True, # whether to execute constant folding for optimization + input_names=['input'], # the model's input names + output_names=['output'], # the model's output names + dynamic_axes={'input': {0: 'batch_size'}, # variable length axes + 'output': {0: 'batch_size'}}) + + +d1 = ((x).detach().numpy()).reshape([-1]).tolist() + +data = dict( + input_data=[d1], +) + +# Serialize data into file: +json.dump(data, open("input.json", 'w')) diff --git a/examples/onnx/log/input.json b/examples/onnx/log/input.json new file mode 100644 index 000000000..f9139376c --- /dev/null +++ b/examples/onnx/log/input.json @@ -0,0 +1 @@ +{"input_data": [[1.9252371788024902, 1.8418371677398682, 0.8400403261184692, 2.083845853805542, 0.9760497808456421, 0.6940176486968994, 0.015579521656036377, 2.2689192295074463]]} \ No newline at end of file diff --git a/examples/onnx/log/network.onnx b/examples/onnx/log/network.onnx new file mode 100644 index 000000000..43233b518 --- /dev/null +++ b/examples/onnx/log/network.onnx @@ -0,0 +1,14 @@ +pytorch2.2.2:o + +inputoutput/Log"Log +main_graphZ! 
+input
+
+  
+batch_size
+b"
+output
+
+  
+batch_size
+B
\ No newline at end of file
diff --git a/src/bindings/python.rs b/src/bindings/python.rs
index 6c3be3077..59fa4aa26 100644
--- a/src/bindings/python.rs
+++ b/src/bindings/python.rs
@@ -197,6 +197,9 @@ struct PyRunArgs {
     /// int: The number of legs used for decomposition
     #[pyo3(get, set)]
     pub decomp_legs: usize,
+    /// bool: Should the circuit use bounded lookups for log
+    #[pyo3(get, set)]
+    pub bounded_log_lookup: bool,
 }
 /// default instantiation of PyRunArgs
@@ -212,6 +215,7 @@ impl PyRunArgs {
 impl From for RunArgs {
     fn from(py_run_args: PyRunArgs) -> Self {
         RunArgs {
+            bounded_log_lookup: py_run_args.bounded_log_lookup,
             tolerance: Tolerance::from(py_run_args.tolerance),
             input_scale: py_run_args.input_scale,
             param_scale: py_run_args.param_scale,
@@ -236,6 +240,7 @@ impl From for RunArgs {
 impl Into for RunArgs {
     fn into(self) -> PyRunArgs {
         PyRunArgs {
+            bounded_log_lookup: self.bounded_log_lookup,
             tolerance: self.tolerance.val,
             input_scale: self.input_scale,
             param_scale: self.param_scale,
diff --git a/src/circuit/ops/hybrid.rs b/src/circuit/ops/hybrid.rs
index 0ea1327c7..2bf6b2c11 100644
--- a/src/circuit/ops/hybrid.rs
+++ b/src/circuit/ops/hybrid.rs
@@ -13,6 +13,10 @@ use serde::{Deserialize, Serialize};
 /// An enum representing the operations that consist of both lookups and arithmetic operations.
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub enum HybridOp {
+    Ln {
+        scale: utils::F32,
+    },
+
     RoundHalfToEven {
         scale: utils::F32,
         legs: usize,
@@ -112,6 +116,7 @@ impl Op for Hybrid
     fn as_string(&self) -> String {
         match self {
+            HybridOp::Ln { scale } => format!("LN(scale={})", scale),
             HybridOp::RoundHalfToEven { scale, legs } => {
                 format!("ROUND_HALF_TO_EVEN(scale={}, legs={})", scale, legs)
             }
@@ -189,6 +194,7 @@ impl Op for Hybrid
         values: &[ValTensor],
     ) -> Result>, CircuitError> {
         Ok(Some(match self {
+            HybridOp::Ln { scale } => layouts::ln(config, region, values[..].try_into()?, *scale)?,
             HybridOp::RoundHalfToEven { scale, legs } => {
                 layouts::round_half_to_even(config, region, values[..].try_into()?, *scale, *legs)?
             }
@@ -327,6 +333,9 @@ impl Op for Hybrid
             HybridOp::Softmax { output_scale, .. } | HybridOp::Recip { output_scale, ..
} => {
                 multiplier_to_scale(output_scale.0 as f64)
             }
+            HybridOp::Ln {
+                scale: output_scale,
+            } => 4 * multiplier_to_scale(output_scale.0 as f64),
             _ => in_scales[0],
         };
         Ok(scale)
diff --git a/src/circuit/ops/layouts.rs b/src/circuit/ops/layouts.rs
index 2f3d9a31f..85d878bcf 100644
--- a/src/circuit/ops/layouts.rs
+++ b/src/circuit/ops/layouts.rs
@@ -4507,6 +4507,332 @@ pub fn ceil(
     )
 }
+/// integer natural log (ln) layout
+/// # Arguments
+/// * `config` - BaseConfig
+/// * `region` - RegionCtx
+/// * `values` - &[ValTensor; 1]
+/// * `scale` - utils::F32
+/// # Returns
+/// * ValTensor
+/// # Example
+///
+/// ```
+/// use ezkl::tensor::Tensor;
+/// use ezkl::fieldutils::IntegerRep;
+/// use ezkl::circuit::ops::layouts::ln;
+/// use ezkl::tensor::val::ValTensor;
+/// use halo2curves::bn256::Fr as Fp;
+/// use ezkl::circuit::region::RegionCtx;
+/// use ezkl::circuit::region::RegionSettings;
+/// use ezkl::circuit::BaseConfig;
+/// let dummy_config = BaseConfig::dummy(12, 2);
+/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
+/// let x = ValTensor::from_integer_rep_tensor(Tensor::::new(
+///     Some(&[3, 2, 3, 1]),
+///     &[1, 1, 2, 2],
+/// ).unwrap());
+///
+/// let result = ln::(&dummy_config, &mut dummy_region, &[x], 2.0.into()).unwrap();
+/// let expected = Tensor::::new(Some(&[4, 0, 4, -8]), &[1, 1, 2, 2]).unwrap();
+/// assert_eq!(result.int_evals().unwrap(), expected);
+///
+/// ```
+pub fn ln(
+    config: &BaseConfig,
+    region: &mut RegionCtx,
+    values: &[ValTensor; 1],
+    scale: utils::F32,
+) -> Result, CircuitError> {
+    // first generate the claimed val
+
+    let mut input = values[0].clone();
+
+    let scale_as_felt = integer_rep_to_felt(scale.0.round() as IntegerRep);
+
+    let assigned_triple_scaled_as_felt_tensor = region.assign(
+        &config.custom_gates.inputs[1],
+        &create_constant_tensor(scale_as_felt * scale_as_felt * scale_as_felt, 1),
+    )?;
+
+    // natural ln is log2(x) * ln(2)
+    let ln2 = utils::F32::from(2.0_f32.ln());
+    // now create a constant tensor for ln2 with scale
+    let ln2_tensor: ValTensor = create_constant_tensor(
+        integer_rep_to_felt((ln2.0 * scale.0).round() as IntegerRep),
+        1,
+    );
+    region.assign(&config.custom_gates.inputs[0], &ln2_tensor)?;
+    let unit = create_constant_tensor(integer_rep_to_felt(1), 1);
+    region.assign(&config.custom_gates.inputs[1], &unit)?;
+    region.increment(1);
+
+    // assign the input
+    if !input.all_prev_assigned() {
+        input = region.assign(&config.custom_gates.inputs[0], &input)?;
+        // don't need to increment because the claimed output is assigned to output and incremented accordingly
+    }
+
+    let is_assigned = !input.any_unknowns()?;
+
+    let mut claimed_output: ValTensor = if is_assigned {
+        let input_evals = input.int_evals()?;
+        // returns the rounded base 2 logarithm as an integer
+        tensor::ops::nonlinearities::ilog2(&input_evals.clone(), scale.0 as f64)
+            .par_iter()
+            .map(|x| Value::known(integer_rep_to_felt(*x)))
+            .collect::>>()
+            .into()
+    } else {
+        Tensor::new(
+            Some(&vec![Value::::unknown(); input.len()]),
+            &[input.len()],
+        )?
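+        // the witness is not known at this point (e.g. during key generation), so the claimed output is left as unknowns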
+        .into()
+    };
+    claimed_output.reshape(input.dims())?;
+    region.assign(&config.custom_gates.output, &claimed_output)?;
+    region.increment(claimed_output.len());
+
+    let pow2_of_claimed_output = nonlinearity(
+        config,
+        region,
+        &[claimed_output.clone()],
+        &LookupOp::PowersOfTwo { scale },
+    )?;
+
+    let num_bits = (std::mem::size_of::() * 8) as IntegerRep;
+
+    region.update_max_min_lookup_inputs_force(-num_bits, num_bits)?;
+
+    // now subtract 1 from the claimed output
+    let claimed_output_minus_one = pairwise(
+        config,
+        region,
+        &[claimed_output.clone(), unit.clone()],
+        BaseOp::Sub,
+    )?;
+
+    // now add 1 to the claimed output
+    let claimed_output_plus_one = pairwise(
+        config,
+        region,
+        &[claimed_output.clone(), unit.clone()],
+        BaseOp::Add,
+    )?;
+
+    // prior power of 2 is less than claimed output
+    let prior_pow2 = nonlinearity(
+        config,
+        region,
+        &[claimed_output_minus_one],
+        &LookupOp::PowersOfTwo { scale },
+    )?;
+
+    // next power of 2 is greater than claimed output
+    let next_pow2 = nonlinearity(
+        config,
+        region,
+        &[claimed_output_plus_one],
+        &LookupOp::PowersOfTwo { scale },
+    )?;
+
+    // assert that the original input is closer to the claimed power of 2 than to the prior power of 2 and the next power of 2
+    let distance_to_prior = pairwise(
+        config,
+        region,
+        &[input.clone(), prior_pow2.clone()],
+        BaseOp::Sub,
+    )?;
+
+    // now take abs of the distance
+    let distance_to_prior_l1 = abs(config, region, &[distance_to_prior.clone()])?;
+
+    let distance_to_next = pairwise(
+        config,
+        region,
+        &[input.clone(), next_pow2.clone()],
+        BaseOp::Sub,
+    )?;
+
+    // now take abs of the distance
+    let distance_to_next_l1 = abs(config, region, &[distance_to_next.clone()])?;
+
+    let distance_to_claimed = pairwise(
+        config,
+        region,
+        &[input.clone(), pow2_of_claimed_output.clone()],
+        BaseOp::Sub,
+    )?;
+
+    // now take abs of the distance
+    let distance_to_claimed_l1 = abs(config, region, &[distance_to_claimed.clone()])?;
+
+    // can be less than or equal because we round up
+    let is_distance_to_prior_less = less_equal(
+        config,
+        region,
+        &[distance_to_claimed_l1.clone(), distance_to_prior_l1.clone()],
+    )?;
+
+    // should be strictly less because we round up
+    let is_distance_to_next_less = less(
+        config,
+        region,
+        &[distance_to_claimed_l1, distance_to_next_l1.clone()],
+    )?;
+
+    let is_distance_to_prior_less_and_distance_to_next_less = and(
+        config,
+        region,
+        &[
+            is_distance_to_prior_less.clone(),
+            is_distance_to_next_less.clone(),
+        ],
+    )?;
+
+    let mut comparison_unit = create_constant_tensor(
+        integer_rep_to_felt(1),
+        is_distance_to_prior_less_and_distance_to_next_less.len(),
+    );
+
+    comparison_unit.reshape(is_distance_to_prior_less_and_distance_to_next_less.dims())?;
+
+    // assigned unit
+    let assigned_unit = region.assign(&config.custom_gates.inputs[1], &comparison_unit)?;
+    region.increment(assigned_unit.len());
+
+    // assert that the values are truthy
+    enforce_equality(
+        config,
+        region,
+        &[
+            is_distance_to_prior_less_and_distance_to_next_less,
+            assigned_unit.clone(),
+        ],
+    )?;
+
+    // get a linear interpolation now
+
+    let sign_of_distance_to_claimed = sign(config, region, &[distance_to_claimed.clone()])?;
+    let sign_of_distance_to_claimed_is_positive = equals(
+        config,
+        region,
+        &[sign_of_distance_to_claimed.clone(), assigned_unit.clone()],
+    )?;
+
+    let sign_of_distance_to_claimed_is_negative = not(
+        config,
+        region,
+        &[sign_of_distance_to_claimed_is_positive.clone()],
+    )?;
+
+    let pow2_prior_to_claimed_distance = pairwise(
+        config,
+        region,
+ &[pow2_of_claimed_output.clone(), prior_pow2.clone()], + BaseOp::Sub, + )?; + + let pow2_next_to_claimed_distance = pairwise( + config, + region, + &[next_pow2.clone(), pow2_of_claimed_output.clone()], + BaseOp::Sub, + )?; + + let recip_pow2_prior_to_claimed_distance = recip( + config, + region, + &[pow2_prior_to_claimed_distance], + scale_as_felt, + scale_as_felt * scale_as_felt, + )?; + + let interpolated_distance = pairwise( + config, + region, + &[ + recip_pow2_prior_to_claimed_distance.clone(), + distance_to_claimed.clone(), + ], + BaseOp::Mult, + )?; + + let gated_prior_interpolated_distance = pairwise( + config, + region, + &[ + interpolated_distance.clone(), + sign_of_distance_to_claimed_is_negative.clone(), + ], + BaseOp::Mult, + )?; + + let recip_next_to_claimed_distance = recip( + config, + region, + &[pow2_next_to_claimed_distance], + scale_as_felt, + scale_as_felt * scale_as_felt, + )?; + + let interpolated_distance_next = pairwise( + config, + region, + &[ + recip_next_to_claimed_distance.clone(), + distance_to_claimed.clone(), + ], + BaseOp::Mult, + )?; + + let gated_next_interpolated_distance = pairwise( + config, + region, + &[ + interpolated_distance_next.clone(), + sign_of_distance_to_claimed_is_positive.clone(), + ], + BaseOp::Mult, + )?; + + let scaled_claimed_output = pairwise( + config, + region, + &[ + claimed_output.clone(), + assigned_triple_scaled_as_felt_tensor, + ], + BaseOp::Mult, + )?; + + let claimed_output = pairwise( + config, + region, + &[ + scaled_claimed_output.clone(), + gated_prior_interpolated_distance.clone(), + ], + BaseOp::Add, + )?; + + let claimed_output = pairwise( + config, + region, + &[ + claimed_output.clone(), + gated_next_interpolated_distance.clone(), + ], + BaseOp::Add, + )?; + + // now multiply the claimed output by ln2 + pairwise(config, region, &[claimed_output, ln2_tensor], BaseOp::Mult) +} + /// round layout /// # Arguments /// * `config` - BaseConfig diff --git a/src/circuit/ops/lookup.rs b/src/circuit/ops/lookup.rs index 0f7ca0852..b6f70679d 100644 --- a/src/circuit/ops/lookup.rs +++ b/src/circuit/ops/lookup.rs @@ -16,10 +16,11 @@ use halo2curves::ff::PrimeField; pub enum LookupOp { Div { denom: utils::F32 }, IsOdd, + PowersOfTwo { scale: utils::F32 }, + Ln { scale: utils::F32 }, Sqrt { scale: utils::F32 }, Rsqrt { scale: utils::F32 }, Sigmoid { scale: utils::F32 }, - Ln { scale: utils::F32 }, Exp { scale: utils::F32 }, Cos { scale: utils::F32 }, ACos { scale: utils::F32 }, @@ -50,6 +51,8 @@ impl LookupOp { pub fn as_path(&self) -> String { match self { LookupOp::Pow { scale, a } => format!("pow_{}_{}", scale, a), + LookupOp::Ln { scale } => format!("ln_{}", scale), + LookupOp::PowersOfTwo { scale } => format!("pow2_{}", scale), LookupOp::IsOdd => "is_odd".to_string(), LookupOp::Div { denom } => format!("div_{}", denom), LookupOp::Sigmoid { scale } => format!("sigmoid_{}", scale), @@ -57,7 +60,6 @@ impl LookupOp { LookupOp::Rsqrt { scale } => format!("rsqrt_{}", scale), LookupOp::Erf { scale } => format!("erf_{}", scale), LookupOp::Exp { scale } => format!("exp_{}", scale), - LookupOp::Ln { scale } => format!("ln_{}", scale), LookupOp::Cos { scale } => format!("cos_{}", scale), LookupOp::ACos { scale } => format!("acos_{}", scale), LookupOp::Cosh { scale } => format!("cosh_{}", scale), @@ -82,6 +84,12 @@ impl LookupOp { let x = x[0].clone().map(|x| felt_to_integer_rep(x)); let res = match &self { + LookupOp::Ln { scale } => { + Ok::<_, TensorError>(tensor::ops::nonlinearities::ln(&x, scale.into())) + } + LookupOp::PowersOfTwo 
{ scale } => {
+                Ok::<_, TensorError>(tensor::ops::nonlinearities::ipow2(&x, scale.0.into()))
+            }
             LookupOp::IsOdd => Ok::<_, TensorError>(tensor::ops::nonlinearities::is_odd(&x)),
             LookupOp::Pow { scale, a } => Ok::<_, TensorError>(
                 tensor::ops::nonlinearities::pow(&x, scale.0.into(), a.0.into()),
             ),
@@ -104,9 +112,6 @@ impl LookupOp {
             LookupOp::Exp { scale } => {
                 Ok::<_, TensorError>(tensor::ops::nonlinearities::exp(&x, scale.into()))
             }
-            LookupOp::Ln { scale } => {
-                Ok::<_, TensorError>(tensor::ops::nonlinearities::ln(&x, scale.into()))
-            }
             LookupOp::Cos { scale } => {
                 Ok::<_, TensorError>(tensor::ops::nonlinearities::cos(&x, scale.into()))
             }
@@ -163,10 +168,11 @@ impl Op for Lookup
     /// Returns the name of the operation
     fn as_string(&self) -> String {
         match self {
+            LookupOp::Ln { scale } => format!("LN(scale={})", scale),
+            LookupOp::PowersOfTwo { scale } => format!("POWERS_OF_TWO(scale={})", scale),
             LookupOp::IsOdd => "IS_ODD".to_string(),
             LookupOp::Pow { a, scale } => format!("POW(scale={}, exponent={})", scale, a),
             LookupOp::Div { denom, .. } => format!("DIV(denom={})", denom),
-            LookupOp::Ln { scale } => format!("LN(scale={})", scale),
             LookupOp::Sigmoid { scale } => format!("SIGMOID(scale={})", scale),
             LookupOp::Sqrt { scale } => format!("SQRT(scale={})", scale),
             LookupOp::Erf { scale } => format!("ERF(scale={})", scale),
diff --git a/src/graph/utilities.rs b/src/graph/utilities.rs
index ae89df0a7..f2379b273 100644
--- a/src/graph/utilities.rs
+++ b/src/graph/utilities.rs
@@ -851,9 +851,18 @@ pub fn new_op_from_onnx(
         "Exp" => SupportedOp::Nonlinear(LookupOp::Exp {
             scale: scale_to_multiplier(input_scales[0]).into(),
         }),
-        "Ln" => SupportedOp::Nonlinear(LookupOp::Ln {
-            scale: scale_to_multiplier(input_scales[0]).into(),
-        }),
+        "Ln" => {
+            if run_args.bounded_log_lookup {
+                SupportedOp::Hybrid(HybridOp::Ln {
+                    scale: scale_to_multiplier(input_scales[0]).into(),
+                })
+            } else {
+                SupportedOp::Nonlinear(LookupOp::Ln {
+                    scale: scale_to_multiplier(input_scales[0]).into(),
+                })
+            }
+        }
+
         "Sin" => SupportedOp::Nonlinear(LookupOp::Sin {
             scale: scale_to_multiplier(input_scales[0]).into(),
         }),
diff --git a/src/lib.rs b/src/lib.rs
index ffa770699..329ecd30b 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -317,11 +317,18 @@ pub struct RunArgs {
     #[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "2", value_hint = clap::ValueHint::Other))]
     /// the number of legs used for decompositions
     pub decomp_legs: usize,
+    #[cfg_attr(
+        all(feature = "ezkl", not(target_arch = "wasm32")),
+        arg(long, default_value = "false")
+    )]
+    /// use bounded lookup for the log
+    pub bounded_log_lookup: bool,
 }
 impl Default for RunArgs {
     fn default() -> Self {
         Self {
+            bounded_log_lookup: false,
             tolerance: Tolerance::default(),
             input_scale: 7,
             param_scale: 7,
diff --git a/src/tensor/ops.rs b/src/tensor/ops.rs
index bdd79f651..79f8eb8a7 100644
--- a/src/tensor/ops.rs
+++ b/src/tensor/ops.rs
@@ -1500,6 +1500,59 @@ pub mod nonlinearities {
         .unwrap()
     }
+    /// Elementwise computes powers of 2 for a tensor of integers.
+    /// # Arguments
+    /// * `a` - Tensor
+    /// * `scale_output` - Single value
+    /// # Examples
+    /// ```
+    /// use ezkl::tensor::Tensor;
+    /// use ezkl::fieldutils::IntegerRep;
+    /// use ezkl::tensor::ops::nonlinearities::ipow2;
+    /// let x = Tensor::::new(
+    ///     Some(&[2, 15, 2, 1, 1, 0]),
+    ///     &[2, 3],
+    /// ).unwrap();
+    /// let result = ipow2(&x, 1.0);
+    /// let expected = Tensor::::new(Some(&[4, 32768, 4, 2, 2, 1]), &[2, 3]).unwrap();
+    /// assert_eq!(result, expected);
+    /// ```
+    pub fn ipow2(a: &Tensor, scale_output: f64) -> 
Tensor { + a.par_enum_map(|_, a_i| { + let kix = a_i as f64; + let kix = scale_output * (2.0_f64).powf(kix); + let rounded = kix.round(); + Ok::<_, TensorError>(rounded as IntegerRep) + }) + .unwrap() + } + + /// Elementwise applies ln base 2 to a tensor of integers. + /// # Arguments + /// * `a` - Tensor + /// * `scale_input` - Single value + /// ``` + /// use ezkl::tensor::Tensor; + /// use ezkl::fieldutils::IntegerRep; + /// use ezkl::tensor::ops::nonlinearities::ilog2; + /// let x = Tensor::::new( + /// Some(&[2, 15, 2, 1, 1, 2]), + /// &[2, 3], + /// ).unwrap(); + /// let result = ilog2(&x, 1.0); + /// let expected = Tensor::::new(Some(&[1, 4, 1, 0, 0, 1]), &[2, 3]).unwrap(); + /// assert_eq!(result, expected); + /// ``` + pub fn ilog2(a: &Tensor, scale_input: f64) -> Tensor { + a.par_enum_map(|_, a_i| { + let kix = (a_i as f64) / scale_input; + let kix = (kix).log2(); + let rounded = kix.round(); + Ok::<_, TensorError>(rounded as IntegerRep) + }) + .unwrap() + } + /// Elementwise applies sigmoid to a tensor of integers. /// # Arguments /// @@ -1628,12 +1681,11 @@ pub mod nonlinearities { .unwrap() } - /// Elementwise applies exponential to a tensor of integers. + /// Elementwise applies ln to a tensor of integers. /// # Arguments /// /// * `a` - Tensor /// * `scale_input` - Single value - /// * `scale_output` - Single value /// # Examples /// ``` /// use ezkl::tensor::Tensor; diff --git a/tests/assets/model.compiled b/tests/assets/model.compiled index d1a3d508a..b665de881 100644 Binary files a/tests/assets/model.compiled and b/tests/assets/model.compiled differ diff --git a/tests/assets/proof.json b/tests/assets/proof.json index 6db6c2445..3a826cc00 100644 --- a/tests/assets/proof.json +++ b/tests/assets/proof.json @@ -1 +1 @@ -{"protocol":null,"instances":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"proof":[7,102,87,190,153,229,59,6,250,221,10,63,210,51,26,207,46,194,145,97,237,1,245,42,248,173,49,189,21,0,216,125,34,9,199,125,171,108,12,101,89,75,31,133,179,147,239,237,218,207,81,198,82,129,144,247,68,204,166,44,217,66,221,35,17,33,29,160,129,121,229,44,82,145,52,107,213,158,203,98,129,56,97,204,34,80,245,3,34,5,252,31,167,58,56,2,46,210,50,211,53,35,250,143,122,31,14,84,171,83,208,117,236,3,97,98,59,102,89,161,167,18,194,233,26,45,58,38,42,83,206,156,150,9,69,180,201,190,62,248,116,104,6,168,207,112,166,244,168,160,60,83,250,74,40,63,19,85,44,27,30,165,252,35,212,69,204,145,255,18,204,249,66,245,192,116,111,38,175,106,252,101,33,182,129,200,188,246,224,111,18,116,29,136,21,25,83,69,48,217,153,96,162,102,210,156,188,8,183,175,181,194,45,244,231,154,102,19,18,152,102,167,34,175,2,92,46,210,136,40,78,202,251,77,209,104,254,116,21,37,168,44,135,119,129,216,252,171,70,50,243,103,194,115,18,125,22,42,29,183,17,110,66,169,83,226,46,203,216,91,21,148,167,252,33,157,201,156,20,233,149,244,87,184,248,255,14,144,44,49,13,249,77,141,69,211,180,110,63,67,91,75,235,138,88,93,74,113,253,66,111,192,243,132,182,86,184,22,135,179,2,112,0,135,178,191,162,208,159,58,198,75,119,147,118,211,123,5,216,206,242,48,38,98,156,215,28,70,97,33,224,149,28,145,250,188,1,52,183,235,236,117,6,140,80,29,215,31,254,156,17,10,29,184,124,55,228,139,63,193,3,207,68,16,9,207,169,136,53,34,166,195,225,22,236,10,200,246,61,4,236,31,71,161,12,17,126,135,26,197,8,101,142,82,231,57,44,76,64,86,37,222,
181,85,166,186,2,138,108,70,116,45,60,86,220,44,23,240,162,185,141,196,147,50,163,42,197,7,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,42,29,23,75,127,138,84,57,241,193,71,212,213,184,25,163,131,79,55,28,182,52,178,65,193,214,211,84,24,52,155,247,21,200,242,170,146,244,46,164,38,166,5,201,19,214,103,89,20,8,5,173,157,189,211,53,137,20,32,222,97,102,44,188,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,5,97,158,236,112,200,133,205,196,108,135,251,243,47,53,102,214,117,81,123,223,159,154,3,168,145,32,206,56,130,244,126,30,123,25,247,42,138,80,13,204,246,88,185,216,221,11,188,68,43,30,137,242,63,67,148,239,99,199,15,250,236,239,178,26,209,34,200,138,140,206,3,67,51,120,204,73,126,249,188,48,66,238,66,204,200,133,243,45,13,1,207,81,184,238,53,33,84,61,78,74,147,16,193,71,46,210,204,179,17,101,87,102,136,143,222,76,225,178,69,62,245,63,20,142,102,17,94,43,74,104,11,146,4,125,21,142,126,135,189,149,143,205,140,34,111,163,62,109,210,31,204,175,50,7,217,26,191,190,60,37,94,107,29,204,10,222,124,188,245,129,125,217,165,220,135,35,91,49,229,85,96,27,25,99,9,225,32,116,153,215,134,13,183,224,204,233,128,102,238,97,14,254,157,39,96,4,145,139,222,112,155,177,102,13,204,135,28,105,114,71,135,178,11,37,61,49,167,12,140,165,109,159,47,215,57,192,92,118,228,228,54,109,17,225,220,56,103,249,249,180,156,65,141,223,161,38,42,160,63,164,253,134,50,29,64,123,102,101,110,56,229,245,8,229,190,244,56,138,30,64,224,126,14,113,160,14,77,9,45,12,94,228,153,98,165,193,202,194,26,128,181,157,153,242,225,127,254,16,186,93,108,0,238,185,30,194,230,7,12,18,221,99,250,238,234,234,195,200,50,226,31,74,56,205,29,227,45,40,5,203,46,50,5,158,63,195,133,196,246,132,193,3,10,109,191,2,167,137,30,58,26,46,165,32,122,120,160,79,101,189,105,241,63,168,45,233,249,123,111,33,198,5,1,18,221,99,250,238,234,234,195,200,50,226,31,74,56,205,29,227,45,40,5,203,46,50,5,158,63,195,133,196,246,132,193,3,10,109,191,2,167,137,30,58,26,46,165,32,122,120,160,79,101,189,105,241,63,168,45,233,249,123,111,33,198,5,1,36,210,211,5,222,163,212,162,90,216,137,227,17,182,242,62,205,101,157,131,103,67,158,89,10,55,203,240,128,163,67,46,11,84,104,82,21,171,88,223,111,11,21,150,190,181,104,80,103,7,135,27,78,112,81,153,55,27,135,206,199,220,51,99,36,210,211,5,222,163,212,162,90,216,137,227,17,182,242,62,205,101,157,131,103,67,158,89,10,55,203,240,128,163,67,46,11,84,104,82,21,171,88,223,111,11,21,150,190,181,104,80,103,7,135,27,78,112,81,153,55,27,135,206,199,220,51,99,5,87,243,180,208,140,106,184,142,189,134,102,168,43,255,122,164,248,72,102,123,91,180,237,154,18
5,198,160,149,144,188,148,35,193,184,193,223,79,161,201,109,58,212,63,205,212,88,198,28,177,44,24,246,186,130,79,196,42,127,191,219,98,150,210,30,27,55,193,204,141,174,160,179,0,37,53,36,136,216,198,141,151,234,89,24,169,108,27,187,206,93,252,81,9,114,250,11,86,193,237,94,156,198,129,86,30,189,73,132,76,233,115,138,196,104,103,157,87,116,229,106,78,21,39,106,226,58,77,29,111,130,131,204,12,252,184,28,57,227,239,53,123,25,91,179,75,13,217,59,7,184,36,162,99,114,255,33,93,151,64,31,154,80,215,92,243,125,75,158,129,2,100,62,109,114,145,207,19,64,188,194,238,188,161,105,82,215,187,255,129,66,42,0,4,227,243,247,86,220,10,203,72,59,67,3,196,123,150,4,19,206,36,62,73,89,61,9,86,228,15,142,253,131,156,14,182,251,152,110,37,82,148,89,49,233,165,68,238,94,1,242,10,63,217,215,238,27,1,199,122,153,227,5,252,162,130,5,84,176,32,206,60,5,239,203,180,11,27,6,199,83,154,165,93,110,178,50,238,206,176,69,218,52,216,57,244,188,173,22,16,12,205,152,202,57,69,249,46,210,51,37,153,2,62,41,217,203,156,11,168,90,54,97,133,51,192,111,38,80,219,16,1,222,166,157,12,66,31,148,19,80,244,221,88,178,169,184,221,126,138,53,253,22,130,170,205,119,159,63,236,213,154,48,17,177,98,173,245,156,79,8,196,139,150,246,244,48,154,11,52,70,174,209,194,49,64,211,67,102,141,99,90,158,70,11,45,168,40,110,106,228,61,196,238,177,145,125,1,103,223,97,141,114,176,15,30,190,33,51,177,193,109,105,189,236,96,46,207,101,18,69,57,75,81,149,182,24,233,96,82,188,9,85,198,157,14,136,25,78,168,136,0,5,28,43,248,184,141,37,148,167,49,97,233,139,198,187,25,120,167,13,107,231,166,194,199,101,203,82,173,217,251,156,249,122,114,82,6,12,203,38,87,199,54,5,31,32,49,16,106,205,102,32,166,254,116,189,178,128,32,108,46,244,211,79,66,185,42,58,205,128,138,16,88,244,74,214,73,232,99,4,155,116,240,57,196,80,218,213,15,4,34,143,84,229,48,113,131,78,9,225,194,17,183,11,192,60,161,238,58,29,156,145,100,141,166,247,142,233,184,196,87,175,125,170,235,48,91,201,123,62,33,221,99,177,221,43,39,19,164,127,176,230,133,125,253,148,31,156,219,58,108,152,171,61,204,249,46,189,5,187,215,50,97,223,228,176,1,39,173,107,240,75,53,172,163,139,6,192,61,176,75,42,86,117,250,74,97,25,214,9,111,111,14,128,154,243,34,2,5,28,233,147,56,38,111,161,171,144,119,135,81,240,223,184,72,230,156,128,84,158,22,107,182,165,149,176,230,138,10,206,123,43,109,175,154,150,13,135,23,212,71,68,197,70,18,189,50,107,49,87,20,164,202,92,155,125,15,140,12,251,188,133,162,14,14,143,225,255,95,174,138,38,229,15,251,168,183,155,96,255,223,56,223,246,57,163,220,138,153,171,97,87,224,62,138,24,130,72,20,9,31,78,124,205,14,38,55,70,71,212,71,167,145,164,143,163,107,23,245,186,128,204,1,225,98,30,174,30,59,152,201,17,147,225,50,235,78,28,193,217,239,218,64,94,97,248,187,7,90,57,43,117,143,49,237,87,144,166,187,21,156,106,78,40,241,0,35,195,49,160,107,234,81,224,62,5,8,225,77,235,25,14,19,208,212,166,243,206,249,180,45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,35,209,230,242,37,228,69,205,67,153,28,239,220,6,0,195,188,151,40,65,227,90,205,58,252,183,16,238,166,199,77,134,47,230,73,212,139,158,187,8,53,166,234,25,168,58,76,24,116,175,163,11,18,45,217,81,195,120,53,233,31,96,85,242,47,230,73,212,139,158,187,8,53,166,234,25,168,58,76,24,116,175,163,11,18,45,217,81,195,120,53,233,31,96,85,242,12,67,227,37,50,43,130,91,122,84,125,197,38,133,90,47,96,150,174,235,177,51,214,171,202,230,232,176,43,109,178,26,8,108,157,152,171,9,26,58,107,148,231,177,48,183,30,242,168,195,109,162,187,190,64,190,125,146,228,228,205,184,97,197,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,31,44,15,19
1,195,193,77,155,13,55,48,113,153,233,41,195,53,125,11,255,167,24,114,218,2,220,72,61,73,218,221,139,11,99,215,229,180,215,209,6,19,123,184,99,138,80,231,157,237,137,32,61,6,29,222,33,124,96,82,28,42,176,241,251,12,176,99,105,181,216,72,247,50,143,35,80,6,204,182,51,189,39,225,145,6,26,231,20,29,218,200,194,17,223,96,20,47,139,109,17,222,86,252,37,134,134,198,119,57,121,40,152,12,25,72,63,139,209,76,145,32,151,188,157,31,22,131,73,23,110,9,219,148,170,204,193,76,230,103,31,40,85,128,202,125,35,189,238,83,31,184,233,210,81,50,92,8,155,33,214,18,61,43,29,168,40,4,71,164,214,18,196,56,203,148,184,221,234,211,183,226,115,105,62,239,199,183,167,20,166,247,21,34,215,1,130,242,85,38,152,103,203,234,214,112,141,213,117,46,99,234,76,87,208,3,251,82,109,175,0,173,100,189,77,28,194,58,222,71,65,217,224,83,53,139,138,60,227,88,45,117,222,99,63,74,212,33,203,168,63,183,221,28,63,134,109,28,80,55,120,86,20,124,205,5,240,63,12,112,181,6,4,172,175,2,169,241,187,67,96,135,100,32,211,126,244,221,107,36,204,131,86,21,144,77,92,240,222,146,77,159,37,236,59,193,67,236,142,185,145,167,168,170,156,245,2,15,204,59,88,46,118,114,111,58,58,230,71,234,166,194,91,23,248,139,81,66,136,193,183,173,194,81,124,110,221,46,231,96,123,130,45,33,241,149,44,132,156,251,101,21,95,71,220,21,155,130,195,243,187,14,140,248,34,251,139,147,47,185,183,37,90,205,31,37,200,111,28,95,103,99,4,171,149,97,147,169,192,77,171,193,149,132,171,167,195,192,178,85,168,200,196,49,213,244,184,16,88,33,137,116,140,151,178,238,15,146,16,248,146,153,108,50,59,130,22,186,242,140,95,177,0,51,238,14,210,71,209,34,61,97,202,128,151,46,145,122,152,217,65,95,248,189,2,136,214,240,171,229,110,145,188,215,182,8,52,243,14,226,145,18,160,41,151,114,188,26,90,85,2,139,127,163,35,144,223,203,157,150,128,65,202,50,67,51,60,242,36,213,44,48,82,33,36,50,182,205,69,229,243,26,72,38,180,225,226,179,27,247,185,198,174,136,165,72,228,38,148,93,31,19,107,238,142,41,199,22,205,245,224,214,71,188,237,216,213,115,228,209,160,78,63,59,69,222,17,250,233,199,111,89,147,73,139,0,103,35,96,82,23,227,150,177,37,241,154,115,86,92,117,175,197,62,82,107,233,230,61,81,252,145,59,216,143,216,174,138,81,31,249,191,189,64,116,209,78,148,76,46,239,141,8,128,34,239,3,66,244,142,162,112,62,185,78,71,128,183,7,36,17,32,17,45,199,45,34,199,4,145,34,163,169,2,182,40,151,49,143,135,207,61,209,229,194,0,38,3,68,214,212,166,112,20,197,7,53,166,81,26,213,22,30,23,143,90,146,71,199,110,123,253,84,11,70,40,179,11,190,248,106,176,231,43,170,1,42,114,249,140,209,20,164,133,251,247,197,84,218,50,249,8,182,131,185,250,2,216,165,109,70,21,158,92,100,202,125,30,69,77,86,47,40,219,238,36,226,31,173,188,66,254,142,224,13,10,51,153,42,67,81,254,91,94,23,126,233,121,142,2,233,107,159,217,115,4,28,161,193,77,100,158,193,130,245,233,12,20,209,131,18,86,35,85,80,169,95,232,203,22,96,31,249,191,189,64,116,209,78,148,76,46,239,141,8,128,34,239,3,66,244,142,162,112,62,185,78,71,128,183,7,36,17,30,69,77,86,47,40,219,238,36,226,31,173,188,66,254,142,224,13,10,51,153,42,67,81,254,91,94,23,126,233,121,142,2,233,107,159,217,115,4,28,161,193,77,100,158,193,130,245,233,12,20,209,131,18,86,35,85,80,169,95,232,203,22,96,31,249,191,189,64,116,209,78,148,76,46,239,141,8,128,34,239,3,66,244,142,162,112,62,185,78,71,128,183,7,36,17,26,185,14,39,29,113,251,188,111,49,60,223,55,168,77,109,159,84,171,170,54,214,30,138,177,101,97,193,47,255,208,51,18,89,132,247,167,124,98,64,133,234,83,109,113,86,74,12,227,9,51,94,187,10,44,224,137,186,225,121,105,161,49,126,31,249,191,189,64,116,209,78,148,76,46,239,141,8,128,34,239,3,66,244,14
2,162,112,62,185,78,71,128,183,7,36,17,26,185,14,39,29,113,251,188,111,49,60,223,55,168,77,109,159,84,171,170,54,214,30,138,177,101,97,193,47,255,208,51,18,89,132,247,167,124,98,64,133,234,83,109,113,86,74,12,227,9,51,94,187,10,44,224,137,186,225,121,105,161,49,126,31,249,191,189,64,116,209,78,148,76,46,239,141,8,128,34,239,3,66,244,142,162,112,62,185,78,71,128,183,7,36,17,19,240,242,58,204,225,223,144,187,3,145,166,71,107,28,123,131,253,65,142,25,76,124,50,30,39,184,114,62,3,53,139,11,122,182,186,105,40,180,249,42,240,92,199,89,116,89,160,60,58,127,129,83,196,215,93,163,127,133,158,68,156,174,247,46,120,152,224,87,12,202,201,126,6,248,193,57,31,12,179,161,38,94,175,197,89,79,154,29,72,50,28,32,154,137,73,47,6,116,7,31,25,50,207,207,218,19,188,25,184,99,248,210,83,242,134,143,245,11,77,53,80,40,200,153,62,79,165],"hex_proof":"0x076657be99e53b06fadd0a3fd2331acf2ec29161ed01f52af8ad31bd1500d87d2209c77dab6c0c65594b1f85b393efeddacf51c6528190f744cca62cd942dd2311211da08179e52c5291346bd59ecb62813861cc2250f5032205fc1fa73a38022ed232d33523fa8f7a1f0e54ab53d075ec0361623b6659a1a712c2e91a2d3a262a53ce9c960945b4c9be3ef8746806a8cf70a6f4a8a03c53fa4a283f13552c1b1ea5fc23d445cc91ff12ccf942f5c0746f26af6afc6521b681c8bcf6e06f12741d881519534530d99960a266d29cbc08b7afb5c22df4e79a6613129866a722af025c2ed288284ecafb4dd168fe741525a82c877781d8fcab4632f367c273127d162a1db7116e42a953e22ecbd85b1594a7fc219dc99c14e995f457b8f8ff0e902c310df94d8d45d3b46e3f435b4beb8a585d4a71fd426fc0f384b656b81687b302700087b2bfa2d09f3ac64b779376d37b05d8cef23026629cd71c466121e0951c91fabc0134b7ebec75068c501dd71ffe9c110a1db87c37e48b3fc103cf441009cfa9883522a6c3e116ec0ac8f63d04ec1f47a10c117e871ac508658e52e7392c4c405625deb555a6ba028a6c46742d3c56dc2c17f0a2b98dc49332a32ac5071dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea392a1d174b7f8a5439f1c147d4d5b819a3834f371cb634b241c1d6d35418349bf715c8f2aa92f42ea426a605c913d66759140805ad9dbdd335891420de61662cbc1dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea3905619eec70c885cdc46c87fbf32f3566d675517bdf9f9a03a89120ce3882f47e1e7b19f72a8a500dccf658b9d8dd0bbc442b1e89f23f4394ef63c70ffaecefb21ad122c88a8cce03433378cc497ef9bc3042ee42ccc885f32d0d01cf51b8ee3521543d4e4a9310c1472ed2ccb311655766888fde4ce1b2453ef53f148e66115e2b4a680b92047d158e7e87bd958fcd8c226fa33e6dd21fccaf3207d91abfbe3c255e6b1dcc0ade7cbcf5817dd9a5dc87235b31e555601b196309e1207499d7860db7e0cce98066ee610efe9d276004918bde709bb1660dcc871c69724787b20b253d31a70c8ca56d9f2fd739c05c76e4e4366d11e1dc3867f9f9b49c418ddfa1262aa03fa4fd86321d407b66656e38e5f508e5bef4388a1e40e07e0e71a00e4d092d0c5ee49962a5c1cac21a80b59d99f2e17ffe10ba5d6c00eeb91ec2e6070c12dd63faeeeaeac3c832e21f4a38cd1de32d2805cb2e32059e3fc385c4f684c1030a6dbf02a7891e3a1a2ea5207a78a04f65bd69f13fa82de9f97b6f21c6050112dd63faeeeaeac3c832e21f4a38cd1de32d2805cb2e32059e3fc385c4f684c1030a6dbf02a7891e3a1a2ea5207a78a04f65bd69f13fa82de9f97b6f21c6050124d2d305dea3d4a25ad889e311b6f23ecd659d8367439e590a37cbf080a3432e0b54685215ab58df6f0b1596beb568506707871b4e705199371b87cec7dc336324d2d305dea3d4a25ad889e311b
6f23ecd659d8367439e590a37cbf080a3432e0b54685215ab58df6f0b1596beb568506707871b4e705199371b87cec7dc33630557f3b4d08c6ab88ebd8666a82bff7aa4f848667b5bb4ed9ab9c6a09590bc9423c1b8c1df4fa1c96d3ad43fcdd458c61cb12c18f6ba824fc42a7fbfdb6296d21e1b37c1cc8daea0b30025352488d8c68d97ea5918a96c1bbbce5dfc510972fa0b56c1ed5e9cc681561ebd49844ce9738ac468679d5774e56a4e15276ae23a4d1d6f8283cc0cfcb81c39e3ef357b195bb34b0dd93b07b824a26372ff215d97401f9a50d75cf37d4b9e8102643e6d7291cf1340bcc2eebca16952d7bbff81422a0004e3f3f756dc0acb483b4303c47b960413ce243e49593d0956e40f8efd839c0eb6fb986e2552945931e9a544ee5e01f20a3fd9d7ee1b01c77a99e305fca2820554b020ce3c05efcbb40b1b06c7539aa55d6eb232eeceb045da34d839f4bcad16100ccd98ca3945f92ed2332599023e29d9cb9c0ba85a36618533c06f2650db1001dea69d0c421f941350f4dd58b2a9b8dd7e8a35fd1682aacd779f3fecd59a3011b162adf59c4f08c48b96f6f4309a0b3446aed1c23140d343668d635a9e460b2da8286e6ae43dc4eeb1917d0167df618d72b00f1ebe2133b1c16d69bdec602ecf651245394b5195b618e96052bc0955c69d0e88194ea88800051c2bf8b88d2594a73161e98bc6bb1978a70d6be7a6c2c765cb52add9fb9cf97a7252060ccb2657c736051f2031106acd6620a6fe74bdb280206c2ef4d34f42b92a3acd808a1058f44ad649e863049b74f039c450dad50f04228f54e53071834e09e1c211b70bc03ca1ee3a1d9c91648da6f78ee9b8c457af7daaeb305bc97b3e21dd63b1dd2b2713a47fb0e6857dfd941f9cdb3a6c98ab3dccf92ebd05bbd73261dfe4b00127ad6bf04b35aca38b06c03db04b2a5675fa4a6119d6096f6f0e809af32202051ce99338266fa1ab90778751f0dfb848e69c80549e166bb6a595b0e68a0ace7b2b6daf9a960d8717d44744c54612bd326b315714a4ca5c9b7d0f8c0cfbbc85a20e0e8fe1ff5fae8a26e50ffba8b79b60ffdf38dff639a3dc8a99ab6157e03e8a18824814091f4e7ccd0e26374647d447a791a48fa36b17f5ba80cc01e1621eae1e3b98c91193e132eb4e1cc1d9efda405e61f8bb075a392b758f31ed5790a6bb159c6a4e28f10023c331a06bea51e03e0508e14deb190e13d0d4a6f3cef9b42d000000000000000000000000000000000000000000000000000000000000000023d1e6f225e445cd43991cefdc0600c3bc972841e35acd3afcb710eea6c74d862fe649d48b9ebb0835a6ea19a83a4c1874afa30b122dd951c37835e91f6055f22fe649d48b9ebb0835a6ea19a83a4c1874afa30b122dd951c37835e91f6055f20c43e325322b825b7a547dc526855a2f6096aeebb133d6abcae6e8b02b6db21a086c9d98ab091a3a6b94e7b130b71ef2a8c36da2bbbe40be7d92e4e4cdb861c500000000000000000000000000000000000000000000000000000000000000001f2c0fbfc3c14d9b0d37307199e929c3357d0bffa71872da02dc483d49dadd8b0b63d7e5b4d7d106137bb8638a50e79ded89203d061dde217c60521c2ab0f1fb0cb06369b5d848f7328f235006ccb633bd27e191061ae7141ddac8c211df60142f8b6d11de56fc258686c677397928980c19483f8bd14c912097bc9d1f168349176e09db94aaccc14ce6671f285580ca7d23bdee531fb8e9d251325c089b21d6123d2b1da8280447a4d612c438cb94b8ddead3b7e273693eefc7b7a714a6f71522d70182f255269867cbead6708dd5752e63ea4c57d003fb526daf00ad64bd4d1cc23ade4741d9e053358b8a3ce3582d75de633f4ad421cba83fb7dd1c3f866d1c50377856147ccd05f03f0c70b50604acaf02a9f1bb4360876420d37ef4dd6b24cc835615904d5cf0de924d9f25ec3bc143ec8eb991a7a8aa9cf5020fcc3b582e76726f3a3ae647eaa6c25b17f88b514288c1b7adc2517c6edd2ee7607b822d21f1952c849cfb65155f47dc159b82c3f3bb0e8cf822fb8b932fb9b7255acd1f25c86f1c5f676304ab956193a9c04dabc19584aba7c3c0b255a8c8c431d5f4b810582189748c97b2ee0f9210f892996c323b8216baf28c5fb10033ee0ed247d1223d61ca80972e917a98d9415ff8bd0288d6f0abe56e91bcd7b60834f30ee29112a0299772bc1a5a55028b7fa32390dfcb9d968041ca3243333cf224d52c3052212432b6cd45e5f31a4826b4e1e2b31bf7b9c6ae88a548e426945d1f136bee8e29c716cdf5e0d647bcedd8d573e4d1a04e3f3b45de11fae9c76f5993498b006723605217e396b125f19a73565c75afc53e526be9e63d51fc913bd88fd8ae8a511ff9bfbd4074d14e944c2eef8d088022ef0342f48ea2703eb94e4780b707241120112dc72d22c7049122a3a902b62897318f87cf3dd1e5c200260344d6d4a6
7014c50735a6511ad5161e178f5a9247c76e7bfd540b4628b30bbef86ab0e72baa012a72f98cd114a485fbf7c554da32f908b683b9fa02d8a56d46159e5c64ca7d1e454d562f28dbee24e21fadbc42fe8ee00d0a33992a4351fe5b5e177ee9798e02e96b9fd973041ca1c14d649ec182f5e90c14d1831256235550a95fe8cb16601ff9bfbd4074d14e944c2eef8d088022ef0342f48ea2703eb94e4780b70724111e454d562f28dbee24e21fadbc42fe8ee00d0a33992a4351fe5b5e177ee9798e02e96b9fd973041ca1c14d649ec182f5e90c14d1831256235550a95fe8cb16601ff9bfbd4074d14e944c2eef8d088022ef0342f48ea2703eb94e4780b70724111ab90e271d71fbbc6f313cdf37a84d6d9f54abaa36d61e8ab16561c12fffd033125984f7a77c624085ea536d71564a0ce309335ebb0a2ce089bae17969a1317e1ff9bfbd4074d14e944c2eef8d088022ef0342f48ea2703eb94e4780b70724111ab90e271d71fbbc6f313cdf37a84d6d9f54abaa36d61e8ab16561c12fffd033125984f7a77c624085ea536d71564a0ce309335ebb0a2ce089bae17969a1317e1ff9bfbd4074d14e944c2eef8d088022ef0342f48ea2703eb94e4780b707241113f0f23acce1df90bb0391a6476b1c7b83fd418e194c7c321e27b8723e03358b0b7ab6ba6928b4f92af05cc7597459a03c3a7f8153c4d75da37f859e449caef72e7898e0570ccac97e06f8c1391f0cb3a1265eafc5594f9a1d48321c209a89492f0674071f1932cfcfda13bc19b863f8d253f2868ff50b4d355028c8993e4fa5","transcript_type":"EVM","split":null,"pretty_public_inputs":{"rescaled_inputs":[],"inputs":[],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"timestamp":1729639678889,"commitment":"KZG"} \ No newline at end of file +{"protocol":null,"instances":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"proof":[7,166,170,150,155,86,23,198,197,35,251,133,221,164,123,78,188,249,59,57,68,214,193,218,251,86,240,150,129,188,203,171,2,82,85,249,202,239,24,81,72,15,251,86,154,120,59,49,224,158,138,59,250,72,16,206,152,242,234,14,12,239,239,64,36,121,131,215,70,251,111,84,20,211,118,0,173,102,224,224,77,102,1,126,237,116,89,182,104,212,229,154,146,34,12,38,37,234,12,133,171,161,149,31,156,50,176,50,230,35,194,249,68,147,184,25,225,208,90,223,255,135,45,87,159,18,103,9,38,26,111,114,131,90,60,187,118,110,105,132,76,53,128,0,251,122,184,184,7,42,136,15,60,168,159,237,137,136,125,137,46,233,243,96,207,204,54,51,126,70,81,63,106,127,78,237,187,70,140,254,193,197,7,125,75,138,6,118,112,64,38,63,46,118,250,108,116,102,245,97,136,150,190,103,192,67,150,65,205,91,146,221,30,4,178,102,62,233,171,1,118,6,171,119,15,231,152,61,146,207,45,91,47,28,200,187,108,145,95,194,73,243,48,220,54,38,70,130,11,60,160,73,79,225,215,96,21,92,79,192,33,77,28,40,236,231,10,99,61,155,156,2,87,80,18,224,201,112,236,95,193,16,210,136,171,100,46,35,8,208,128,120,133,177,44,170,101,136,59,139,198,132,117,110,130,135,104,95,221,216,71,32,6,234,90,61,191,101,64,22,37,170,191,181,97,199,110,84,142,197,216,181,169,222,248,56,143,211,253,188,201,144,203,28,119,39,253,94,172,13,241,27,18,0,29,149,10,53,212,106,165,173,87,2,49,117,192,249,43,5,128,3,49,101,5,84,10,135,92,124,116,182,168,123,9,207,169,136,53,34,166,195,225,22,236,10,200,246,61,4,236,31,71,161,12,17,126,135,26,197,8,101,142,82,231,57,44,76,64,86,37,222,181,85,166,186,2,138,108,70,116,45
,60,86,220,44,23,240,162,185,141,196,147,50,163,42,197,7,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,42,29,23,75,127,138,84,57,241,193,71,212,213,184,25,163,131,79,55,28,182,52,178,65,193,214,211,84,24,52,155,247,21,200,242,170,146,244,46,164,38,166,5,201,19,214,103,89,20,8,5,173,157,189,211,53,137,20,32,222,97,102,44,188,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,29,215,253,51,30,13,160,202,14,34,89,185,112,183,170,9,43,64,87,86,87,223,238,221,185,181,181,105,132,245,167,217,24,206,84,81,109,69,112,31,14,90,22,99,59,222,83,190,241,72,86,103,39,90,98,201,42,29,5,149,233,120,234,57,6,60,144,182,102,46,0,222,98,219,226,109,172,91,58,148,56,20,11,88,195,241,19,1,110,98,177,156,64,183,13,0,8,227,125,110,101,187,226,63,21,182,254,78,231,139,38,249,148,223,106,81,182,235,226,178,34,5,13,124,90,18,197,181,24,203,224,149,72,137,107,169,143,181,237,173,17,228,45,5,207,156,249,147,63,98,169,59,202,51,252,224,155,210,33,201,24,109,97,70,240,188,71,7,181,112,119,42,98,28,118,218,98,63,28,93,54,203,135,12,90,101,175,12,143,85,52,211,20,198,52,176,227,143,0,160,253,150,146,64,70,213,170,48,88,177,207,60,238,88,177,56,176,41,78,143,126,182,167,119,11,149,244,249,121,211,174,15,169,42,244,117,28,168,120,29,1,206,87,171,18,74,26,202,43,168,107,202,145,179,228,232,42,107,22,215,13,35,233,242,9,111,240,140,191,176,237,96,50,155,32,16,196,131,244,229,141,46,37,235,96,175,34,201,35,249,246,138,169,179,126,107,111,11,247,187,192,250,68,143,4,21,248,76,231,195,1,155,35,102,38,13,75,198,28,184,22,76,8,163,215,122,10,153,206,203,73,98,21,191,86,81,213,210,43,192,125,41,208,138,121,162,242,56,211,129,194,3,10,52,191,235,187,136,28,54,27,134,186,73,229,203,5,51,216,135,30,132,146,14,45,161,113,117,147,200,73,253,213,211,39,182,105,16,244,113,184,222,74,32,241,110,13,136,203,190,155,137,134,43,119,0,110,25,161,8,200,153,236,118,0,77,18,46,92,77,82,250,25,50,231,117,127,224,153,103,204,222,210,126,81,112,32,178,123,247,95,102,192,194,53,76,67,232,39,182,105,16,244,113,184,222,74,32,241,110,13,136,203,190,155,137,134,43,119,0,110,25,161,8,200,153,236,118,0,77,18,46,92,77,82,250,25,50,231,117,127,224,153,103,204,222,210,126,81,112,32,178,123,247,95,102,192,194,53,76,67,232,35,201,169,34,254,161,190,110,143,201,176,169,122,134,196,44,87,187,136,202,93,53,249,172,247,241,56,222,100,30,48,240,39,203,175,34,155,62,117,225,133,2,136,137,75,173,135,94,225,43,79,190,72,113,175,61,68,145,129,56,96,113,138,111,35,201,169,34,254,161,190,110,143,201,176,169,122,134,196,44,87,187,136,202,93,53,249,172,247,241,56,222,100,30,48,240,39,203,175,34,155,62,117,225,133,2,136,137,75,173,135,94,225,43,79,190,72,113,175,61,68,145,129,56,96,113,138,111,16,49,253,185,189,156,25,250,172,50,191,206,110,136,210,29,208,89,114,229,116,152,234,23,170,175,63,84,150,104,
100,27,5,87,255,214,34,69,43,71,254,238,183,217,189,75,10,214,151,29,232,207,10,133,65,51,68,25,20,205,51,213,94,140,35,30,180,79,186,133,158,169,100,117,238,31,166,110,110,166,180,119,32,252,128,42,176,50,234,46,215,4,114,28,39,104,8,207,225,122,50,38,193,228,225,68,6,194,14,93,125,91,254,75,9,220,37,69,17,214,97,182,252,125,9,37,181,191,15,76,180,187,188,253,141,198,73,225,196,184,214,102,77,136,10,21,248,58,82,120,14,188,59,61,64,31,141,141,20,64,34,78,155,123,130,12,81,145,145,87,236,198,96,65,203,153,90,99,237,199,172,162,115,112,42,26,197,112,190,21,149,107,25,237,224,141,1,191,234,78,56,22,99,19,148,53,135,125,214,157,51,8,51,4,78,203,238,79,174,96,243,31,147,126,39,21,126,242,84,190,70,6,100,36,79,206,23,97,31,253,143,218,245,14,207,228,241,146,249,182,53,220,134,0,19,133,4,0,167,74,45,93,165,158,13,34,255,4,128,203,82,163,170,242,153,89,13,4,195,12,217,217,249,173,81,208,168,163,32,146,127,167,67,21,163,53,101,251,185,229,112,232,157,41,228,245,162,249,148,237,227,123,120,156,145,121,252,2,177,210,11,168,87,5,84,138,132,169,23,128,174,162,140,254,216,71,145,230,233,148,137,246,160,216,104,128,108,80,53,172,58,166,38,127,192,140,247,214,244,255,249,248,106,179,240,248,2,16,89,16,88,57,29,114,10,155,227,178,195,229,89,0,116,163,17,222,207,53,7,2,227,192,125,238,30,159,64,52,230,103,231,23,5,119,200,160,143,225,1,232,30,129,136,195,149,110,31,3,11,56,252,179,18,248,129,178,139,152,149,168,225,239,40,110,58,7,185,158,186,124,180,87,186,114,243,22,229,247,18,39,140,48,167,249,53,63,108,46,118,81,5,146,57,252,25,173,235,199,235,241,69,119,130,40,156,170,109,141,152,165,32,66,188,216,49,205,189,189,232,154,221,18,135,229,192,117,196,95,69,137,94,212,9,117,67,163,106,36,180,108,5,187,19,77,45,222,24,88,9,89,88,57,24,185,211,42,6,74,85,107,148,193,225,249,155,175,31,93,128,213,250,101,115,68,32,99,240,88,46,78,136,35,68,25,119,206,127,239,89,103,188,47,96,220,252,147,195,8,70,36,133,83,101,2,35,17,43,152,40,17,233,172,191,66,134,163,147,58,126,142,40,94,38,47,168,253,189,215,4,75,231,208,242,119,229,251,70,254,13,112,162,91,248,166,104,204,51,105,158,29,75,47,41,100,86,252,8,68,204,9,52,192,231,135,235,18,196,173,13,48,31,153,108,84,110,204,195,77,140,194,169,247,153,252,57,48,79,70,223,42,12,41,11,222,122,66,54,178,70,15,46,90,6,1,178,191,122,32,150,39,100,143,125,123,129,187,178,123,1,13,70,155,154,204,165,213,142,195,57,7,133,204,182,146,40,104,255,247,167,63,125,148,40,149,24,135,21,1,8,91,16,239,194,161,173,69,176,47,197,107,52,92,110,86,104,31,41,210,161,158,85,45,114,183,49,145,190,179,95,184,134,76,194,210,30,169,55,208,229,9,69,214,48,145,171,247,139,87,20,121,104,30,171,112,163,145,250,191,222,65,13,118,240,49,180,213,143,154,235,234,11,0,77,231,37,30,76,170,112,74,21,227,72,199,167,94,195,122,223,250,66,175,232,233,148,57,42,10,75,51,153,148,67,149,189,228,81,169,81,43,233,95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,43,82,30,245,148,185,54,160,171,27,79,105,248,176,192,161,244,25,209,72,146,4,121,102,3,60,82,41,158,26,24,36,46,213,17,106,207,156,166,102,49,135,0,26,205,192,174,204,238,138,20,89,6,141,225,150,229,9,208,163,79,3,197,172,46,213,17,106,207,156,166,102,49,135,0,26,205,192,174,204,238,138,20,89,6,141,225,150,229,9,208,163,79,3,197,172,29,71,111,199,140,15,213,223,167,80,181,234,122,14,76,243,239,251,214,187,120,28,117,245,191,100,21,67,67,45,177,135,42,250,131,138,251,28,216,103,209,238,76,42,143,251,3,48,249,183,96,39,18,70,153,167,16,255,106,133,247,168,140,176,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,44,188,6,250,40,63,1
51,235,132,131,44,118,53,20,236,49,248,172,74,49,141,0,118,239,93,47,76,58,128,75,156,179,27,252,222,31,46,69,199,133,61,249,116,189,80,173,129,154,199,50,220,127,153,83,226,210,142,13,191,208,53,85,155,16,1,29,147,75,214,85,9,150,210,68,59,75,152,33,237,74,215,28,11,76,231,137,107,47,244,52,22,6,135,5,211,94,45,174,150,176,252,202,143,35,15,241,240,62,17,68,205,207,53,154,249,26,143,121,13,60,18,58,98,226,236,51,111,165,22,32,242,167,252,138,68,140,223,180,153,194,120,245,5,239,89,218,225,50,89,199,99,75,56,211,72,56,176,149,4,202,48,45,251,126,86,0,89,88,241,71,245,8,23,251,187,143,14,108,222,12,115,112,132,200,46,72,19,104,26,94,167,242,40,185,195,193,234,63,50,92,59,38,21,151,10,236,245,22,133,242,234,83,65,30,199,167,192,247,33,36,151,142,155,105,31,208,76,49,83,176,205,58,232,86,103,215,188,64,135,83,234,62,173,67,110,211,214,115,38,163,57,3,154,18,124,240,28,184,170,182,45,32,79,163,127,18,142,203,99,124,117,179,235,76,252,65,1,183,244,223,252,62,248,6,83,102,9,150,11,157,92,149,175,26,197,141,197,78,52,186,219,113,54,42,210,37,45,218,178,231,176,29,56,34,143,63,78,162,246,107,33,51,121,105,95,65,169,29,78,105,112,28,175,241,94,65,208,62,17,235,243,87,243,196,219,74,117,113,100,53,6,45,1,127,33,32,23,109,144,186,9,224,2,102,209,79,114,225,209,148,115,125,16,197,230,44,129,238,72,162,98,9,241,222,21,230,100,179,26,138,169,255,252,116,85,237,98,15,226,82,243,181,195,157,223,38,66,65,88,27,31,225,251,155,71,224,5,213,95,93,57,163,221,5,111,112,70,65,158,206,165,189,80,160,51,174,247,218,179,7,194,169,181,86,61,65,196,128,41,77,231,155,187,102,152,152,64,33,149,118,52,79,103,79,26,149,83,226,131,41,216,207,92,16,160,133,150,217,88,88,32,27,208,15,55,74,137,11,17,22,185,144,24,109,240,189,47,203,171,152,38,213,78,26,149,238,180,54,67,37,38,56,26,91,187,85,108,6,200,5,111,116,202,72,221,83,68,244,130,93,90,243,100,238,163,254,87,143,104,139,30,204,103,49,3,196,42,137,53,237,14,171,171,123,206,33,194,204,4,86,104,138,70,244,116,125,12,178,219,26,206,226,243,87,43,132,30,120,142,18,114,200,124,215,161,167,207,8,112,69,137,87,136,253,167,180,6,113,172,119,191,126,182,218,80,140,197,46,19,60,29,201,28,152,47,45,215,63,236,65,136,227,178,153,47,235,135,80,79,71,38,162,211,217,177,240,15,189,136,220,10,2,224,51,139,150,164,205,46,74,6,66,53,249,25,174,119,212,163,160,167,23,43,80,208,207,117,141,34,167,170,18,29,222,122,198,27,241,220,24,62,17,188,146,76,33,65,170,228,227,49,129,109,255,79,222,85,222,230,231,152,234,46,165,7,31,157,140,79,110,152,215,152,52,222,7,84,111,146,91,82,113,211,163,21,167,125,91,131,181,182,155,15,14,222,85,12,35,119,91,201,232,128,194,20,6,106,58,210,223,65,221,197,45,89,212,35,161,126,25,129,150,150,91,220,173,71,89,43,219,9,74,80,70,77,190,190,244,80,81,20,228,176,115,169,140,135,231,80,205,67,195,125,165,27,53,219,16,253,141,19,60,29,201,28,152,47,45,215,63,236,65,136,227,178,153,47,235,135,80,79,71,38,162,211,217,177,240,15,189,136,220,12,35,119,91,201,232,128,194,20,6,106,58,210,223,65,221,197,45,89,212,35,161,126,25,129,150,150,91,220,173,71,89,43,219,9,74,80,70,77,190,190,244,80,81,20,228,176,115,169,140,135,231,80,205,67,195,125,165,27,53,219,16,253,141,19,60,29,201,28,152,47,45,215,63,236,65,136,227,178,153,47,235,135,80,79,71,38,162,211,217,177,240,15,189,136,220,7,123,194,70,47,156,114,87,242,34,71,150,153,124,255,239,22,222,144,122,185,1,63,220,159,24,133,0,243,43,252,11,46,18,164,46,201,100,171,189,202,204,234,105,51,175,202,246,183,64,231,203,242,22,121,230,124,37,93,123,206,193,132,35,19,60,29,201,28,152,47,45,215,63,236,65,136,227,178,153,47,235,135,80
,79,71,38,162,211,217,177,240,15,189,136,220,7,123,194,70,47,156,114,87,242,34,71,150,153,124,255,239,22,222,144,122,185,1,63,220,159,24,133,0,243,43,252,11,46,18,164,46,201,100,171,189,202,204,234,105,51,175,202,246,183,64,231,203,242,22,121,230,124,37,93,123,206,193,132,35,19,60,29,201,28,152,47,45,215,63,236,65,136,227,178,153,47,235,135,80,79,71,38,162,211,217,177,240,15,189,136,220,10,188,78,91,11,221,125,74,247,236,212,71,236,117,176,242,182,228,150,199,25,6,231,159,137,151,227,225,155,167,191,146,15,5,241,228,124,155,31,64,127,88,200,135,66,244,123,180,156,83,235,210,155,17,83,198,255,139,27,211,169,209,181,155,36,223,147,200,80,59,151,233,89,231,109,124,245,22,137,48,92,88,189,129,126,92,236,205,229,187,100,211,82,146,178,231,17,114,155,132,92,254,114,171,94,136,118,37,136,220,53,185,226,197,227,245,131,200,250,142,33,11,233,10,20,178,59,180],"hex_proof":"0x07a6aa969b5617c6c523fb85dda47b4ebcf93b3944d6c1dafb56f09681bccbab025255f9caef1851480ffb569a783b31e09e8a3bfa4810ce98f2ea0e0cefef40247983d746fb6f5414d37600ad66e0e04d66017eed7459b668d4e59a92220c2625ea0c85aba1951f9c32b032e623c2f94493b819e1d05adfff872d579f126709261a6f72835a3cbb766e69844c358000fb7ab8b8072a880f3ca89fed89887d892ee9f360cfcc36337e46513f6a7f4eedbb468cfec1c5077d4b8a06767040263f2e76fa6c7466f5618896be67c0439641cd5b92dd1e04b2663ee9ab017606ab770fe7983d92cf2d5b2f1cc8bb6c915fc249f330dc362646820b3ca0494fe1d760155c4fc0214d1c28ece70a633d9b9c02575012e0c970ec5fc110d288ab642e2308d0807885b12caa65883b8bc684756e8287685fddd8472006ea5a3dbf65401625aabfb561c76e548ec5d8b5a9def8388fd3fdbcc990cb1c7727fd5eac0df11b12001d950a35d46aa5ad57023175c0f92b058003316505540a875c7c74b6a87b09cfa9883522a6c3e116ec0ac8f63d04ec1f47a10c117e871ac508658e52e7392c4c405625deb555a6ba028a6c46742d3c56dc2c17f0a2b98dc49332a32ac5071dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea392a1d174b7f8a5439f1c147d4d5b819a3834f371cb634b241c1d6d35418349bf715c8f2aa92f42ea426a605c913d66759140805ad9dbdd335891420de61662cbc1dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea391dd7fd331e0da0ca0e2259b970b7aa092b40575657dfeeddb9b5b56984f5a7d918ce54516d45701f0e5a16633bde53bef1485667275a62c92a1d0595e978ea39063c90b6662e00de62dbe26dac5b3a9438140b58c3f113016e62b19c40b70d0008e37d6e65bbe23f15b6fe4ee78b26f994df6a51b6ebe2b222050d7c5a12c5b518cbe09548896ba98fb5edad11e42d05cf9cf9933f62a93bca33fce09bd221c9186d6146f0bc4707b570772a621c76da623f1c5d36cb870c5a65af0c8f5534d314c634b0e38f00a0fd96924046d5aa3058b1cf3cee58b138b0294e8f7eb6a7770b95f4f979d3ae0fa92af4751ca8781d01ce57ab124a1aca2ba86bca91b3e4e82a6b16d70d23e9f2096ff08cbfb0ed60329b2010c483f4e58d2e25eb60af22c923f9f68aa9b37e6b6f0bf7bbc0fa448f0415f84ce7c3019b2366260d4bc61cb8164c08a3d77a0a99cecb496215bf5651d5d22bc07d29d08a79a2f238d381c2030a34bfebbb881c361b86ba49e5cb0533d8871e84920e2da1717593c849fdd5d327b66910f471b8de4a20f16e0d88cbbe9b89862b77006e19a108c899ec76004d122e5c4d52fa1932e7757fe09967ccded27e517020b27bf75f66c0c2354c43e827b66910f471b8de4a20f16e0d88cbbe9b89862b77006e19a108c899ec76004d122e5c4d52fa1932e7757fe09967ccded27e517020b27bf75f66c0c2354c43e823c9a922fea1be6e8fc9b0a97a86c42c57bb88ca5d35f9acf7f138de641e30f027cbaf229b3e75e1850288894bad875ee12b4fbe4871af3d44918
13860718a6f23c9a922fea1be6e8fc9b0a97a86c42c57bb88ca5d35f9acf7f138de641e30f027cbaf229b3e75e1850288894bad875ee12b4fbe4871af3d4491813860718a6f1031fdb9bd9c19faac32bfce6e88d21dd05972e57498ea17aaaf3f549668641b0557ffd622452b47feeeb7d9bd4b0ad6971de8cf0a854133441914cd33d55e8c231eb44fba859ea96475ee1fa66e6ea6b47720fc802ab032ea2ed704721c276808cfe17a3226c1e4e14406c20e5d7d5bfe4b09dc254511d661b6fc7d0925b5bf0f4cb4bbbcfd8dc649e1c4b8d6664d880a15f83a52780ebc3b3d401f8d8d1440224e9b7b820c51919157ecc66041cb995a63edc7aca273702a1ac570be15956b19ede08d01bfea4e381663139435877dd69d330833044ecbee4fae60f31f937e27157ef254be460664244fce17611ffd8fdaf50ecfe4f192f9b635dc860013850400a74a2d5da59e0d22ff0480cb52a3aaf299590d04c30cd9d9f9ad51d0a8a320927fa74315a33565fbb9e570e89d29e4f5a2f994ede37b789c9179fc02b1d20ba85705548a84a91780aea28cfed84791e6e99489f6a0d868806c5035ac3aa6267fc08cf7d6f4fff9f86ab3f0f80210591058391d720a9be3b2c3e5590074a311decf350702e3c07dee1e9f4034e667e7170577c8a08fe101e81e8188c3956e1f030b38fcb312f881b28b9895a8e1ef286e3a07b99eba7cb457ba72f316e5f712278c30a7f9353f6c2e7651059239fc19adebc7ebf1457782289caa6d8d98a52042bcd831cdbdbde89add1287e5c075c45f45895ed4097543a36a24b46c05bb134d2dde18580959583918b9d32a064a556b94c1e1f99baf1f5d80d5fa6573442063f0582e4e8823441977ce7fef5967bc2f60dcfc93c30846248553650223112b982811e9acbf4286a3933a7e8e285e262fa8fdbdd7044be7d0f277e5fb46fe0d70a25bf8a668cc33699e1d4b2f296456fc0844cc0934c0e787eb12c4ad0d301f996c546eccc34d8cc2a9f799fc39304f46df2a0c290bde7a4236b2460f2e5a0601b2bf7a209627648f7d7b81bbb27b010d469b9acca5d58ec3390785ccb6922868fff7a73f7d94289518871501085b10efc2a1ad45b02fc56b345c6e56681f29d2a19e552d72b73191beb35fb8864cc2d21ea937d0e50945d63091abf78b571479681eab70a391fabfde410d76f031b4d58f9aebea0b004de7251e4caa704a15e348c7a75ec37adffa42afe8e994392a0a4b3399944395bde451a9512be95f00000000000000000000000000000000000000000000000000000000000000002b521ef594b936a0ab1b4f69f8b0c0a1f419d14892047966033c52299e1a18242ed5116acf9ca6663187001acdc0aeccee8a1459068de196e509d0a34f03c5ac2ed5116acf9ca6663187001acdc0aeccee8a1459068de196e509d0a34f03c5ac1d476fc78c0fd5dfa750b5ea7a0e4cf3effbd6bb781c75f5bf641543432db1872afa838afb1cd867d1ee4c2a8ffb0330f9b76027124699a710ff6a85f7a88cb000000000000000000000000000000000000000000000000000000000000000002cbc06fa283f97eb84832c763514ec31f8ac4a318d0076ef5d2f4c3a804b9cb31bfcde1f2e45c7853df974bd50ad819ac732dc7f9953e2d28e0dbfd035559b10011d934bd6550996d2443b4b9821ed4ad71c0b4ce7896b2ff43416068705d35e2dae96b0fcca8f230ff1f03e1144cdcf359af91a8f790d3c123a62e2ec336fa51620f2a7fc8a448cdfb499c278f505ef59dae13259c7634b38d34838b09504ca302dfb7e56005958f147f50817fbbb8f0e6cde0c737084c82e4813681a5ea7f228b9c3c1ea3f325c3b2615970aecf51685f2ea53411ec7a7c0f72124978e9b691fd04c3153b0cd3ae85667d7bc408753ea3ead436ed3d67326a339039a127cf01cb8aab62d204fa37f128ecb637c75b3eb4cfc4101b7f4dffc3ef806536609960b9d5c95af1ac58dc54e34badb71362ad2252ddab2e7b01d38228f3f4ea2f66b213379695f41a91d4e69701caff15e41d03e11ebf357f3c4db4a75716435062d017f2120176d90ba09e00266d14f72e1d194737d10c5e62c81ee48a26209f1de15e664b31a8aa9fffc7455ed620fe252f3b5c39ddf264241581b1fe1fb9b47e005d55f5d39a3dd056f7046419ecea5bd50a033aef7dab307c2a9b5563d41c480294de79bbb66989840219576344f674f1a9553e28329d8cf5c10a08596d95858201bd00f374a890b1116b990186df0bd2fcbab9826d54e1a95eeb436432526381a5bbb556c06c8056f74ca48dd5344f4825d5af364eea3fe578f688b1ecc673103c42a8935ed0eabab7bce21c2cc0456688a46f4747d0cb2db1acee2f3572b841e788e1272c87cd7a1a7cf087045895788fda7b40671ac77bf7eb6da508cc52e133c1dc91c982f2dd73fec4188e3b2992feb87504f4726a2d3d9b1f00fbd88dc0a02e0338b96a4cd2e4a0642
35f919ae77d4a3a0a7172b50d0cf758d22a7aa121dde7ac61bf1dc183e11bc924c2141aae4e331816dff4fde55dee6e798ea2ea5071f9d8c4f6e98d79834de07546f925b5271d3a315a77d5b83b5b69b0f0ede550c23775bc9e880c214066a3ad2df41ddc52d59d423a17e198196965bdcad47592bdb094a50464dbebef4505114e4b073a98c87e750cd43c37da51b35db10fd8d133c1dc91c982f2dd73fec4188e3b2992feb87504f4726a2d3d9b1f00fbd88dc0c23775bc9e880c214066a3ad2df41ddc52d59d423a17e198196965bdcad47592bdb094a50464dbebef4505114e4b073a98c87e750cd43c37da51b35db10fd8d133c1dc91c982f2dd73fec4188e3b2992feb87504f4726a2d3d9b1f00fbd88dc077bc2462f9c7257f2224796997cffef16de907ab9013fdc9f188500f32bfc0b2e12a42ec964abbdcaccea6933afcaf6b740e7cbf21679e67c255d7bcec18423133c1dc91c982f2dd73fec4188e3b2992feb87504f4726a2d3d9b1f00fbd88dc077bc2462f9c7257f2224796997cffef16de907ab9013fdc9f188500f32bfc0b2e12a42ec964abbdcaccea6933afcaf6b740e7cbf21679e67c255d7bcec18423133c1dc91c982f2dd73fec4188e3b2992feb87504f4726a2d3d9b1f00fbd88dc0abc4e5b0bdd7d4af7ecd447ec75b0f2b6e496c71906e79f8997e3e19ba7bf920f05f1e47c9b1f407f58c88742f47bb49c53ebd29b1153c6ff8b1bd3a9d1b59b24df93c8503b97e959e76d7cf51689305c58bd817e5ceccde5bb64d35292b2e711729b845cfe72ab5e88762588dc35b9e2c5e3f583c8fa8e210be90a14b23bb4","transcript_type":"EVM","split":null,"pretty_public_inputs":{"rescaled_inputs":[],"inputs":[],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"timestamp":1730948796270,"commitment":"KZG"}
\ No newline at end of file
diff --git a/tests/assets/settings.json b/tests/assets/settings.json
index a123fc99d..1fc77672e 100644
--- a/tests/assets/settings.json
+++ b/tests/assets/settings.json
@@ -27,7 +27,8 @@
    "check_mode": "UNSAFE",
    "commitment": "KZG",
    "decomp_base": 128,
-    "decomp_legs": 2
+    "decomp_legs": 2,
+    "bounded_log_lookup": false
  },
  "num_rows": 46,
  "total_assignments": 92,
diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs
index 820d686ad..f7bfb8801 100644
--- a/tests/integration_tests.rs
+++ b/tests/integration_tests.rs
@@ -205,7 +205,7 @@ mod native_tests {
        "1l_tiny_div",
    ];

-    const TESTS: [&str; 95] = [
+    const TESTS: [&str; 96] = [
        "1l_mlp", //0
        "1l_slice",
        "1l_concat",
@@ -305,6 +305,7 @@ mod native_tests {
        "lstm_medium", // 92
        "lenet_5", // 93
        "rsqrt", // 94
+        "log", // 95
    ];

    const WASM_TESTS: [&str; 46] = [
@@ -543,7 +544,7 @@ mod native_tests {
        }
    });

-    seq!(N in 0..=94 {
+    seq!(N in 0..=95 {
    #(#[test_case(TESTS[N])])*
    #[ignore]
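
Note on the integration_tests.rs hunks above: registering the new `log` model takes two coordinated edits, appending `"log"` at index 95 of `TESTS` and raising the `seq!` upper bound from `0..=94` to `0..=95` so a test case is generated for the new index. Below is a scaled-down sketch of how that macro pattern expands, assuming the `seq-macro` and `test-case` crates the test module already relies on; the three-entry array and the test body are illustrative only, not the real harness.

use seq_macro::seq;
use test_case::test_case;

// Three entries stand in for the real 96-entry TESTS array.
const TESTS: [&str; 3] = ["1l_mlp", "rsqrt", "log"];

seq!(N in 0..=2 {
    // The #( ... )* repetition stamps out one #[test_case(...)] attribute per index,
    // so the single function below yields one generated test per TESTS entry.
    #(#[test_case(TESTS[N])])*
    #[ignore]
    fn runs_example_model(test: &str) {
        // Illustrative body only; the real test mock-proves the named ONNX example.
        assert!(!test.is_empty());
    }
});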