chore(security): enhance environment configuration, CI workflows, and wallet daemon with security improvements

- Restructure .env.example with security-focused documentation, service-specific environment file references, and AWS Secrets Manager integration
- Update CLI tests workflow to single Python 3.13 version, add pytest-mock dependency, and consolidate test execution with coverage
- Add comprehensive security validation to package publishing workflow with manual approval gates, secret scanning, and release gating
This commit is contained in:
oib
2026-03-03 10:33:46 +01:00
parent 00d00cb964
commit f353e00172
220 changed files with 42506 additions and 921 deletions

View File

@@ -0,0 +1,161 @@
// SPDX-License-Identifier: GPL-3.0
/*
Copyright 2021 0KIMS association.
This file is generated with [snarkJS](https://github.com/iden3/snarkjs).
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
pragma solidity >=0.7.0 <0.9.0;
// Auto-generated Groth16 proof verifier (emitted by snarkJS). The verification
// key is baked in as constants below; verifyProof checks a single proof against
// it using the EVM elliptic-curve precompiles (addresses 6, 7 and 8 — presumably
// the alt_bn128 add/mul/pairing precompiles given the field moduli; TODO confirm).
contract Groth16Verifier {
// Scalar field size
uint256 constant r = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
// Base field size
uint256 constant q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
// Verification Key data
uint256 constant alphax = 17878197547960430839188198659895507284003628546353226099044915418621989763688;
uint256 constant alphay = 2414401954608202804440744777004803831246497417525080466014468287036253862429;
uint256 constant betax1 = 9712108885154437847450578891476498392461803797234760197580929785758376650650;
uint256 constant betax2 = 18272358567695662813397521777636023960648994006030407065408973578488017511142;
uint256 constant betay1 = 21680758250979848935332437508266260788381562861496889541922176243649072173633;
uint256 constant betay2 = 18113399933881081841371513445282849558527348349073876801631247450598780960185;
uint256 constant gammax1 = 11559732032986387107991004021392285783925812861821192530917403151452391805634;
uint256 constant gammax2 = 10857046999023057135944570762232829481370756359578518086990519993285655852781;
uint256 constant gammay1 = 4082367875863433681332203403145435568316851327593401208105741076214120093531;
uint256 constant gammay2 = 8495653923123431417604973247489272438418190587263600148770280649306958101930;
uint256 constant deltax1 = 12774548987221780347146542577375964674074290054683884142054120470956957679394;
uint256 constant deltax2 = 12165843319937710460660491044309080580686643140898844199182757276079170588931;
uint256 constant deltay1 = 5902046582690481723876569491209283634644066206041445880136420948730372505228;
uint256 constant deltay2 = 11495780469843451809285048515398120762160136824338528775648991644403497551783;
// IC0 is the only IC point: this circuit has zero public signals, so the
// linear combination vk_x is just IC0 with nothing accumulated onto it.
uint256 constant IC0x = 4148018046519347596812177481784308374584693326254693053110348164627817172095;
uint256 constant IC0y = 20730985524054218557052728073337277395061462810058907329882330843946617288874;
// Memory data
// Byte offsets into the scratch region reserved past the free-memory pointer:
// pVk      — where the vk_x G1 point (64 bytes) is staged,
// pPairing — where the 4 (G1, G2) pairs for the pairing precompile are laid out,
// pLastMem — total scratch size reserved (896 bytes).
uint16 constant pVk = 0;
uint16 constant pPairing = 128;
uint16 constant pLastMem = 896;
// Verifies a Groth16 proof (A in G1, B in G2, C in G1). Returns true iff the
// pairing equation holds. _pubSignals is a zero-length array, so no public
// inputs are hashed into the check.
function verifyProof(uint[2] calldata _pA, uint[2][2] calldata _pB, uint[2] calldata _pC, uint[0] calldata _pubSignals) public view returns (bool) {
assembly {
// Returns false (writes 0 and exits the call) if v is not a canonical
// scalar, i.e. v >= r.
// NOTE(review): with zero public signals this helper is never invoked
// anywhere in this generated code.
function checkField(v) {
if iszero(lt(v, r)) {
mstore(0, 0)
return(0, 0x20)
}
}
// G1 function to multiply a G1 value(x,y) to value in an address
// pR := pR + s * (x, y): scalar-multiply via precompile 7, then point-add
// onto the accumulator at pR via precompile 6. Any precompile failure
// makes the whole call return false.
// NOTE(review): also unused here — there are no public inputs to fold
// into vk_x, so vk_x stays equal to IC0.
function g1_mulAccC(pR, x, y, s) {
let success
let mIn := mload(0x40)
mstore(mIn, x)
mstore(add(mIn, 32), y)
mstore(add(mIn, 64), s)
success := staticcall(sub(gas(), 2000), 7, mIn, 96, mIn, 64)
if iszero(success) {
mstore(0, 0)
return(0, 0x20)
}
mstore(add(mIn, 64), mload(pR))
mstore(add(mIn, 96), mload(add(pR, 32)))
success := staticcall(sub(gas(), 2000), 6, mIn, 128, pR, 64)
if iszero(success) {
mstore(0, 0)
return(0, 0x20)
}
}
// Lays out the four (G1, G2) pairs — (-A, B), (alpha, beta), (vk_x, gamma),
// (C, delta) — as 768 bytes of input to the pairing precompile at address 8,
// and returns its success AND its 0/1 result word.
function checkPairing(pA, pB, pC, pubSignals, pMem) -> isOk {
let _pPairing := add(pMem, pPairing)
let _pVk := add(pMem, pVk)
mstore(_pVk, IC0x)
mstore(add(_pVk, 32), IC0y)
// Compute the linear combination vk_x
// -A
// A is negated by replacing its y-coordinate with q - y (mod q).
mstore(_pPairing, calldataload(pA))
mstore(add(_pPairing, 32), mod(sub(q, calldataload(add(pA, 32))), q))
// B
mstore(add(_pPairing, 64), calldataload(pB))
mstore(add(_pPairing, 96), calldataload(add(pB, 32)))
mstore(add(_pPairing, 128), calldataload(add(pB, 64)))
mstore(add(_pPairing, 160), calldataload(add(pB, 96)))
// alpha1
mstore(add(_pPairing, 192), alphax)
mstore(add(_pPairing, 224), alphay)
// beta2
mstore(add(_pPairing, 256), betax1)
mstore(add(_pPairing, 288), betax2)
mstore(add(_pPairing, 320), betay1)
mstore(add(_pPairing, 352), betay2)
// vk_x
mstore(add(_pPairing, 384), mload(add(pMem, pVk)))
mstore(add(_pPairing, 416), mload(add(pMem, add(pVk, 32))))
// gamma2
mstore(add(_pPairing, 448), gammax1)
mstore(add(_pPairing, 480), gammax2)
mstore(add(_pPairing, 512), gammay1)
mstore(add(_pPairing, 544), gammay2)
// C
mstore(add(_pPairing, 576), calldataload(pC))
mstore(add(_pPairing, 608), calldataload(add(pC, 32)))
// delta2
mstore(add(_pPairing, 640), deltax1)
mstore(add(_pPairing, 672), deltax2)
mstore(add(_pPairing, 704), deltay1)
mstore(add(_pPairing, 736), deltay2)
let success := staticcall(sub(gas(), 2000), 8, _pPairing, 768, _pPairing, 0x20)
isOk := and(success, mload(_pPairing))
}
// Reserve pLastMem bytes of scratch memory past the free-memory pointer.
let pMem := mload(0x40)
mstore(0x40, add(pMem, pLastMem))
// Validate that all evaluations ∈ F
// Validate all evaluations
// NOTE(review): no checkField calls are generated here (zero public
// signals), so nothing is range-checked before pairing.
let isValid := checkPairing(_pA, _pB, _pC, _pubSignals, pMem)
mstore(0, isValid)
return(0, 0x20)
}
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,135 @@
pragma circom 2.0.0;
/*
 * Modular ML Circuit Components
 *
 * Reusable components for machine learning circuits
 */
// Basic parameter update component (gradient descent step)
// One scalar gradient-descent step. learning_rate * gradient is a
// signal-by-signal product, so each instance adds one quadratic constraint.
template ParameterUpdate() {
signal input current_param;
signal input gradient;
signal input learning_rate;
signal output new_param;
// Simple gradient descent: new_param = current_param - learning_rate * gradient
new_param <== current_param - learning_rate * gradient;
}
// Vector parameter update component
// Element-wise ParameterUpdate over a fixed-size vector; one shared
// learning_rate signal feeds all PARAM_COUNT instances.
template VectorParameterUpdate(PARAM_COUNT) {
signal input current_params[PARAM_COUNT];
signal input gradients[PARAM_COUNT];
signal input learning_rate;
signal output new_params[PARAM_COUNT];
component updates[PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
updates[i] = ParameterUpdate();
updates[i].current_param <== current_params[i];
updates[i].gradient <== gradients[i];
updates[i].learning_rate <== learning_rate;
new_params[i] <== updates[i].new_param;
}
}
// Simple loss constraint component
// NOTE(review): this template is never instantiated in this file (main uses
// only LearningRateValidation and TrainingEpoch), so it contributes nothing
// to the compiled circuit.
// NOTE(review): the final constraint divides by the signal tolerance_squared;
// a circom === constraint must be quadratic, and division by a non-constant
// signal is not, so this template would be rejected by the compiler if used.
// NOTE(review): even as written, the equation only forces diff_squared == 0
// or diff_squared == tolerance_squared — it does NOT enforce
// |diff| <= tolerance as the inline comments claim. A real range check
// (bit decomposition / LessThan-style comparator) is needed for that.
template LossConstraint() {
signal input predicted_loss;
signal input actual_loss;
signal input tolerance;
// Constrain that |predicted_loss - actual_loss| <= tolerance
signal diff;
diff <== predicted_loss - actual_loss;
// Use absolute value constraint: diff^2 <= tolerance^2
signal diff_squared;
diff_squared <== diff * diff;
signal tolerance_squared;
tolerance_squared <== tolerance * tolerance;
// This constraint ensures the loss is within tolerance
diff_squared * (1 - diff_squared / tolerance_squared) === 0;
}
// Learning rate validation component
// NOTE(review): deliberately empty — learning_rate is accepted but not
// constrained in-circuit, so any field element is a "valid" learning rate.
template LearningRateValidation() {
signal input learning_rate;
// Removed constraint for optimization - learning rate validation handled externally
// This reduces non-linear constraints from 1 to 0 for better proving performance
}
// Training epoch component
// One epoch = one vector gradient-descent step, delegated to
// VectorParameterUpdate.
template TrainingEpoch(PARAM_COUNT) {
signal input epoch_params[PARAM_COUNT];
signal input epoch_gradients[PARAM_COUNT];
signal input learning_rate;
signal output next_epoch_params[PARAM_COUNT];
component param_update = VectorParameterUpdate(PARAM_COUNT);
param_update.current_params <== epoch_params;
param_update.gradients <== epoch_gradients;
param_update.learning_rate <== learning_rate;
next_epoch_params <== param_update.new_params;
}
// Main modular training verification using components
// Chains EPOCHS TrainingEpoch instances over a PARAM_COUNT-wide vector.
// NOTE(review): gradients are hard-coded to 1 for every epoch, so the circuit
// only proves final_parameters[i] == initial_parameters[i] - EPOCHS *
// learning_rate; it does not verify any actual training computation.
// NOTE(review): training_complete is the constant 1 and learning_rate is
// unconstrained, so these carry no verification weight.
template ModularTrainingVerification(PARAM_COUNT, EPOCHS) {
signal input initial_parameters[PARAM_COUNT];
signal input learning_rate;
signal output final_parameters[PARAM_COUNT];
signal output training_complete;
// Learning rate validation
component lr_validator = LearningRateValidation();
lr_validator.learning_rate <== learning_rate;
// Training epochs using modular components
// current_params[e] holds the parameter vector entering epoch e.
signal current_params[EPOCHS + 1][PARAM_COUNT];
// Initialize
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[0][i] <== initial_parameters[i];
}
// Run training epochs
component epochs[EPOCHS];
for (var e = 0; e < EPOCHS; e++) {
epochs[e] = TrainingEpoch(PARAM_COUNT);
// Input current parameters
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_params[i] <== current_params[e][i];
}
// Use constant gradients for simplicity (would be computed in real implementation)
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_gradients[i] <== 1; // Constant gradient
}
epochs[e].learning_rate <== learning_rate;
// Store results
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[e + 1][i] <== epochs[e].next_epoch_params[i];
}
}
// Output final parameters
for (var i = 0; i < PARAM_COUNT; i++) {
final_parameters[i] <== current_params[EPOCHS][i];
}
training_complete <== 1;
}
// Instantiate with 4 parameters and 3 epochs; no public inputs are declared.
component main = ModularTrainingVerification(4, 3);

View File

@@ -0,0 +1,135 @@
pragma circom 2.0.0;
/*
 * Modular ML Circuit Components
 *
 * Reusable components for machine learning circuits
 */
// Basic parameter update component (gradient descent step)
// One scalar gradient-descent step. learning_rate * gradient is a
// signal-by-signal product, so each instance adds one quadratic constraint.
template ParameterUpdate() {
signal input current_param;
signal input gradient;
signal input learning_rate;
signal output new_param;
// Simple gradient descent: new_param = current_param - learning_rate * gradient
new_param <== current_param - learning_rate * gradient;
}
// Vector parameter update component
// Element-wise ParameterUpdate over a fixed-size vector; one shared
// learning_rate signal feeds all PARAM_COUNT instances.
template VectorParameterUpdate(PARAM_COUNT) {
signal input current_params[PARAM_COUNT];
signal input gradients[PARAM_COUNT];
signal input learning_rate;
signal output new_params[PARAM_COUNT];
component updates[PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
updates[i] = ParameterUpdate();
updates[i].current_param <== current_params[i];
updates[i].gradient <== gradients[i];
updates[i].learning_rate <== learning_rate;
new_params[i] <== updates[i].new_param;
}
}
// Simple loss constraint component
// NOTE(review): this template is never instantiated in this file (main uses
// only LearningRateValidation and TrainingEpoch), so it contributes nothing
// to the compiled circuit.
// NOTE(review): the final constraint divides by the signal tolerance_squared;
// a circom === constraint must be quadratic, and division by a non-constant
// signal is not, so this template would be rejected by the compiler if used.
// NOTE(review): even as written, the equation only forces diff_squared == 0
// or diff_squared == tolerance_squared — it does NOT enforce
// |diff| <= tolerance as the inline comments claim. A real range check
// (bit decomposition / LessThan-style comparator) is needed for that.
template LossConstraint() {
signal input predicted_loss;
signal input actual_loss;
signal input tolerance;
// Constrain that |predicted_loss - actual_loss| <= tolerance
signal diff;
diff <== predicted_loss - actual_loss;
// Use absolute value constraint: diff^2 <= tolerance^2
signal diff_squared;
diff_squared <== diff * diff;
signal tolerance_squared;
tolerance_squared <== tolerance * tolerance;
// This constraint ensures the loss is within tolerance
diff_squared * (1 - diff_squared / tolerance_squared) === 0;
}
// Learning rate validation component
// NOTE(review): deliberately empty — learning_rate is accepted but not
// constrained in-circuit, so any field element is a "valid" learning rate.
template LearningRateValidation() {
signal input learning_rate;
// Removed constraint for optimization - learning rate validation handled externally
// This reduces non-linear constraints from 1 to 0 for better proving performance
}
// Training epoch component
// One epoch = one vector gradient-descent step, delegated to
// VectorParameterUpdate.
template TrainingEpoch(PARAM_COUNT) {
signal input epoch_params[PARAM_COUNT];
signal input epoch_gradients[PARAM_COUNT];
signal input learning_rate;
signal output next_epoch_params[PARAM_COUNT];
component param_update = VectorParameterUpdate(PARAM_COUNT);
param_update.current_params <== epoch_params;
param_update.gradients <== epoch_gradients;
param_update.learning_rate <== learning_rate;
next_epoch_params <== param_update.new_params;
}
// Main modular training verification using components
// Chains EPOCHS TrainingEpoch instances over a PARAM_COUNT-wide vector.
// NOTE(review): gradients are hard-coded to 1 for every epoch, so the circuit
// only proves final_parameters[i] == initial_parameters[i] - EPOCHS *
// learning_rate; it does not verify any actual training computation.
// NOTE(review): training_complete is the constant 1 and learning_rate is
// unconstrained, so these carry no verification weight.
template ModularTrainingVerification(PARAM_COUNT, EPOCHS) {
signal input initial_parameters[PARAM_COUNT];
signal input learning_rate;
signal output final_parameters[PARAM_COUNT];
signal output training_complete;
// Learning rate validation
component lr_validator = LearningRateValidation();
lr_validator.learning_rate <== learning_rate;
// Training epochs using modular components
// current_params[e] holds the parameter vector entering epoch e.
signal current_params[EPOCHS + 1][PARAM_COUNT];
// Initialize
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[0][i] <== initial_parameters[i];
}
// Run training epochs
component epochs[EPOCHS];
for (var e = 0; e < EPOCHS; e++) {
epochs[e] = TrainingEpoch(PARAM_COUNT);
// Input current parameters
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_params[i] <== current_params[e][i];
}
// Use constant gradients for simplicity (would be computed in real implementation)
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_gradients[i] <== 1; // Constant gradient
}
epochs[e].learning_rate <== learning_rate;
// Store results
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[e + 1][i] <== epochs[e].next_epoch_params[i];
}
}
// Output final parameters
for (var i = 0; i < PARAM_COUNT; i++) {
final_parameters[i] <== current_params[EPOCHS][i];
}
training_complete <== 1;
}
// Instantiate with 4 parameters and 3 epochs; no public inputs are declared.
component main = ModularTrainingVerification(4, 3);

View File

@@ -0,0 +1,136 @@
pragma circom 2.0.0;
/*
 * Modular ML Circuit Components
 *
 * Reusable components for machine learning circuits
 */
// Basic parameter update component (gradient descent step)
// One scalar gradient-descent step. learning_rate * gradient is a
// signal-by-signal product, so each instance adds one quadratic constraint.
template ParameterUpdate() {
signal input current_param;
signal input gradient;
signal input learning_rate;
signal output new_param;
// Simple gradient descent: new_param = current_param - learning_rate * gradient
new_param <== current_param - learning_rate * gradient;
}
// Vector parameter update component
// Element-wise ParameterUpdate over a fixed-size vector; one shared
// learning_rate signal feeds all PARAM_COUNT instances.
template VectorParameterUpdate(PARAM_COUNT) {
signal input current_params[PARAM_COUNT];
signal input gradients[PARAM_COUNT];
signal input learning_rate;
signal output new_params[PARAM_COUNT];
component updates[PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
updates[i] = ParameterUpdate();
updates[i].current_param <== current_params[i];
updates[i].gradient <== gradients[i];
updates[i].learning_rate <== learning_rate;
new_params[i] <== updates[i].new_param;
}
}
// Simple loss constraint component
// NOTE(review): this template is never instantiated in this file (main uses
// only LearningRateValidation and TrainingEpoch), so it contributes nothing
// to the compiled circuit.
// NOTE(review): the final constraint divides by the signal tolerance_squared;
// a circom === constraint must be quadratic, and division by a non-constant
// signal is not, so this template would be rejected by the compiler if used.
// NOTE(review): even as written, the equation only forces diff_squared == 0
// or diff_squared == tolerance_squared — it does NOT enforce
// |diff| <= tolerance as the inline comments claim. A real range check
// (bit decomposition / LessThan-style comparator) is needed for that.
template LossConstraint() {
signal input predicted_loss;
signal input actual_loss;
signal input tolerance;
// Constrain that |predicted_loss - actual_loss| <= tolerance
signal diff;
diff <== predicted_loss - actual_loss;
// Use absolute value constraint: diff^2 <= tolerance^2
signal diff_squared;
diff_squared <== diff * diff;
signal tolerance_squared;
tolerance_squared <== tolerance * tolerance;
// This constraint ensures the loss is within tolerance
diff_squared * (1 - diff_squared / tolerance_squared) === 0;
}
// Learning rate validation component
// NOTE(review): deliberately empty — learning_rate is accepted but not
// constrained in-circuit, so any field element is a "valid" learning rate.
template LearningRateValidation() {
signal input learning_rate;
// Removed constraint for optimization - learning rate validation handled externally
// This reduces non-linear constraints from 1 to 0 for better proving performance
}
// Training epoch component
// One epoch = one vector gradient-descent step, delegated to
// VectorParameterUpdate.
template TrainingEpoch(PARAM_COUNT) {
signal input epoch_params[PARAM_COUNT];
signal input epoch_gradients[PARAM_COUNT];
signal input learning_rate;
signal output next_epoch_params[PARAM_COUNT];
component param_update = VectorParameterUpdate(PARAM_COUNT);
param_update.current_params <== epoch_params;
param_update.gradients <== epoch_gradients;
param_update.learning_rate <== learning_rate;
next_epoch_params <== param_update.new_params;
}
// Main modular training verification using components
// Chains EPOCHS TrainingEpoch instances over a PARAM_COUNT-wide vector.
// NOTE(review): gradients are hard-coded to 1 for every epoch, so the circuit
// only proves final_parameters[i] == initial_parameters[i] - EPOCHS *
// learning_rate; it does not verify any actual training computation.
// NOTE(review): training_complete is the constant 1 and learning_rate is
// unconstrained, so these carry no verification weight.
template ModularTrainingVerification(PARAM_COUNT, EPOCHS) {
signal input initial_parameters[PARAM_COUNT];
signal input learning_rate;
signal output final_parameters[PARAM_COUNT];
signal output training_complete;
// Learning rate validation
component lr_validator = LearningRateValidation();
lr_validator.learning_rate <== learning_rate;
// Training epochs using modular components
// current_params[e] holds the parameter vector entering epoch e.
signal current_params[EPOCHS + 1][PARAM_COUNT];
// Initialize
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[0][i] <== initial_parameters[i];
}
// Run training epochs
component epochs[EPOCHS];
for (var e = 0; e < EPOCHS; e++) {
epochs[e] = TrainingEpoch(PARAM_COUNT);
// Input current parameters
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_params[i] <== current_params[e][i];
}
// Use constant gradients for simplicity (would be computed in real implementation)
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_gradients[i] <== 1; // Constant gradient
}
epochs[e].learning_rate <== learning_rate;
// Store results
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[e + 1][i] <== epochs[e].next_epoch_params[i];
}
}
// Output final parameters
for (var i = 0; i < PARAM_COUNT; i++) {
final_parameters[i] <== current_params[EPOCHS][i];
}
training_complete <== 1;
}
// Instantiate with 4 parameters and 3 epochs; no public inputs are declared.
component main = ModularTrainingVerification(4, 3);

View File

@@ -0,0 +1,86 @@
pragma circom 2.0.0;
// One scalar gradient-descent step: new_param = current_param -
// learning_rate * gradient. The signal-by-signal product adds one quadratic
// constraint per instance.
template ParameterUpdate() {
signal input current_param;
signal input gradient;
signal input learning_rate;
signal output new_param;
new_param <== current_param - learning_rate * gradient;
}
// Element-wise ParameterUpdate over a fixed-size vector; one shared
// learning_rate signal feeds all PARAM_COUNT instances.
template VectorParameterUpdate(PARAM_COUNT) {
signal input current_params[PARAM_COUNT];
signal input gradients[PARAM_COUNT];
signal input learning_rate;
signal output new_params[PARAM_COUNT];
component updates[PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
updates[i] = ParameterUpdate();
updates[i].current_param <== current_params[i];
updates[i].gradient <== gradients[i];
updates[i].learning_rate <== learning_rate;
new_params[i] <== updates[i].new_param;
}
}
// Attempts to constrain predicted_loss to lie near actual_loss.
// NOTE(review): never instantiated in this file, so it contributes nothing
// to the compiled circuit.
// NOTE(review): the final constraint divides by the signal tolerance_squared;
// a circom === constraint must be quadratic, and division by a non-constant
// signal is not, so this template would be rejected by the compiler if used.
// NOTE(review): even as written, the equation only forces diff_squared == 0
// or diff_squared == tolerance_squared — it does NOT enforce
// |diff| <= tolerance. A real range check (bit decomposition /
// LessThan-style comparator) is needed for that.
template LossConstraint() {
signal input predicted_loss;
signal input actual_loss;
signal input tolerance;
signal diff;
diff <== predicted_loss - actual_loss;
signal diff_squared;
diff_squared <== diff * diff;
signal tolerance_squared;
tolerance_squared <== tolerance * tolerance;
diff_squared * (1 - diff_squared / tolerance_squared) === 0;
}
// NOTE(review): empty body — learning_rate is accepted but not constrained
// in-circuit, so any field element is a "valid" learning rate.
template LearningRateValidation() {
signal input learning_rate;
}
// One epoch = one vector gradient-descent step, delegated to
// VectorParameterUpdate.
template TrainingEpoch(PARAM_COUNT) {
signal input epoch_params[PARAM_COUNT];
signal input epoch_gradients[PARAM_COUNT];
signal input learning_rate;
signal output next_epoch_params[PARAM_COUNT];
component param_update = VectorParameterUpdate(PARAM_COUNT);
param_update.current_params <== epoch_params;
param_update.gradients <== epoch_gradients;
param_update.learning_rate <== learning_rate;
next_epoch_params <== param_update.new_params;
}
// Chains EPOCHS TrainingEpoch instances over a PARAM_COUNT-wide vector.
// NOTE(review): gradients are hard-coded to 1 for every epoch, so the circuit
// only proves final_parameters[i] == initial_parameters[i] - EPOCHS *
// learning_rate; it does not verify any actual training computation.
// NOTE(review): training_complete is the constant 1 and learning_rate is
// unconstrained, so these carry no verification weight.
template ModularTrainingVerification(PARAM_COUNT, EPOCHS) {
signal input initial_parameters[PARAM_COUNT];
signal input learning_rate;
signal output final_parameters[PARAM_COUNT];
signal output training_complete;
component lr_validator = LearningRateValidation();
lr_validator.learning_rate <== learning_rate;
// current_params[e] holds the parameter vector entering epoch e.
signal current_params[EPOCHS + 1][PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[0][i] <== initial_parameters[i];
}
component epochs[EPOCHS];
for (var e = 0; e < EPOCHS; e++) {
epochs[e] = TrainingEpoch(PARAM_COUNT);
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_params[i] <== current_params[e][i];
}
// Constant gradient of 1 per element — placeholder, not a real gradient.
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_gradients[i] <== 1;
}
epochs[e].learning_rate <== learning_rate;
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[e + 1][i] <== epochs[e].next_epoch_params[i];
}
}
for (var i = 0; i < PARAM_COUNT; i++) {
final_parameters[i] <== current_params[EPOCHS][i];
}
training_complete <== 1;
}
// Instantiate with 4 parameters and 3 epochs; no public inputs are declared.
component main = ModularTrainingVerification(4, 3);

View File

@@ -0,0 +1,135 @@
pragma circom 2.1.0;
/*
 * Modular ML Circuit Components
 *
 * Reusable components for machine learning circuits
 */
// Basic parameter update component (gradient descent step)
// One scalar gradient-descent step. learning_rate * gradient is a
// signal-by-signal product, so each instance adds one quadratic constraint.
template ParameterUpdate() {
signal input current_param;
signal input gradient;
signal input learning_rate;
signal output new_param;
// Simple gradient descent: new_param = current_param - learning_rate * gradient
new_param <== current_param - learning_rate * gradient;
}
// Vector parameter update component
// Element-wise ParameterUpdate over a fixed-size vector; one shared
// learning_rate signal feeds all PARAM_COUNT instances.
template VectorParameterUpdate(PARAM_COUNT) {
signal input current_params[PARAM_COUNT];
signal input gradients[PARAM_COUNT];
signal input learning_rate;
signal output new_params[PARAM_COUNT];
component updates[PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
updates[i] = ParameterUpdate();
updates[i].current_param <== current_params[i];
updates[i].gradient <== gradients[i];
updates[i].learning_rate <== learning_rate;
new_params[i] <== updates[i].new_param;
}
}
// Simple loss constraint component
// NOTE(review): this template is never instantiated in this file (main uses
// only LearningRateValidation and TrainingEpoch), so it contributes nothing
// to the compiled circuit.
// NOTE(review): the final constraint divides by the signal tolerance_squared;
// a circom === constraint must be quadratic, and division by a non-constant
// signal is not, so this template would be rejected by the compiler if used.
// NOTE(review): even as written, the equation only forces diff_squared == 0
// or diff_squared == tolerance_squared — it does NOT enforce
// |diff| <= tolerance as the inline comments claim. A real range check
// (bit decomposition / LessThan-style comparator) is needed for that.
template LossConstraint() {
signal input predicted_loss;
signal input actual_loss;
signal input tolerance;
// Constrain that |predicted_loss - actual_loss| <= tolerance
signal diff;
diff <== predicted_loss - actual_loss;
// Use absolute value constraint: diff^2 <= tolerance^2
signal diff_squared;
diff_squared <== diff * diff;
signal tolerance_squared;
tolerance_squared <== tolerance * tolerance;
// This constraint ensures the loss is within tolerance
diff_squared * (1 - diff_squared / tolerance_squared) === 0;
}
// Learning rate validation component
// NOTE(review): deliberately empty — learning_rate is accepted but not
// constrained in-circuit, so any field element is a "valid" learning rate.
template LearningRateValidation() {
signal input learning_rate;
// Removed constraint for optimization - learning rate validation handled externally
// This reduces non-linear constraints from 1 to 0 for better proving performance
}
// Training epoch component
// One epoch = one vector gradient-descent step, delegated to
// VectorParameterUpdate.
template TrainingEpoch(PARAM_COUNT) {
signal input epoch_params[PARAM_COUNT];
signal input epoch_gradients[PARAM_COUNT];
signal input learning_rate;
signal output next_epoch_params[PARAM_COUNT];
component param_update = VectorParameterUpdate(PARAM_COUNT);
param_update.current_params <== epoch_params;
param_update.gradients <== epoch_gradients;
param_update.learning_rate <== learning_rate;
next_epoch_params <== param_update.new_params;
}
// Main modular training verification using components
// Chains EPOCHS TrainingEpoch instances over a PARAM_COUNT-wide vector.
// NOTE(review): gradients are hard-coded to 1 for every epoch, so the circuit
// only proves final_parameters[i] == initial_parameters[i] - EPOCHS *
// learning_rate; it does not verify any actual training computation.
// NOTE(review): training_complete is the constant 1 and learning_rate is
// unconstrained, so these carry no verification weight.
template ModularTrainingVerification(PARAM_COUNT, EPOCHS) {
signal input initial_parameters[PARAM_COUNT];
signal input learning_rate;
signal output final_parameters[PARAM_COUNT];
signal output training_complete;
// Learning rate validation
component lr_validator = LearningRateValidation();
lr_validator.learning_rate <== learning_rate;
// Training epochs using modular components
// current_params[e] holds the parameter vector entering epoch e.
signal current_params[EPOCHS + 1][PARAM_COUNT];
// Initialize
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[0][i] <== initial_parameters[i];
}
// Run training epochs
component epochs[EPOCHS];
for (var e = 0; e < EPOCHS; e++) {
epochs[e] = TrainingEpoch(PARAM_COUNT);
// Input current parameters
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_params[i] <== current_params[e][i];
}
// Use constant gradients for simplicity (would be computed in real implementation)
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_gradients[i] <== 1; // Constant gradient
}
epochs[e].learning_rate <== learning_rate;
// Store results
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[e + 1][i] <== epochs[e].next_epoch_params[i];
}
}
// Output final parameters
for (var i = 0; i < PARAM_COUNT; i++) {
final_parameters[i] <== current_params[EPOCHS][i];
}
training_complete <== 1;
}
// Instantiate with 4 parameters and 3 epochs; no public inputs are declared.
component main = ModularTrainingVerification(4, 3);

View File

@@ -0,0 +1,135 @@
pragma circom 2.0.0;
/*
 * Modular ML Circuit Components
 *
 * Reusable components for machine learning circuits
 */
// Basic parameter update component (gradient descent step)
// One scalar gradient-descent step. learning_rate * gradient is a
// signal-by-signal product, so each instance adds one quadratic constraint.
template ParameterUpdate() {
signal input current_param;
signal input gradient;
signal input learning_rate;
signal output new_param;
// Simple gradient descent: new_param = current_param - learning_rate * gradient
new_param <== current_param - learning_rate * gradient;
}
// Vector parameter update component
// Element-wise ParameterUpdate over a fixed-size vector; one shared
// learning_rate signal feeds all PARAM_COUNT instances.
template VectorParameterUpdate(PARAM_COUNT) {
signal input current_params[PARAM_COUNT];
signal input gradients[PARAM_COUNT];
signal input learning_rate;
signal output new_params[PARAM_COUNT];
component updates[PARAM_COUNT];
for (var i = 0; i < PARAM_COUNT; i++) {
updates[i] = ParameterUpdate();
updates[i].current_param <== current_params[i];
updates[i].gradient <== gradients[i];
updates[i].learning_rate <== learning_rate;
new_params[i] <== updates[i].new_param;
}
}
// Simple loss constraint component
// NOTE(review): this template is never instantiated in this file (main uses
// only LearningRateValidation and TrainingEpoch), so it contributes nothing
// to the compiled circuit.
// NOTE(review): the final constraint divides by the signal tolerance_squared;
// a circom === constraint must be quadratic, and division by a non-constant
// signal is not, so this template would be rejected by the compiler if used.
// NOTE(review): even as written, the equation only forces diff_squared == 0
// or diff_squared == tolerance_squared — it does NOT enforce
// |diff| <= tolerance as the inline comments claim. A real range check
// (bit decomposition / LessThan-style comparator) is needed for that.
template LossConstraint() {
signal input predicted_loss;
signal input actual_loss;
signal input tolerance;
// Constrain that |predicted_loss - actual_loss| <= tolerance
signal diff;
diff <== predicted_loss - actual_loss;
// Use absolute value constraint: diff^2 <= tolerance^2
signal diff_squared;
diff_squared <== diff * diff;
signal tolerance_squared;
tolerance_squared <== tolerance * tolerance;
// This constraint ensures the loss is within tolerance
diff_squared * (1 - diff_squared / tolerance_squared) === 0;
}
// Learning rate validation component
// NOTE(review): deliberately empty — learning_rate is accepted but not
// constrained in-circuit, so any field element is a "valid" learning rate.
template LearningRateValidation() {
signal input learning_rate;
// Removed constraint for optimization - learning rate validation handled externally
// This reduces non-linear constraints from 1 to 0 for better proving performance
}
// Training epoch component
// One epoch = one vector gradient-descent step, delegated to
// VectorParameterUpdate.
template TrainingEpoch(PARAM_COUNT) {
signal input epoch_params[PARAM_COUNT];
signal input epoch_gradients[PARAM_COUNT];
signal input learning_rate;
signal output next_epoch_params[PARAM_COUNT];
component param_update = VectorParameterUpdate(PARAM_COUNT);
param_update.current_params <== epoch_params;
param_update.gradients <== epoch_gradients;
param_update.learning_rate <== learning_rate;
next_epoch_params <== param_update.new_params;
}
// Main modular training verification using components
// Chains EPOCHS TrainingEpoch instances over a PARAM_COUNT-wide vector.
// NOTE(review): gradients are hard-coded to 1 for every epoch, so the circuit
// only proves final_parameters[i] == initial_parameters[i] - EPOCHS *
// learning_rate; it does not verify any actual training computation.
// NOTE(review): training_complete is the constant 1 and learning_rate is
// unconstrained, so these carry no verification weight.
template ModularTrainingVerification(PARAM_COUNT, EPOCHS) {
signal input initial_parameters[PARAM_COUNT];
signal input learning_rate;
signal output final_parameters[PARAM_COUNT];
signal output training_complete;
// Learning rate validation
component lr_validator = LearningRateValidation();
lr_validator.learning_rate <== learning_rate;
// Training epochs using modular components
// current_params[e] holds the parameter vector entering epoch e.
signal current_params[EPOCHS + 1][PARAM_COUNT];
// Initialize
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[0][i] <== initial_parameters[i];
}
// Run training epochs
component epochs[EPOCHS];
for (var e = 0; e < EPOCHS; e++) {
epochs[e] = TrainingEpoch(PARAM_COUNT);
// Input current parameters
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_params[i] <== current_params[e][i];
}
// Use constant gradients for simplicity (would be computed in real implementation)
for (var i = 0; i < PARAM_COUNT; i++) {
epochs[e].epoch_gradients[i] <== 1; // Constant gradient
}
epochs[e].learning_rate <== learning_rate;
// Store results
for (var i = 0; i < PARAM_COUNT; i++) {
current_params[e + 1][i] <== epochs[e].next_epoch_params[i];
}
}
// Output final parameters
for (var i = 0; i < PARAM_COUNT; i++) {
final_parameters[i] <== current_params[EPOCHS][i];
}
training_complete <== 1;
}
// Instantiate with 4 parameters and 3 epochs; no public inputs are declared.
component main = ModularTrainingVerification(4, 3);

Binary file not shown.

View File

@@ -0,0 +1,153 @@
1,1,4,main.final_parameters[0]
2,2,4,main.final_parameters[1]
3,3,4,main.final_parameters[2]
4,4,4,main.final_parameters[3]
5,5,4,main.training_complete
6,6,4,main.initial_parameters[0]
7,7,4,main.initial_parameters[1]
8,8,4,main.initial_parameters[2]
9,9,4,main.initial_parameters[3]
10,10,4,main.learning_rate
11,-1,4,main.current_params[0][0]
12,-1,4,main.current_params[0][1]
13,-1,4,main.current_params[0][2]
14,-1,4,main.current_params[0][3]
15,11,4,main.current_params[1][0]
16,12,4,main.current_params[1][1]
17,13,4,main.current_params[1][2]
18,14,4,main.current_params[1][3]
19,15,4,main.current_params[2][0]
20,16,4,main.current_params[2][1]
21,17,4,main.current_params[2][2]
22,18,4,main.current_params[2][3]
23,-1,4,main.current_params[3][0]
24,-1,4,main.current_params[3][1]
25,-1,4,main.current_params[3][2]
26,-1,4,main.current_params[3][3]
27,-1,3,main.epochs[0].next_epoch_params[0]
28,-1,3,main.epochs[0].next_epoch_params[1]
29,-1,3,main.epochs[0].next_epoch_params[2]
30,-1,3,main.epochs[0].next_epoch_params[3]
31,-1,3,main.epochs[0].epoch_params[0]
32,-1,3,main.epochs[0].epoch_params[1]
33,-1,3,main.epochs[0].epoch_params[2]
34,-1,3,main.epochs[0].epoch_params[3]
35,-1,3,main.epochs[0].epoch_gradients[0]
36,-1,3,main.epochs[0].epoch_gradients[1]
37,-1,3,main.epochs[0].epoch_gradients[2]
38,-1,3,main.epochs[0].epoch_gradients[3]
39,-1,3,main.epochs[0].learning_rate
40,-1,2,main.epochs[0].param_update.new_params[0]
41,-1,2,main.epochs[0].param_update.new_params[1]
42,-1,2,main.epochs[0].param_update.new_params[2]
43,-1,2,main.epochs[0].param_update.new_params[3]
44,-1,2,main.epochs[0].param_update.current_params[0]
45,-1,2,main.epochs[0].param_update.current_params[1]
46,-1,2,main.epochs[0].param_update.current_params[2]
47,-1,2,main.epochs[0].param_update.current_params[3]
48,-1,2,main.epochs[0].param_update.gradients[0]
49,-1,2,main.epochs[0].param_update.gradients[1]
50,-1,2,main.epochs[0].param_update.gradients[2]
51,-1,2,main.epochs[0].param_update.gradients[3]
52,-1,2,main.epochs[0].param_update.learning_rate
53,-1,1,main.epochs[0].param_update.updates[0].new_param
54,-1,1,main.epochs[0].param_update.updates[0].current_param
55,-1,1,main.epochs[0].param_update.updates[0].gradient
56,-1,1,main.epochs[0].param_update.updates[0].learning_rate
57,-1,1,main.epochs[0].param_update.updates[1].new_param
58,-1,1,main.epochs[0].param_update.updates[1].current_param
59,-1,1,main.epochs[0].param_update.updates[1].gradient
60,-1,1,main.epochs[0].param_update.updates[1].learning_rate
61,-1,1,main.epochs[0].param_update.updates[2].new_param
62,-1,1,main.epochs[0].param_update.updates[2].current_param
63,-1,1,main.epochs[0].param_update.updates[2].gradient
64,-1,1,main.epochs[0].param_update.updates[2].learning_rate
65,-1,1,main.epochs[0].param_update.updates[3].new_param
66,-1,1,main.epochs[0].param_update.updates[3].current_param
67,-1,1,main.epochs[0].param_update.updates[3].gradient
68,-1,1,main.epochs[0].param_update.updates[3].learning_rate
69,-1,3,main.epochs[1].next_epoch_params[0]
70,-1,3,main.epochs[1].next_epoch_params[1]
71,-1,3,main.epochs[1].next_epoch_params[2]
72,-1,3,main.epochs[1].next_epoch_params[3]
73,-1,3,main.epochs[1].epoch_params[0]
74,-1,3,main.epochs[1].epoch_params[1]
75,-1,3,main.epochs[1].epoch_params[2]
76,-1,3,main.epochs[1].epoch_params[3]
77,-1,3,main.epochs[1].epoch_gradients[0]
78,-1,3,main.epochs[1].epoch_gradients[1]
79,-1,3,main.epochs[1].epoch_gradients[2]
80,-1,3,main.epochs[1].epoch_gradients[3]
81,-1,3,main.epochs[1].learning_rate
82,-1,2,main.epochs[1].param_update.new_params[0]
83,-1,2,main.epochs[1].param_update.new_params[1]
84,-1,2,main.epochs[1].param_update.new_params[2]
85,-1,2,main.epochs[1].param_update.new_params[3]
86,-1,2,main.epochs[1].param_update.current_params[0]
87,-1,2,main.epochs[1].param_update.current_params[1]
88,-1,2,main.epochs[1].param_update.current_params[2]
89,-1,2,main.epochs[1].param_update.current_params[3]
90,-1,2,main.epochs[1].param_update.gradients[0]
91,-1,2,main.epochs[1].param_update.gradients[1]
92,-1,2,main.epochs[1].param_update.gradients[2]
93,-1,2,main.epochs[1].param_update.gradients[3]
94,-1,2,main.epochs[1].param_update.learning_rate
95,-1,1,main.epochs[1].param_update.updates[0].new_param
96,-1,1,main.epochs[1].param_update.updates[0].current_param
97,-1,1,main.epochs[1].param_update.updates[0].gradient
98,-1,1,main.epochs[1].param_update.updates[0].learning_rate
99,-1,1,main.epochs[1].param_update.updates[1].new_param
100,-1,1,main.epochs[1].param_update.updates[1].current_param
101,-1,1,main.epochs[1].param_update.updates[1].gradient
102,-1,1,main.epochs[1].param_update.updates[1].learning_rate
103,-1,1,main.epochs[1].param_update.updates[2].new_param
104,-1,1,main.epochs[1].param_update.updates[2].current_param
105,-1,1,main.epochs[1].param_update.updates[2].gradient
106,-1,1,main.epochs[1].param_update.updates[2].learning_rate
107,-1,1,main.epochs[1].param_update.updates[3].new_param
108,-1,1,main.epochs[1].param_update.updates[3].current_param
109,-1,1,main.epochs[1].param_update.updates[3].gradient
110,-1,1,main.epochs[1].param_update.updates[3].learning_rate
111,-1,3,main.epochs[2].next_epoch_params[0]
112,-1,3,main.epochs[2].next_epoch_params[1]
113,-1,3,main.epochs[2].next_epoch_params[2]
114,-1,3,main.epochs[2].next_epoch_params[3]
115,-1,3,main.epochs[2].epoch_params[0]
116,-1,3,main.epochs[2].epoch_params[1]
117,-1,3,main.epochs[2].epoch_params[2]
118,-1,3,main.epochs[2].epoch_params[3]
119,-1,3,main.epochs[2].epoch_gradients[0]
120,-1,3,main.epochs[2].epoch_gradients[1]
121,-1,3,main.epochs[2].epoch_gradients[2]
122,-1,3,main.epochs[2].epoch_gradients[3]
123,-1,3,main.epochs[2].learning_rate
124,-1,2,main.epochs[2].param_update.new_params[0]
125,-1,2,main.epochs[2].param_update.new_params[1]
126,-1,2,main.epochs[2].param_update.new_params[2]
127,-1,2,main.epochs[2].param_update.new_params[3]
128,-1,2,main.epochs[2].param_update.current_params[0]
129,-1,2,main.epochs[2].param_update.current_params[1]
130,-1,2,main.epochs[2].param_update.current_params[2]
131,-1,2,main.epochs[2].param_update.current_params[3]
132,-1,2,main.epochs[2].param_update.gradients[0]
133,-1,2,main.epochs[2].param_update.gradients[1]
134,-1,2,main.epochs[2].param_update.gradients[2]
135,-1,2,main.epochs[2].param_update.gradients[3]
136,-1,2,main.epochs[2].param_update.learning_rate
137,-1,1,main.epochs[2].param_update.updates[0].new_param
138,-1,1,main.epochs[2].param_update.updates[0].current_param
139,-1,1,main.epochs[2].param_update.updates[0].gradient
140,-1,1,main.epochs[2].param_update.updates[0].learning_rate
141,-1,1,main.epochs[2].param_update.updates[1].new_param
142,-1,1,main.epochs[2].param_update.updates[1].current_param
143,-1,1,main.epochs[2].param_update.updates[1].gradient
144,-1,1,main.epochs[2].param_update.updates[1].learning_rate
145,-1,1,main.epochs[2].param_update.updates[2].new_param
146,-1,1,main.epochs[2].param_update.updates[2].current_param
147,-1,1,main.epochs[2].param_update.updates[2].gradient
148,-1,1,main.epochs[2].param_update.updates[2].learning_rate
149,-1,1,main.epochs[2].param_update.updates[3].new_param
150,-1,1,main.epochs[2].param_update.updates[3].current_param
151,-1,1,main.epochs[2].param_update.updates[3].gradient
152,-1,1,main.epochs[2].param_update.updates[3].learning_rate
153,-1,0,main.lr_validator.learning_rate

View File

@@ -0,0 +1,21 @@
// Generate a binary witness (.wtns) file from a circuit WASM and a JSON input.
// Usage: node generate_witness.js <file.wasm> <input.json> <output.wtns>
const wc = require("./witness_calculator.js");
const { readFileSync, writeFile } = require("fs");

if (process.argv.length != 5) {
    console.log("Usage: node generate_witness.js <file.wasm> <input.json> <output.wtns>");
    // BUGFIX: signal failure to calling scripts instead of exiting 0 on bad usage.
    process.exitCode = 1;
} else {
    // argv[2] = wasm path, argv[3] = input json path, argv[4] = output wtns path
    const input = JSON.parse(readFileSync(process.argv[3], "utf8"));
    const buffer = readFileSync(process.argv[2]);
    wc(buffer).then(async witnessCalculator => {
        const buff = await witnessCalculator.calculateWTNSBin(input, 0);
        writeFile(process.argv[4], buff, function (err) {
            if (err) throw err;
        });
    }).catch(err => {
        // BUGFIX: witness-calculation failures were an unhandled rejection;
        // report them and exit non-zero.
        console.error(err);
        process.exitCode = 1;
    });
}

View File

@@ -0,0 +1,381 @@
module.exports = async function builder(code, options) {
options = options || {};
let wasmModule;
try {
wasmModule = await WebAssembly.compile(code);
} catch (err) {
console.log(err);
console.log("\nTry to run circom --c in order to generate c++ code instead\n");
throw new Error(err);
}
let wc;
let errStr = "";
let msgStr = "";
const instance = await WebAssembly.instantiate(wasmModule, {
runtime: {
exceptionHandler : function(code) {
let err;
if (code == 1) {
err = "Signal not found.\n";
} else if (code == 2) {
err = "Too many signals set.\n";
} else if (code == 3) {
err = "Signal already set.\n";
} else if (code == 4) {
err = "Assert Failed.\n";
} else if (code == 5) {
err = "Not enough memory.\n";
} else if (code == 6) {
err = "Input signal array access exceeds the size.\n";
} else {
err = "Unknown error.\n";
}
throw new Error(err + errStr);
},
printErrorMessage : function() {
errStr += getMessage() + "\n";
// console.error(getMessage());
},
writeBufferMessage : function() {
const msg = getMessage();
// Any calls to `log()` will always end with a `\n`, so that's when we print and reset
if (msg === "\n") {
console.log(msgStr);
msgStr = "";
} else {
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " "
}
// Then append the message to the message we are creating
msgStr += msg;
}
},
showSharedRWMemory : function() {
printSharedRWMemory ();
}
}
});
const sanityCheck =
options
// options &&
// (
// options.sanityCheck ||
// options.logGetSignal ||
// options.logSetSignal ||
// options.logStartComponent ||
// options.logFinishComponent
// );
wc = new WitnessCalculator(instance, sanityCheck);
return wc;
function getMessage() {
var message = "";
var c = instance.exports.getMessageChar();
while ( c != 0 ) {
message += String.fromCharCode(c);
c = instance.exports.getMessageChar();
}
return message;
}
function printSharedRWMemory () {
const shared_rw_memory_size = instance.exports.getFieldNumLen32();
const arr = new Uint32Array(shared_rw_memory_size);
for (let j=0; j<shared_rw_memory_size; j++) {
arr[shared_rw_memory_size-1-j] = instance.exports.readSharedRWMemory(j);
}
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " "
}
// Then append the value to the message we are creating
msgStr += (fromArray32(arr).toString());
}
};
class WitnessCalculator {
constructor(instance, sanityCheck) {
this.instance = instance;
this.version = this.instance.exports.getVersion();
this.n32 = this.instance.exports.getFieldNumLen32();
this.instance.exports.getRawPrime();
const arr = new Uint32Array(this.n32);
for (let i=0; i<this.n32; i++) {
arr[this.n32-1-i] = this.instance.exports.readSharedRWMemory(i);
}
this.prime = fromArray32(arr);
this.witnessSize = this.instance.exports.getWitnessSize();
this.sanityCheck = sanityCheck;
}
circom_version() {
return this.instance.exports.getVersion();
}
async _doCalculateWitness(input_orig, sanityCheck) {
//input is assumed to be a map from signals to arrays of bigints
this.instance.exports.init((this.sanityCheck || sanityCheck) ? 1 : 0);
let prefix = "";
var input = new Object();
//console.log("Input: ", input_orig);
qualify_input(prefix,input_orig,input);
//console.log("Input after: ",input);
const keys = Object.keys(input);
var input_counter = 0;
keys.forEach( (k) => {
const h = fnvHash(k);
const hMSB = parseInt(h.slice(0,8), 16);
const hLSB = parseInt(h.slice(8,16), 16);
const fArr = flatArray(input[k]);
let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
if (signalSize < 0){
throw new Error(`Signal ${k} not found\n`);
}
if (fArr.length < signalSize) {
throw new Error(`Not enough values for input signal ${k}\n`);
}
if (fArr.length > signalSize) {
throw new Error(`Too many values for input signal ${k}\n`);
}
for (let i=0; i<fArr.length; i++) {
const arrFr = toArray32(normalize(fArr[i],this.prime),this.n32)
for (let j=0; j<this.n32; j++) {
this.instance.exports.writeSharedRWMemory(j,arrFr[this.n32-1-j]);
}
try {
this.instance.exports.setInputSignal(hMSB, hLSB,i);
input_counter++;
} catch (err) {
// console.log(`After adding signal ${i} of ${k}`)
throw new Error(err);
}
}
});
if (input_counter < this.instance.exports.getInputSize()) {
throw new Error(`Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`);
}
}
async calculateWitness(input, sanityCheck) {
const w = [];
await this._doCalculateWitness(input, sanityCheck);
for (let i=0; i<this.witnessSize; i++) {
this.instance.exports.getWitness(i);
const arr = new Uint32Array(this.n32);
for (let j=0; j<this.n32; j++) {
arr[this.n32-1-j] = this.instance.exports.readSharedRWMemory(j);
}
w.push(fromArray32(arr));
}
return w;
}
async calculateBinWitness(input, sanityCheck) {
const buff32 = new Uint32Array(this.witnessSize*this.n32);
const buff = new Uint8Array( buff32.buffer);
await this._doCalculateWitness(input, sanityCheck);
for (let i=0; i<this.witnessSize; i++) {
this.instance.exports.getWitness(i);
const pos = i*this.n32;
for (let j=0; j<this.n32; j++) {
buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
}
}
return buff;
}
async calculateWTNSBin(input, sanityCheck) {
const buff32 = new Uint32Array(this.witnessSize*this.n32+this.n32+11);
const buff = new Uint8Array( buff32.buffer);
await this._doCalculateWitness(input, sanityCheck);
//"wtns"
buff[0] = "w".charCodeAt(0)
buff[1] = "t".charCodeAt(0)
buff[2] = "n".charCodeAt(0)
buff[3] = "s".charCodeAt(0)
//version 2
buff32[1] = 2;
//number of sections: 2
buff32[2] = 2;
//id section 1
buff32[3] = 1;
const n8 = this.n32*4;
//id section 1 length in 64bytes
const idSection1length = 8 + n8;
const idSection1lengthHex = idSection1length.toString(16);
buff32[4] = parseInt(idSection1lengthHex.slice(0,8), 16);
buff32[5] = parseInt(idSection1lengthHex.slice(8,16), 16);
//this.n32
buff32[6] = n8;
//prime number
this.instance.exports.getRawPrime();
var pos = 7;
for (let j=0; j<this.n32; j++) {
buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
}
pos += this.n32;
// witness size
buff32[pos] = this.witnessSize;
pos++;
//id section 2
buff32[pos] = 2;
pos++;
// section 2 length
const idSection2length = n8*this.witnessSize;
const idSection2lengthHex = idSection2length.toString(16);
buff32[pos] = parseInt(idSection2lengthHex.slice(0,8), 16);
buff32[pos+1] = parseInt(idSection2lengthHex.slice(8,16), 16);
pos += 2;
for (let i=0; i<this.witnessSize; i++) {
this.instance.exports.getWitness(i);
for (let j=0; j<this.n32; j++) {
buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
}
pos += this.n32;
}
return buff;
}
}
// Expand an array whose leaves are objects: each element's position is
// appended to `prefix` as "[i]" before the element itself is qualified.
function qualify_input_list(prefix, input, input1) {
    if (!Array.isArray(input)) {
        qualify_input(prefix, input, input1);
        return;
    }
    input.forEach((element, idx) => {
        qualify_input_list(prefix + "[" + idx + "]", element, input1);
    });
}
// Flatten a (possibly nested) input object into `input1`, keyed by dotted /
// indexed signal paths. Scalar and homogeneous-typed array leaves are stored
// directly; arrays of objects are expanded element-by-element.
function qualify_input(prefix, input, input1) {
    if (Array.isArray(input)) {
        // BUGFIX: `a` was assigned without declaration, leaking a global
        // variable (and throwing a ReferenceError in strict mode).
        const a = flatArray(input);
        if (a.length > 0) {
            let t = typeof a[0];
            // All leaves under one signal key must share a type.
            for (let i = 1; i < a.length; i++) {
                if (typeof a[i] != t) {
                    throw new Error(`Types are not the same in the key ${prefix}`);
                }
            }
            if (t == "object") {
                qualify_input_list(prefix, input, input1);
            } else {
                input1[prefix] = input;
            }
        } else {
            input1[prefix] = input;
        }
    } else if (typeof input == "object") {
        // Recurse into plain objects, joining key segments with ".".
        const keys = Object.keys(input);
        keys.forEach((k) => {
            let new_prefix = prefix == "" ? k : prefix + "." + k;
            qualify_input(new_prefix, input[k], input1);
        });
    } else {
        // Scalar leaf.
        input1[prefix] = input;
    }
}
// Decompose a BigInt into 32-bit limbs (index 0 = most significant),
// left-padding with zeros to `size` entries when a size is given.
function toArray32(rem, size) {
    const radix = BigInt(0x100000000);
    const limbs = [];
    while (rem) {
        limbs.unshift(Number(rem % radix));
        rem = rem / radix;
    }
    if (size) {
        while (limbs.length < size) {
            limbs.unshift(0);
        }
    }
    return limbs;
}
// Reassemble a BigInt from 32-bit limbs (index 0 = most significant).
function fromArray32(arr) { //returns a BigInt
    const radix = BigInt(0x100000000);
    return arr.reduce((acc, limb) => acc * radix + BigInt(limb), BigInt(0));
}
// Recursively flatten arbitrarily nested arrays into a single list of leaves.
// A non-array argument yields a one-element list.
function flatArray(a) {
    const res = [];
    const visit = (value) => {
        if (Array.isArray(value)) {
            value.forEach(visit);
        } else {
            res.push(value);
        }
    };
    visit(a);
    return res;
}
// Reduce `n` into the canonical representative range [0, prime).
function normalize(n, prime) {
    const reduced = BigInt(n) % prime;
    return reduced < 0 ? reduced + prime : reduced;
}
// 64-bit FNV-1a hash of `str`, returned as a 16-character zero-padded hex
// string. Used to address input signals inside the WASM module by name.
function fnvHash(str) {
    const uint64_max = BigInt(2) ** BigInt(64);
    let hash = BigInt("0xCBF29CE484222325"); // FNV-1a 64-bit offset basis
    for (let i = 0; i < str.length; i++) {
        hash ^= BigInt(str.charCodeAt(i));
        hash *= BigInt(0x100000001B3); // FNV-1a 64-bit prime
        hash %= uint64_max;
    }
    // padStart replaces the original's manual '0'.repeat(n).concat(...) padding.
    return hash.toString(16).padStart(16, "0");
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,89 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 0,
"vk_alpha_1": [
"17878197547960430839188198659895507284003628546353226099044915418621989763688",
"2414401954608202804440744777004803831246497417525080466014468287036253862429",
"1"
],
"vk_beta_2": [
[
"18272358567695662813397521777636023960648994006030407065408973578488017511142",
"9712108885154437847450578891476498392461803797234760197580929785758376650650"
],
[
"18113399933881081841371513445282849558527348349073876801631247450598780960185",
"21680758250979848935332437508266260788381562861496889541922176243649072173633"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"12165843319937710460660491044309080580686643140898844199182757276079170588931",
"12774548987221780347146542577375964674074290054683884142054120470956957679394"
],
[
"11495780469843451809285048515398120762160136824338528775648991644403497551783",
"5902046582690481723876569491209283634644066206041445880136420948730372505228"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"15043385564103330663613654339919240399186271643017395365045553432108770804738",
"12714364888329096003970077007548283476095989444275664320665990217429249744102"
],
[
"4280923934094610401199612902709542471670738247683892420346396755109361224194",
"5971523870632604777872650089881764809688186504221764776056510270055739855107"
],
[
"14459079939853070802140138225067878054463744988673516330641813466106780423229",
"4839711251154406360161812922311023717557179750909045977849842632717981230632"
]
],
[
[
"17169182168985102987328363961265278197034984474501990558050161317058972083308",
"8549555053510606289302165143849903925761285779139401276438959553414766561582"
],
[
"21525840049875620673656185364575700574261940775297555537759872607176225382844",
"10804170406986327484188973028629688550053758816273117067113206330300963522294"
],
[
"922917354257837537008604464003946574270465496760193887459466960343511330098",
"18548885909581399401732271754936250694134330406851366555038143648512851920594"
]
]
],
"IC": [
[
"4148018046519347596812177481784308374584693326254693053110348164627817172095",
"20730985524054218557052728073337277395061462810058907329882330843946617288874",
"1"
]
]
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,20 @@
pragma circom 2.0.0;
include "node_modules/circomlib/circuits/bitify.circom";
include "node_modules/circomlib/circuits/poseidon.circom";
/*
* Simple Receipt Attestation Circuit
*/
template SimpleReceipt() {
    // Public commitment to the receipt (exposed on main below).
    signal input receiptHash;
    // Private receipt fields.
    signal input receipt[4];

    // Poseidon hash over the four receipt fields.
    component hasher = Poseidon(4);
    for (var i = 0; i < 4; i++) {
        hasher.inputs[i] <== receipt[i];
    }

    // The recomputed hash must match the commitment.
    hasher.out === receiptHash;
}

// BUGFIX: in circom 2 the inputs of `main` are PRIVATE unless listed in
// `{public [...]}`. Without this the circuit had zero public inputs, so a
// proof was not bound to any particular receiptHash.
component main {public [receiptHash]} = SimpleReceipt();

View File

@@ -0,0 +1,131 @@
pragma circom 2.0.0;
include "node_modules/circomlib/circuits/bitify.circom";
include "node_modules/circomlib/circuits/comparators.circom";
include "node_modules/circomlib/circuits/poseidon.circom";
/*
* Simple Receipt Attestation Circuit
*
* This circuit proves that a receipt is valid without revealing sensitive details.
*
* Public Inputs:
* - receiptHash: Hash of the receipt (for public verification)
*
* Private Inputs:
* - receipt: The full receipt data (private)
*/
// Proves knowledge of 4 receipt fields whose Poseidon hash equals receiptHash.
template SimpleReceipt() {
    // Commitment to the receipt. NOTE(review): inputs of `main` are private
    // unless listed in `main {public [...]}` — confirm this is actually public.
    signal input receiptHash;

    // Private receipt fields.
    signal input receipt[4];

    // Poseidon hash over all four fields.
    component hasher = Poseidon(4);
    for (var i = 0; i < 4; i++) {
        hasher.inputs[i] <== receipt[i];
    }

    // The recomputed hash must equal the claimed commitment.
    hasher.out === receiptHash;
}
/*
* Membership Proof Circuit
*
* Proves that a value is part of a set without revealing which one
*/
/*
 * Proves that `leaf` (blinded by `salt`) is a member of the Merkle tree with
 * root `root`, and derives a deterministic `nullifier` from (leaf, salt).
 */
template MembershipProof(n) {
    // Public signals
    signal input root;
    signal input nullifier;
    signal input pathIndices[n];

    // Private signals
    signal input leaf;
    signal input pathElements[n];
    signal input salt;

    component hasher[n];

    // Leaf level: commit to (leaf, salt).
    hasher[0] = Poseidon(2);
    hasher[0].inputs[0] <== leaf;
    hasher[0].inputs[1] <== salt;

    // Hash up the Merkle tree, choosing left/right order by the path index.
    for (var i = 0; i < n - 1; i++) {
        // SECURITY FIX: each path index must be constrained to {0, 1};
        // without this the left/right selector below is not a mux and a
        // malicious prover can forge membership proofs.
        pathIndices[i] * (1 - pathIndices[i]) === 0;

        hasher[i + 1] = Poseidon(2);
        hasher[i + 1].inputs[0] <== pathIndices[i] * pathElements[i] + (1 - pathIndices[i]) * hasher[i].out;
        hasher[i + 1].inputs[1] <== pathIndices[i] * hasher[i].out + (1 - pathIndices[i]) * pathElements[i];
    }

    // The reconstructed root must equal the public root.
    hasher[n - 1].out === root;

    // The nullifier binds to the same (leaf, salt) pair.
    component nullifierHasher = Poseidon(2);
    nullifierHasher.inputs[0] <== leaf;
    nullifierHasher.inputs[1] <== salt;
    nullifierHasher.out === nullifier;
}
/*
* Bid Range Proof Circuit
*
* Proves that a bid is within a valid range without revealing the amount
*/
/*
 * Proves that a committed bid lies in [minAmount, maxAmount] without
 * revealing the bid itself.
 */
template BidRangeProof() {
    // Public signals
    signal input commitment;
    signal input minAmount;
    signal input maxAmount;

    // Private signals
    signal input bid;
    signal input salt;

    // The public commitment must open to (bid, salt).
    component commitmentHasher = Poseidon(2);
    commitmentHasher.inputs[0] <== bid;
    commitmentHasher.inputs[1] <== salt;
    commitmentHasher.out === commitment;

    // Range-constrain all three amounts to 64 bits; required for the
    // comparators below to be sound over the field.
    component bidBits = Num2Bits(64);
    component minBits = Num2Bits(64);
    component maxBits = Num2Bits(64);
    bidBits.in <== bid;
    minBits.in <== minAmount;
    maxBits.in <== maxAmount;

    // BUGFIX: circomlib's GreaterEqThan(k) compares two field values via
    // in[0] / in[1]; the original instantiated GreaterEqThan(8) and fed it
    // 64 per-bit differences, which does not match the template's interface.
    // Compare the values directly with 64-bit comparators instead.

    // Check bid >= minAmount
    component minChecker = GreaterEqThan(64);
    minChecker.in[0] <== bid;
    minChecker.in[1] <== minAmount;
    minChecker.out === 1;

    // Check maxAmount >= bid
    component maxChecker = GreaterEqThan(64);
    maxChecker.in[0] <== maxAmount;
    maxChecker.in[1] <== bid;
    maxChecker.out === 1;
}
// Main component instantiation.
// BUGFIX: expose receiptHash as a public input; inputs of `main` are private
// by default in circom 2, which left this circuit with no public inputs and
// therefore proofs not bound to any particular receipt hash.
component main {public [receiptHash]} = SimpleReceipt();

View File

@@ -0,0 +1,21 @@
// Generate a binary witness (.wtns) file from a circuit WASM and a JSON input.
// Usage: node generate_witness.js <file.wasm> <input.json> <output.wtns>
const wc = require("./witness_calculator.js");
const { readFileSync, writeFile } = require("fs");

if (process.argv.length != 5) {
    console.log("Usage: node generate_witness.js <file.wasm> <input.json> <output.wtns>");
    // BUGFIX: signal failure to calling scripts instead of exiting 0 on bad usage.
    process.exitCode = 1;
} else {
    // argv[2] = wasm path, argv[3] = input json path, argv[4] = output wtns path
    const input = JSON.parse(readFileSync(process.argv[3], "utf8"));
    const buffer = readFileSync(process.argv[2]);
    wc(buffer).then(async witnessCalculator => {
        const buff = await witnessCalculator.calculateWTNSBin(input, 0);
        writeFile(process.argv[4], buff, function (err) {
            if (err) throw err;
        });
    }).catch(err => {
        // BUGFIX: witness-calculation failures were an unhandled rejection;
        // report them and exit non-zero.
        console.error(err);
        process.exitCode = 1;
    });
}

Binary file not shown.

View File

@@ -0,0 +1,381 @@
module.exports = async function builder(code, options) {
options = options || {};
let wasmModule;
try {
wasmModule = await WebAssembly.compile(code);
} catch (err) {
console.log(err);
console.log("\nTry to run circom --c in order to generate c++ code instead\n");
throw new Error(err);
}
let wc;
let errStr = "";
let msgStr = "";
const instance = await WebAssembly.instantiate(wasmModule, {
runtime: {
exceptionHandler : function(code) {
let err;
if (code == 1) {
err = "Signal not found.\n";
} else if (code == 2) {
err = "Too many signals set.\n";
} else if (code == 3) {
err = "Signal already set.\n";
} else if (code == 4) {
err = "Assert Failed.\n";
} else if (code == 5) {
err = "Not enough memory.\n";
} else if (code == 6) {
err = "Input signal array access exceeds the size.\n";
} else {
err = "Unknown error.\n";
}
throw new Error(err + errStr);
},
printErrorMessage : function() {
errStr += getMessage() + "\n";
// console.error(getMessage());
},
writeBufferMessage : function() {
const msg = getMessage();
// Any calls to `log()` will always end with a `\n`, so that's when we print and reset
if (msg === "\n") {
console.log(msgStr);
msgStr = "";
} else {
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " "
}
// Then append the message to the message we are creating
msgStr += msg;
}
},
showSharedRWMemory : function() {
printSharedRWMemory ();
}
}
});
const sanityCheck =
options
// options &&
// (
// options.sanityCheck ||
// options.logGetSignal ||
// options.logSetSignal ||
// options.logStartComponent ||
// options.logFinishComponent
// );
wc = new WitnessCalculator(instance, sanityCheck);
return wc;
function getMessage() {
var message = "";
var c = instance.exports.getMessageChar();
while ( c != 0 ) {
message += String.fromCharCode(c);
c = instance.exports.getMessageChar();
}
return message;
}
function printSharedRWMemory () {
const shared_rw_memory_size = instance.exports.getFieldNumLen32();
const arr = new Uint32Array(shared_rw_memory_size);
for (let j=0; j<shared_rw_memory_size; j++) {
arr[shared_rw_memory_size-1-j] = instance.exports.readSharedRWMemory(j);
}
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " "
}
// Then append the value to the message we are creating
msgStr += (fromArray32(arr).toString());
}
};
class WitnessCalculator {
constructor(instance, sanityCheck) {
this.instance = instance;
this.version = this.instance.exports.getVersion();
this.n32 = this.instance.exports.getFieldNumLen32();
this.instance.exports.getRawPrime();
const arr = new Uint32Array(this.n32);
for (let i=0; i<this.n32; i++) {
arr[this.n32-1-i] = this.instance.exports.readSharedRWMemory(i);
}
this.prime = fromArray32(arr);
this.witnessSize = this.instance.exports.getWitnessSize();
this.sanityCheck = sanityCheck;
}
circom_version() {
return this.instance.exports.getVersion();
}
async _doCalculateWitness(input_orig, sanityCheck) {
//input is assumed to be a map from signals to arrays of bigints
this.instance.exports.init((this.sanityCheck || sanityCheck) ? 1 : 0);
let prefix = "";
var input = new Object();
//console.log("Input: ", input_orig);
qualify_input(prefix,input_orig,input);
//console.log("Input after: ",input);
const keys = Object.keys(input);
var input_counter = 0;
keys.forEach( (k) => {
const h = fnvHash(k);
const hMSB = parseInt(h.slice(0,8), 16);
const hLSB = parseInt(h.slice(8,16), 16);
const fArr = flatArray(input[k]);
let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
if (signalSize < 0){
throw new Error(`Signal ${k} not found\n`);
}
if (fArr.length < signalSize) {
throw new Error(`Not enough values for input signal ${k}\n`);
}
if (fArr.length > signalSize) {
throw new Error(`Too many values for input signal ${k}\n`);
}
for (let i=0; i<fArr.length; i++) {
const arrFr = toArray32(normalize(fArr[i],this.prime),this.n32)
for (let j=0; j<this.n32; j++) {
this.instance.exports.writeSharedRWMemory(j,arrFr[this.n32-1-j]);
}
try {
this.instance.exports.setInputSignal(hMSB, hLSB,i);
input_counter++;
} catch (err) {
// console.log(`After adding signal ${i} of ${k}`)
throw new Error(err);
}
}
});
if (input_counter < this.instance.exports.getInputSize()) {
throw new Error(`Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`);
}
}
async calculateWitness(input, sanityCheck) {
const w = [];
await this._doCalculateWitness(input, sanityCheck);
for (let i=0; i<this.witnessSize; i++) {
this.instance.exports.getWitness(i);
const arr = new Uint32Array(this.n32);
for (let j=0; j<this.n32; j++) {
arr[this.n32-1-j] = this.instance.exports.readSharedRWMemory(j);
}
w.push(fromArray32(arr));
}
return w;
}
async calculateBinWitness(input, sanityCheck) {
const buff32 = new Uint32Array(this.witnessSize*this.n32);
const buff = new Uint8Array( buff32.buffer);
await this._doCalculateWitness(input, sanityCheck);
for (let i=0; i<this.witnessSize; i++) {
this.instance.exports.getWitness(i);
const pos = i*this.n32;
for (let j=0; j<this.n32; j++) {
buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
}
}
return buff;
}
async calculateWTNSBin(input, sanityCheck) {
const buff32 = new Uint32Array(this.witnessSize*this.n32+this.n32+11);
const buff = new Uint8Array( buff32.buffer);
await this._doCalculateWitness(input, sanityCheck);
//"wtns"
buff[0] = "w".charCodeAt(0)
buff[1] = "t".charCodeAt(0)
buff[2] = "n".charCodeAt(0)
buff[3] = "s".charCodeAt(0)
//version 2
buff32[1] = 2;
//number of sections: 2
buff32[2] = 2;
//id section 1
buff32[3] = 1;
const n8 = this.n32*4;
//id section 1 length in 64bytes
const idSection1length = 8 + n8;
const idSection1lengthHex = idSection1length.toString(16);
buff32[4] = parseInt(idSection1lengthHex.slice(0,8), 16);
buff32[5] = parseInt(idSection1lengthHex.slice(8,16), 16);
//this.n32
buff32[6] = n8;
//prime number
this.instance.exports.getRawPrime();
var pos = 7;
for (let j=0; j<this.n32; j++) {
buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
}
pos += this.n32;
// witness size
buff32[pos] = this.witnessSize;
pos++;
//id section 2
buff32[pos] = 2;
pos++;
// section 2 length
const idSection2length = n8*this.witnessSize;
const idSection2lengthHex = idSection2length.toString(16);
buff32[pos] = parseInt(idSection2lengthHex.slice(0,8), 16);
buff32[pos+1] = parseInt(idSection2lengthHex.slice(8,16), 16);
pos += 2;
for (let i=0; i<this.witnessSize; i++) {
this.instance.exports.getWitness(i);
for (let j=0; j<this.n32; j++) {
buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
}
pos += this.n32;
}
return buff;
}
}
// Expand an array whose leaves are objects: each element's position is
// appended to `prefix` as "[i]" before the element itself is qualified.
function qualify_input_list(prefix, input, input1) {
    if (!Array.isArray(input)) {
        qualify_input(prefix, input, input1);
        return;
    }
    input.forEach((element, idx) => {
        qualify_input_list(prefix + "[" + idx + "]", element, input1);
    });
}
// Flatten a (possibly nested) input object into `input1`, keyed by dotted /
// indexed signal paths. Scalar and homogeneous-typed array leaves are stored
// directly; arrays of objects are expanded element-by-element.
function qualify_input(prefix, input, input1) {
    if (Array.isArray(input)) {
        // BUGFIX: `a` was assigned without declaration, leaking a global
        // variable (and throwing a ReferenceError in strict mode).
        const a = flatArray(input);
        if (a.length > 0) {
            let t = typeof a[0];
            // All leaves under one signal key must share a type.
            for (let i = 1; i < a.length; i++) {
                if (typeof a[i] != t) {
                    throw new Error(`Types are not the same in the key ${prefix}`);
                }
            }
            if (t == "object") {
                qualify_input_list(prefix, input, input1);
            } else {
                input1[prefix] = input;
            }
        } else {
            input1[prefix] = input;
        }
    } else if (typeof input == "object") {
        // Recurse into plain objects, joining key segments with ".".
        const keys = Object.keys(input);
        keys.forEach((k) => {
            let new_prefix = prefix == "" ? k : prefix + "." + k;
            qualify_input(new_prefix, input[k], input1);
        });
    } else {
        // Scalar leaf.
        input1[prefix] = input;
    }
}
// Decompose a BigInt into 32-bit limbs (index 0 = most significant),
// left-padding with zeros to `size` entries when a size is given.
function toArray32(rem, size) {
    const radix = BigInt(0x100000000);
    const limbs = [];
    while (rem) {
        limbs.unshift(Number(rem % radix));
        rem = rem / radix;
    }
    if (size) {
        while (limbs.length < size) {
            limbs.unshift(0);
        }
    }
    return limbs;
}
// Reassemble a BigInt from 32-bit limbs (index 0 = most significant).
function fromArray32(arr) { //returns a BigInt
    const radix = BigInt(0x100000000);
    return arr.reduce((acc, limb) => acc * radix + BigInt(limb), BigInt(0));
}
// Recursively flatten arbitrarily nested arrays into a single list of leaves.
// A non-array argument yields a one-element list.
function flatArray(a) {
    const res = [];
    const visit = (value) => {
        if (Array.isArray(value)) {
            value.forEach(visit);
        } else {
            res.push(value);
        }
    };
    visit(a);
    return res;
}
// Reduce `n` into the canonical representative range [0, prime).
function normalize(n, prime) {
    const reduced = BigInt(n) % prime;
    return reduced < 0 ? reduced + prime : reduced;
}
// 64-bit FNV-1a hash of `str`, returned as a 16-character zero-padded hex
// string. Used to address input signals inside the WASM module by name.
function fnvHash(str) {
    const uint64_max = BigInt(2) ** BigInt(64);
    let hash = BigInt("0xCBF29CE484222325"); // FNV-1a 64-bit offset basis
    for (let i = 0; i < str.length; i++) {
        hash ^= BigInt(str.charCodeAt(i));
        hash *= BigInt(0x100000001B3); // FNV-1a 64-bit prime
        hash %= uint64_max;
    }
    // padStart replaces the original's manual '0'.repeat(n).concat(...) padding.
    return hash.toString(16).padStart(16, "0");
}

View File

@@ -0,0 +1,9 @@
pragma circom 2.0.0;
// Minimal pass-through circuit: constrains the single output to equal the
// single input. Used as a compilation/tooling smoke test.
template Test() {
    signal input in;
    signal output out;
    out <== in;
}

component main = Test();

View File

@@ -0,0 +1,9 @@
pragma circom 2.0.0;
// Minimal pass-through circuit: constrains the single output to equal the
// single input. Used as a compilation/tooling smoke test.
template Test() {
    signal input in;
    signal output out;
    out <== in;
}

component main = Test();

View File

@@ -0,0 +1,9 @@
pragma circom 0.5.46;
// Minimal pass-through circuit. NOTE(review): the file pragma pins circom
// 0.5.46 (unlike the sibling 2.0.0 test circuits) — presumably intentional to
// exercise compiler version-mismatch handling; confirm before "fixing" it.
template Test() {
    signal input in;
    signal output out;
    out <== in;
}

component main = Test();

View File

@@ -0,0 +1,9 @@
pragma circom 2.0.0;
// Minimal pass-through circuit: constrains the single output to equal the
// single input. Used as a compilation/tooling smoke test.
template Test() {
    signal input in;
    signal output out;
    out <== in;
}

component main = Test();

Binary file not shown.

View File

@@ -0,0 +1,2 @@
1,1,0,main.out
2,-1,0,main.in

View File

@@ -0,0 +1,94 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 1,
"vk_alpha_1": [
"8460216532488165727467564856413555351114670954785488538800357260241591659922",
"18445221864308632061488572037047946806659902339700033382142009763125814749748",
"1"
],
"vk_beta_2": [
[
"6479683735401057464856560780016689003394325158210495956800419236111697402941",
"10756899494323454451849886987287990433636781750938311280590204128566742369499"
],
[
"14397376998117601765034877247086905021783475930686205456376147632056422933833",
"20413115250143543082989954729570048513153861075230117372641105301032124129876"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"6840503012950456034406412069208230277997775373740741539262294411073505372202",
"4187901564856243153173061219345467014727545819082218143172095490940414594424"
],
[
"15354962623567401613422376703326876887451375834046173755940516337285040531401",
"16312755549775593509550494456994863905270524213647477910622330564896885944010"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"8995664523327611111940695773435202321527189968635326175993425030330107869209",
"10636865864911719472203481854537187286731767382234618665029688610948280447774"
],
[
"2027301146985302003447473427699486288958511647692214852679531814142772884072",
"16315179087884712852887019534812875478380885062857601375409989804072625917625"
],
[
"5763629345463320911658464985147138827165295056412698652075257157933349925190",
"18007509234277924935356458855535698088409613611430357427754027720054049931159"
]
],
[
[
"12742020694779715461694294344022902700171616940484742698214637693643592478776",
"13449812718618008130272786901682245900092785345108866963867787217638117513710"
],
[
"4697328451762890383542458909679544743549594918890775424620183530718745223176",
"18283933325645065572175183630291803944633449818122421671865200510652516905389"
],
[
"325914140485583140584324883490363676367108249716427038595477057788929554745",
"6765772614216179391904319393793642468016331619939680620407685333447433218960"
]
]
],
"IC": [
[
"7685121570366407724807946503921961619833683410392772870373459476604128011275",
"6915443837935167692630810275110398177336960270031115982900890650376967129575",
"1"
],
[
"10363999014224824591638032348857401078402637116683579765969796919683926972060",
"5716124078230277423780595544607422628270452574948632939527677487979409581469",
"1"
]
]
}

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,21 @@
// Generate a binary witness (.wtns) file from a circuit WASM and a JSON input.
// Usage: node generate_witness.js <file.wasm> <input.json> <output.wtns>
const wc = require("./witness_calculator.js");
const { readFileSync, writeFile } = require("fs");

if (process.argv.length != 5) {
    console.log("Usage: node generate_witness.js <file.wasm> <input.json> <output.wtns>");
    // BUGFIX: signal failure to calling scripts instead of exiting 0 on bad usage.
    process.exitCode = 1;
} else {
    // argv[2] = wasm path, argv[3] = input json path, argv[4] = output wtns path
    const input = JSON.parse(readFileSync(process.argv[3], "utf8"));
    const buffer = readFileSync(process.argv[2]);
    wc(buffer).then(async witnessCalculator => {
        const buff = await witnessCalculator.calculateWTNSBin(input, 0);
        writeFile(process.argv[4], buff, function (err) {
            if (err) throw err;
        });
    }).catch(err => {
        // BUGFIX: witness-calculation failures were an unhandled rejection;
        // report them and exit non-zero.
        console.error(err);
        process.exitCode = 1;
    });
}

Binary file not shown.

View File

@@ -0,0 +1,381 @@
module.exports = async function builder(code, options) {
options = options || {};
let wasmModule;
try {
wasmModule = await WebAssembly.compile(code);
} catch (err) {
console.log(err);
console.log("\nTry to run circom --c in order to generate c++ code instead\n");
throw new Error(err);
}
let wc;
let errStr = "";
let msgStr = "";
const instance = await WebAssembly.instantiate(wasmModule, {
runtime: {
exceptionHandler : function(code) {
let err;
if (code == 1) {
err = "Signal not found.\n";
} else if (code == 2) {
err = "Too many signals set.\n";
} else if (code == 3) {
err = "Signal already set.\n";
} else if (code == 4) {
err = "Assert Failed.\n";
} else if (code == 5) {
err = "Not enough memory.\n";
} else if (code == 6) {
err = "Input signal array access exceeds the size.\n";
} else {
err = "Unknown error.\n";
}
throw new Error(err + errStr);
},
printErrorMessage : function() {
errStr += getMessage() + "\n";
// console.error(getMessage());
},
writeBufferMessage : function() {
const msg = getMessage();
// Any calls to `log()` will always end with a `\n`, so that's when we print and reset
if (msg === "\n") {
console.log(msgStr);
msgStr = "";
} else {
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " "
}
// Then append the message to the message we are creating
msgStr += msg;
}
},
showSharedRWMemory : function() {
printSharedRWMemory ();
}
}
});
const sanityCheck =
options
// options &&
// (
// options.sanityCheck ||
// options.logGetSignal ||
// options.logSetSignal ||
// options.logStartComponent ||
// options.logFinishComponent
// );
wc = new WitnessCalculator(instance, sanityCheck);
return wc;
function getMessage() {
var message = "";
var c = instance.exports.getMessageChar();
while ( c != 0 ) {
message += String.fromCharCode(c);
c = instance.exports.getMessageChar();
}
return message;
}
function printSharedRWMemory () {
const shared_rw_memory_size = instance.exports.getFieldNumLen32();
const arr = new Uint32Array(shared_rw_memory_size);
for (let j=0; j<shared_rw_memory_size; j++) {
arr[shared_rw_memory_size-1-j] = instance.exports.readSharedRWMemory(j);
}
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " "
}
// Then append the value to the message we are creating
msgStr += (fromArray32(arr).toString());
}
};
/**
 * Wrapper around a circom-generated WASM instance that computes circuit
 * witnesses. Field elements cross the WASM boundary as `n32` 32-bit words
 * through the shared read/write memory exports, least-significant word at
 * index 0; this class reverses them before folding them into BigInts with
 * fromArray32() (which treats index 0 as most significant).
 */
class WitnessCalculator {
    constructor(instance, sanityCheck) {
        this.instance = instance;
        // Runtime/compiler version embedded in the WASM module.
        this.version = this.instance.exports.getVersion();
        // Number of 32-bit words per field element.
        this.n32 = this.instance.exports.getFieldNumLen32();
        // getRawPrime() stages the field prime in shared memory; read it out
        // word-by-word, reversed so fromArray32 sees the MSW first.
        this.instance.exports.getRawPrime();
        const arr = new Uint32Array(this.n32);
        for (let i=0; i<this.n32; i++) {
            arr[this.n32-1-i] = this.instance.exports.readSharedRWMemory(i);
        }
        this.prime = fromArray32(arr);
        this.witnessSize = this.instance.exports.getWitnessSize();
        // When truthy, init() enables the runtime's sanity checks.
        this.sanityCheck = sanityCheck;
    }

    // Version reported by the WASM module (same export the constructor reads).
    circom_version() {
        return this.instance.exports.getVersion();
    }

    /**
     * Feeds `input_orig` (a map from signal names to values / nested arrays)
     * into the WASM runtime, which computes the witness as a side effect.
     * Throws if a signal is unknown, has the wrong number of values, or if
     * not every declared input was set.
     */
    async _doCalculateWitness(input_orig, sanityCheck) {
        //input is assumed to be a map from signals to arrays of bigints
        this.instance.exports.init((this.sanityCheck || sanityCheck) ? 1 : 0);
        let prefix = "";
        var input = new Object();
        //console.log("Input: ", input_orig);
        // Flatten nested objects/lists-of-objects into dotted/indexed keys.
        qualify_input(prefix,input_orig,input);
        //console.log("Input after: ",input);
        const keys = Object.keys(input);
        var input_counter = 0;
        keys.forEach( (k) => {
            // Signals are addressed by the two 32-bit halves of the 64-bit
            // FNV hash of their fully-qualified name.
            const h = fnvHash(k);
            const hMSB = parseInt(h.slice(0,8), 16);
            const hLSB = parseInt(h.slice(8,16), 16);
            const fArr = flatArray(input[k]);
            let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
            if (signalSize < 0){
                throw new Error(`Signal ${k} not found\n`);
            }
            if (fArr.length < signalSize) {
                throw new Error(`Not enough values for input signal ${k}\n`);
            }
            if (fArr.length > signalSize) {
                throw new Error(`Too many values for input signal ${k}\n`);
            }
            for (let i=0; i<fArr.length; i++) {
                // Reduce the value mod the prime, then write it to shared
                // memory least-significant word first (hence the re-reversal).
                const arrFr = toArray32(normalize(fArr[i],this.prime),this.n32)
                for (let j=0; j<this.n32; j++) {
                    this.instance.exports.writeSharedRWMemory(j,arrFr[this.n32-1-j]);
                }
                try {
                    this.instance.exports.setInputSignal(hMSB, hLSB,i);
                    input_counter++;
                } catch (err) {
                    // console.log(`After adding signal ${i} of ${k}`)
                    throw new Error(err);
                }
            }
        });
        if (input_counter < this.instance.exports.getInputSize()) {
            throw new Error(`Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`);
        }
    }

    /** Computes the witness and returns it as an array of BigInts. */
    async calculateWitness(input, sanityCheck) {
        const w = [];
        await this._doCalculateWitness(input, sanityCheck);
        for (let i=0; i<this.witnessSize; i++) {
            // getWitness(i) stages element i in shared memory; read it back
            // reversed so fromArray32 sees the most-significant word first.
            this.instance.exports.getWitness(i);
            const arr = new Uint32Array(this.n32);
            for (let j=0; j<this.n32; j++) {
                arr[this.n32-1-j] = this.instance.exports.readSharedRWMemory(j);
            }
            w.push(fromArray32(arr));
        }
        return w;
    }

    /**
     * Computes the witness and returns it as a raw byte buffer: n32 32-bit
     * words per element in shared-memory (little-endian word) order, with
     * no header.
     */
    async calculateBinWitness(input, sanityCheck) {
        const buff32 = new Uint32Array(this.witnessSize*this.n32);
        const buff = new Uint8Array( buff32.buffer);
        await this._doCalculateWitness(input, sanityCheck);
        for (let i=0; i<this.witnessSize; i++) {
            this.instance.exports.getWitness(i);
            const pos = i*this.n32;
            for (let j=0; j<this.n32; j++) {
                buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
            }
        }
        return buff;
    }

    /**
     * Computes the witness and serializes it in the binary .wtns format:
     * magic "wtns", version 2, two sections — section 1 (field element byte
     * size, prime, witness count) and section 2 (the witness values).
     */
    async calculateWTNSBin(input, sanityCheck) {
        // Total size: witness words + one prime + 11 words of headers.
        const buff32 = new Uint32Array(this.witnessSize*this.n32+this.n32+11);
        const buff = new Uint8Array( buff32.buffer);
        await this._doCalculateWitness(input, sanityCheck);
        //"wtns"
        buff[0] = "w".charCodeAt(0)
        buff[1] = "t".charCodeAt(0)
        buff[2] = "n".charCodeAt(0)
        buff[3] = "s".charCodeAt(0)
        //version 2
        buff32[1] = 2;
        //number of sections: 2
        buff32[2] = 2;
        //id section 1
        buff32[3] = 1;
        const n8 = this.n32*4;
        // section 1 length, stored as a u64 (low word then high word)
        const idSection1length = 8 + n8;
        const idSection1lengthHex = idSection1length.toString(16);
        // NOTE(review): for lengths that fit in 32 bits the hex string has at
        // most 8 chars, so slice(8,16) is "" and parseInt("") is NaN, which a
        // Uint32Array stores as 0 — the high word ends up 0 as intended.
        buff32[4] = parseInt(idSection1lengthHex.slice(0,8), 16);
        buff32[5] = parseInt(idSection1lengthHex.slice(8,16), 16);
        // field element size in bytes (n8 = n32 * 4)
        buff32[6] = n8;
        //prime number
        this.instance.exports.getRawPrime();
        var pos = 7;
        for (let j=0; j<this.n32; j++) {
            buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
        }
        pos += this.n32;
        // witness size
        buff32[pos] = this.witnessSize;
        pos++;
        //id section 2
        buff32[pos] = 2;
        pos++;
        // section 2 length, same u64 low/high split as section 1
        const idSection2length = n8*this.witnessSize;
        const idSection2lengthHex = idSection2length.toString(16);
        buff32[pos] = parseInt(idSection2lengthHex.slice(0,8), 16);
        buff32[pos+1] = parseInt(idSection2lengthHex.slice(8,16), 16);
        pos += 2;
        // Witness values, in shared-memory word order (no reversal here).
        for (let i=0; i<this.witnessSize; i++) {
            this.instance.exports.getWitness(i);
            for (let j=0; j<this.n32; j++) {
                buff32[pos+j] = this.instance.exports.readSharedRWMemory(j);
            }
            pos += this.n32;
        }
        return buff;
    }
}
// Recursively walks a (possibly nested) list, extending the signal key with
// "[i]" for each index, and hands non-list elements to qualify_input().
function qualify_input_list(prefix, input, input1) {
    if (!Array.isArray(input)) {
        // Leaf (scalar or object) element: delegate to the generic handler.
        qualify_input(prefix, input, input1);
        return;
    }
    input.forEach((element, idx) => {
        qualify_input_list(`${prefix}[${idx}]`, element, input1);
    });
}
// Flattens a (possibly nested) input map into `input1`, keyed by qualified
// signal paths ("a.b", "a[0]", ...). Arrays of primitives are stored whole
// under their prefix; arrays of objects are expanded element-by-element;
// plain objects recurse with a "."-joined key; scalars are stored directly.
// Throws if one array mixes primitive types.
function qualify_input(prefix, input, input1) {
    if (Array.isArray(input)) {
        // FIX: `a` was assigned without declaration, leaking an implicit
        // global (and throwing a ReferenceError in strict mode).
        const a = flatArray(input);
        if (a.length > 0) {
            let t = typeof a[0];
            // All leaves of a single signal must share one primitive type.
            for (let i = 1; i < a.length; i++) {
                if (typeof a[i] != t) {
                    throw new Error(`Types are not the same in the key ${prefix}`);
                }
            }
            if (t == "object") {
                // Elements are objects: qualify each list element separately.
                qualify_input_list(prefix, input, input1);
            } else {
                input1[prefix] = input;
            }
        } else {
            // Empty array: keep it as-is under the current key.
            input1[prefix] = input;
        }
    } else if (typeof input == "object") {
        // Plain object: recurse into each key, extending the prefix with ".".
        // NOTE(review): `typeof null == "object"`, so a null value would hit
        // this branch and throw in Object.keys — presumably inputs are never
        // null; confirm against callers.
        const keys = Object.keys(input);
        keys.forEach((k) => {
            let new_prefix = prefix == "" ? k : prefix + "." + k;
            qualify_input(new_prefix, input[k], input1);
        });
    } else {
        // Scalar (number/bigint/string): store directly.
        input1[prefix] = input;
    }
}
// Decomposes a BigInt into big-endian 32-bit limbs (index 0 is the most
// significant). When `size` is given, the result is left-padded with zero
// limbs up to that width; it is never truncated.
function toArray32(rem, size) {
    const radix = BigInt(0x100000000);
    const limbs = [];
    while (rem) {
        limbs.unshift(Number(rem % radix));
        rem /= radix; // BigInt division truncates toward zero
    }
    if (size) {
        while (limbs.length < size) {
            limbs.unshift(0);
        }
    }
    return limbs;
}
// Folds big-endian 32-bit limbs (index 0 most significant) into one BigInt.
// Inverse of toArray32 (modulo leading-zero padding).
function fromArray32(arr) {
    const radix = BigInt(0x100000000);
    return arr.reduce((acc, limb) => acc * radix + BigInt(limb), BigInt(0));
}
// Depth-first flatten of an arbitrarily nested array into a single list of
// leaves. A non-array argument yields a one-element list.
function flatArray(a) {
    const out = [];
    const visit = (node) => {
        if (Array.isArray(node)) {
            for (const child of node) {
                visit(child);
            }
        } else {
            out.push(node);
        }
    };
    visit(a);
    return out;
}
// Reduces n into the canonical range [0, prime). JS `%` keeps the sign of
// the dividend, so negative inputs need one extra addition of the prime.
function normalize(n, prime) {
    const reduced = BigInt(n) % prime;
    return reduced < 0 ? reduced + prime : reduced;
}
// 64-bit FNV-1a hash over the UTF-16 code units of `str` (xor with each
// code unit, then multiply by the FNV prime, all mod 2^64). Returns the
// hash as a 16-character zero-padded lowercase hex string.
// Iteration is deliberately by index/charCodeAt so surrogate pairs hash as
// two code units, matching the original behavior.
function fnvHash(str) {
    const MASK64 = BigInt(2) ** BigInt(64);
    const FNV_PRIME = BigInt(0x100000001B3);
    let hash = BigInt("0xCBF29CE484222325"); // FNV-1a 64-bit offset basis
    for (let i = 0; i < str.length; i++) {
        hash = ((hash ^ BigInt(str.charCodeAt(i))) * FNV_PRIME) % MASK64;
    }
    return hash.toString(16).padStart(16, "0");
}

View File

@@ -0,0 +1,168 @@
// SPDX-License-Identifier: GPL-3.0
/*
Copyright 2021 0KIMS association.
This file is generated with [snarkJS](https://github.com/iden3/snarkjs).
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
pragma solidity >=0.7.0 <0.9.0;
// NOTE(review): auto-generated by snarkJS — a Groth16 verifier over the
// BN254 (alt_bn128) curve. The verification-key constants below are
// circuit-specific; regenerate rather than hand-edit.
contract Groth16Verifier {
    // Scalar field size
    uint256 constant r = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
    // Base field size
    uint256 constant q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;

    // Verification Key data
    uint256 constant alphax = 8460216532488165727467564856413555351114670954785488538800357260241591659922;
    uint256 constant alphay = 18445221864308632061488572037047946806659902339700033382142009763125814749748;
    uint256 constant betax1 = 10756899494323454451849886987287990433636781750938311280590204128566742369499;
    uint256 constant betax2 = 6479683735401057464856560780016689003394325158210495956800419236111697402941;
    uint256 constant betay1 = 20413115250143543082989954729570048513153861075230117372641105301032124129876;
    uint256 constant betay2 = 14397376998117601765034877247086905021783475930686205456376147632056422933833;
    uint256 constant gammax1 = 11559732032986387107991004021392285783925812861821192530917403151452391805634;
    uint256 constant gammax2 = 10857046999023057135944570762232829481370756359578518086990519993285655852781;
    uint256 constant gammay1 = 4082367875863433681332203403145435568316851327593401208105741076214120093531;
    uint256 constant gammay2 = 8495653923123431417604973247489272438418190587263600148770280649306958101930;
    uint256 constant deltax1 = 4187901564856243153173061219345467014727545819082218143172095490940414594424;
    uint256 constant deltax2 = 6840503012950456034406412069208230277997775373740741539262294411073505372202;
    uint256 constant deltay1 = 16312755549775593509550494456994863905270524213647477910622330564896885944010;
    uint256 constant deltay2 = 15354962623567401613422376703326876887451375834046173755940516337285040531401;

    // Input-commitment base points (one public signal => IC0 + IC1).
    uint256 constant IC0x = 7685121570366407724807946503921961619833683410392772870373459476604128011275;
    uint256 constant IC0y = 6915443837935167692630810275110398177336960270031115982900890650376967129575;
    uint256 constant IC1x = 10363999014224824591638032348857401078402637116683579765969796919683926972060;
    uint256 constant IC1y = 5716124078230277423780595544607422628270452574948632939527677487979409581469;

    // Memory data — offsets into the scratch area allocated in verifyProof.
    uint16 constant pVk = 0;
    uint16 constant pPairing = 128;
    uint16 constant pLastMem = 896;

    // Verifies a Groth16 proof (A, B, C) against one public signal.
    // Returns true iff the pairing equation holds; returns false (by
    // writing 0 and returning early) when a public signal is >= r or a
    // precompile call fails.
    function verifyProof(uint[2] calldata _pA, uint[2][2] calldata _pB, uint[2] calldata _pC, uint[1] calldata _pubSignals) public view returns (bool) {
        assembly {
            // Rejects values outside the scalar field: writes 0 (false) and
            // returns from the whole external call.
            function checkField(v) {
                if iszero(lt(v, r)) {
                    mstore(0, 0)
                    return(0, 0x20)
                }
            }

            // G1 function to multiply a G1 value(x,y) to value in an address
            // Uses precompile 7 (ecMul) then precompile 6 (ecAdd) to compute
            // pR := pR + s*(x,y); aborts the call on precompile failure.
            function g1_mulAccC(pR, x, y, s) {
                let success
                let mIn := mload(0x40)
                mstore(mIn, x)
                mstore(add(mIn, 32), y)
                mstore(add(mIn, 64), s)

                success := staticcall(sub(gas(), 2000), 7, mIn, 96, mIn, 64)

                if iszero(success) {
                    mstore(0, 0)
                    return(0, 0x20)
                }

                mstore(add(mIn, 64), mload(pR))
                mstore(add(mIn, 96), mload(add(pR, 32)))

                success := staticcall(sub(gas(), 2000), 6, mIn, 128, pR, 64)

                if iszero(success) {
                    mstore(0, 0)
                    return(0, 0x20)
                }
            }

            // Evaluates the Groth16 pairing equation via precompile 8 over
            // four pairs: (-A, B), (alpha, beta), (vk_x, gamma), (C, delta).
            function checkPairing(pA, pB, pC, pubSignals, pMem) -> isOk {
                let _pPairing := add(pMem, pPairing)
                let _pVk := add(pMem, pVk)

                mstore(_pVk, IC0x)
                mstore(add(_pVk, 32), IC0y)

                // Compute the linear combination vk_x
                g1_mulAccC(_pVk, IC1x, IC1y, calldataload(add(pubSignals, 0)))

                // -A  (negated on G1 by reflecting y across the base field)
                mstore(_pPairing, calldataload(pA))
                mstore(add(_pPairing, 32), mod(sub(q, calldataload(add(pA, 32))), q))

                // B
                mstore(add(_pPairing, 64), calldataload(pB))
                mstore(add(_pPairing, 96), calldataload(add(pB, 32)))
                mstore(add(_pPairing, 128), calldataload(add(pB, 64)))
                mstore(add(_pPairing, 160), calldataload(add(pB, 96)))

                // alpha1
                mstore(add(_pPairing, 192), alphax)
                mstore(add(_pPairing, 224), alphay)

                // beta2
                mstore(add(_pPairing, 256), betax1)
                mstore(add(_pPairing, 288), betax2)
                mstore(add(_pPairing, 320), betay1)
                mstore(add(_pPairing, 352), betay2)

                // vk_x
                mstore(add(_pPairing, 384), mload(add(pMem, pVk)))
                mstore(add(_pPairing, 416), mload(add(pMem, add(pVk, 32))))

                // gamma2
                mstore(add(_pPairing, 448), gammax1)
                mstore(add(_pPairing, 480), gammax2)
                mstore(add(_pPairing, 512), gammay1)
                mstore(add(_pPairing, 544), gammay2)

                // C
                mstore(add(_pPairing, 576), calldataload(pC))
                mstore(add(_pPairing, 608), calldataload(add(pC, 32)))

                // delta2
                mstore(add(_pPairing, 640), deltax1)
                mstore(add(_pPairing, 672), deltax2)
                mstore(add(_pPairing, 704), deltay1)
                mstore(add(_pPairing, 736), deltay2)

                let success := staticcall(sub(gas(), 2000), 8, _pPairing, 768, _pPairing, 0x20)

                isOk := and(success, mload(_pPairing))
            }

            // Reserve pLastMem bytes of scratch memory past the free pointer.
            let pMem := mload(0x40)
            mstore(0x40, add(pMem, pLastMem))

            // Validate that all evaluations ∈ F
            checkField(calldataload(add(_pubSignals, 0)))

            // Validate all evaluations
            let isValid := checkPairing(_pA, _pB, _pC, _pubSignals, pMem)

            mstore(0, isValid)
            return(0, 0x20)
        }
    }
}

View File

@@ -0,0 +1,9 @@
pragma circom 0.5.46;

// Minimal pass-through test fixture: one input, one output, with `<==`
// both assigning and constraining out to equal in.
template Test() {
    signal input in;
    signal output out;
    out <== in;
}

component main = Test();

View File

@@ -0,0 +1,10 @@
pragma circom 0.5.46;

// Minimal pass-through test fixture: one input, one output, with `<==`
// both assigning and constraining out to equal in.
template Test() {
    signal input in;
    signal output out;
    out <== in;
}

component main = Test();

View File

View File

View File