Merge pull request #78 from 0xPolygonHermez/develop
Develop
krlosMata authored Dec 22, 2022
2 parents d5637d3 + 7df930d commit 59d4ab8
Showing 24 changed files with 1,274 additions and 1,028 deletions.
10 changes: 5 additions & 5 deletions package.json
@@ -1,7 +1,7 @@
{
"name": "@0xpolygonhermez/zkevm-commonjs",
"description": "Javascript library implementing common utilities for zkevm",
"version": "0.4.0.1",
"version": "0.6.0.0",
"main": "index.js",
"scripts": {
"setup": "npm i",
@@ -34,7 +34,7 @@
},
"homepage": "https://github.com/0xPolygonHermez/zkevm-commonjs#readme",
"devDependencies": {
"@0xpolygonhermez/contracts-zkevm": "github:0xPolygonHermez/zkevm-contracts#v0.0.7",
"@0xpolygonhermez/zkevm-contracts": "github:0xPolygonHermez/zkevm-contracts#v0.6.0.0",
"@ethersproject/abi": "^5.6.4",
"@nomiclabs/hardhat-ethers": "^2.1.0",
"@nomiclabs/hardhat-waffle": "^2.0.2",
@@ -48,12 +48,12 @@
},
"dependencies": {
"@ethereumjs/block": "^3.6.2",
"@ethereumjs/common": "^2.6.0",
"@ethereumjs/tx": "^3.4.0",
"@polygon-hermez/vm": "5.7.22",
"@polygon-hermez/common": "2.6.4",
"@polygon-hermez/vm": "5.7.28",
"ethereumjs-util": "^7.1.4",
"ethers": "^5.5.4",
"ffjavascript": "^0.2.46",
"ffjavascript": "^0.2.55",
"lodash": "^4.17.21"
}
}
13 changes: 10 additions & 3 deletions src/constants.js
@@ -4,10 +4,12 @@ const { Scalar } = require('ffjavascript');
// Database keys
module.exports.DB_LAST_BATCH = ethers.utils.id(('ZKEVM_DB_LAST_BATCH'));
module.exports.DB_STATE_ROOT = ethers.utils.id(('ZKEVM_DB_STATE_ROOT'));
module.exports.DB_LOCAL_EXIT_ROOT = ethers.utils.id(('ZKEVM_DB_DB_LOCAL_EXIT_ROOT'));
module.exports.DB_ACC_INPUT_HASH = ethers.utils.id(('ZKEVM_DB_ACC_INPUT_HASH'));
module.exports.DB_LOCAL_EXIT_ROOT = ethers.utils.id(('ZKEVM_DB_LOCAL_EXIT_ROOT'));
module.exports.DB_GLOBAL_EXIT_ROOT = ethers.utils.id(('ZKEVM_DB_GLOBAL_EXIT_ROOT'));
module.exports.DB_ADDRESS_STORAGE = ethers.utils.id(('ZKEVM_DB_ADDRESS_STORAGE'));
module.exports.DB_TOUCHED_ACCOUNTS = ethers.utils.id(('ZKEVM_DB_TOUCHED_ACCOUNTS'));
module.exports.DB_STARK_INPUT = ethers.utils.id(('ZKEVM_DB_STARK_INPUT'));

// Default values and global constants
module.exports.DEFAULT_MAX_TX = 1000;
@@ -35,5 +37,10 @@ module.exports.GLOBAL_EXIT_ROOT_STORAGE_POS = 0;
module.exports.LOCAL_EXIT_ROOT_STORAGE_POS = 1;
module.exports.BATCH_GAS_LIMIT = 30000000;
module.exports.BATCH_DIFFICULTY = 0;
module.exports.ADDRESS_SYSTEM = '0x0000000000000000000000000000000000000000';
module.exports.STATE_ROOT_STORAGE_POS = 0;
module.exports.ADDRESS_SYSTEM = '0x000000000000000000000000000000005ca1ab1e';
module.exports.STATE_ROOT_STORAGE_POS = 1;
module.exports.LAST_TX_STORAGE_POS = 0;

// Bridge Leaf Types
module.exports.BRIDGE_LEAF_TYPE_ASSET = 0;
module.exports.BRIDGE_LEAF_TYPE_MESSAGE = 1;
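
An editorial sketch (not part of the commit) of how these keys are built: ethers.utils.id returns the keccak256 hash of the UTF-8 label, so renaming the label, as the DB_LOCAL_EXIT_ROOT fix above does, also changes the key under which the value is stored.

    const ethers = require('ethers');

    // keccak256 over the UTF-8 bytes of the label (ethers v5)
    const key = ethers.utils.id('ZKEVM_DB_LOCAL_EXIT_ROOT');
    console.log(key); // 0x-prefixed 32-byte hash used as the database key
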
135 changes: 62 additions & 73 deletions src/contract-utils.js
@@ -1,127 +1,116 @@
const ethers = require('ethers');
const { Scalar } = require('ffjavascript');
const { sha256Snark, padZeros } = require('./utils');
const { string2fea } = require('./smt-utils');
const getPoseidon = require('./poseidon');

/**
* Compute globalHash for STARK circuit
* @param {String} currentStateRoot - Current state Root
* @param {String} currentLocalExitRoot - Current local exit root
* @param {String} newStateRoot - New State root once the batch is processed
* @param {String} newLocalExitRoot - New local exit root once the batch is processed
* Compute accumulateInputHash = Keccak256(oldAccInputHash, batchHashData, globalExitRoot, timestamp, seqAddress)
* @param {String} oldAccInputHash - old accumulateInputHash
* @param {String} batchHashData - Batch hash data
* @param {Number} numBatch - Batch number
* @param {String} globalExitRoot - Global Exit Root
* @param {Number} timestamp - Block timestamp
* @param {Number} chainID - L2 chainID
* @returns {String} - global hash in hex encoding
* @param {String} sequencerAddress - Sequencer address
* @returns {String} - accumulateInputHash in hex encoding
*/
function calculateStarkInput(
currentStateRoot,
currentLocalExitRoot,
newStateRoot,
newLocalExitRoot,
function calculateAccInputHash(
oldAccInputHash,
batchHashData,
numBatch,
globalExitRoot,
timestamp,
chainID,
sequencerAddress,
) {
const currentStateRootHex = `0x${Scalar.e(currentStateRoot).toString(16).padStart(64, '0')}`;
const currentLocalExitRootHex = `0x${Scalar.e(currentLocalExitRoot).toString(16).padStart(64, '0')}`;
const newStateRootHex = `0x${Scalar.e(newStateRoot).toString(16).padStart(64, '0')}`;
const newLocalExitRootHex = `0x${Scalar.e(newLocalExitRoot).toString(16).padStart(64, '0')}`;
const oldAccInputHashHex = `0x${Scalar.e(oldAccInputHash).toString(16).padStart(64, '0')}`;

const hashKeccak = ethers.utils.solidityKeccak256(
['bytes32', 'bytes32', 'bytes32', 'bytes32', 'bytes32', 'uint64', 'uint64', 'uint64'],
['bytes32', 'bytes32', 'bytes32', 'uint64', 'address'],
[
currentStateRootHex,
currentLocalExitRootHex,
newStateRootHex,
newLocalExitRootHex,
oldAccInputHashHex,
batchHashData,
numBatch,
globalExitRoot,
timestamp,
chainID,
sequencerAddress,
],
);

return hashKeccak;
}
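
As a reading aid (an editorial sketch, not part of the diff), a minimal call of the new helper; every value below is a placeholder, and the packing mirrors the solidityKeccak256 call above.

    const { calculateAccInputHash } = require('./contract-utils');

    // Placeholder inputs for illustration only.
    const oldAccInputHash = `0x${'00'.repeat(32)}`;   // bytes32
    const batchHashData = `0x${'11'.repeat(32)}`;     // keccak256 of the raw batch transactions
    const globalExitRoot = `0x${'22'.repeat(32)}`;    // bytes32
    const timestamp = 1671715200;                     // uint64 sequencing timestamp
    const sequencerAddress = `0x${'33'.repeat(20)}`;  // address

    const newAccInputHash = calculateAccInputHash(
        oldAccInputHash, batchHashData, globalExitRoot, timestamp, sequencerAddress,
    );
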

/**
* Compute input for SNARK circuit
* @param {String} currentStateRoot - Current state Root
* @param {String} currentLocalExitRoot - Current local exit root
* Compute input for SNARK circuit: sha256(aggrAddress, oldStateRoot, oldAccInputHash, oldNumBatch, chainID, newStateRoot, newAccInputHash, newLocalExitRoot, newNumBatch) % FrSNARK
* @param {String} oldStateRoot - Current state Root
* @param {String} newStateRoot - New State root once the batch is processed
* @param {String} newLocalExitRoot - New local exit root once the batch is processed
* @param {String} batchHashData - Batch hash data
* @param {Number} numBatch - Batch number
* @param {Number} timestamp - Block timestamp
* @param {String} oldAccInputHash - initial accumulateInputHash
* @param {String} newAccInputHash - final accumulateInputHash
* @param {String} newLocalExitRoot - New local exit root once all batches are processed
* @param {Number} oldNumBatch - initial batch number
* @param {Number} newNumBatch - final batch number
* @param {Number} chainID - L2 chainID
* @param {String} aggregatorAddress - Aggregator Ethereum address in hex string
* @returns {String} - sha256(globalHash, aggregatorAddress) % FrSNARK in hex encoding
* @returns {String} - input snark in hex encoding
*/
async function calculateSnarkInput(
currentStateRoot,
currentLocalExitRoot,
oldStateRoot,
newStateRoot,
newLocalExitRoot,
batchHashData,
numBatch,
timestamp,
oldAccInputHash,
newAccInputHash,
oldNumBatch,
newNumBatch,
chainID,
aggregatorAddress,
) {
const poseidon = await getPoseidon();
const { F } = poseidon;

const hashKeccak = calculateStarkInput(
currentStateRoot,
currentLocalExitRoot,
newStateRoot,
newLocalExitRoot,
batchHashData,
numBatch,
timestamp,
chainID,
);

// 20 bytes agggregator adsress
// 20 bytes aggregator address
const strAggregatorAddress = padZeros((Scalar.fromString(aggregatorAddress, 16)).toString(16), 40);

// 8 bytes each field element
const feaHashKeccak = string2fea(F, hashKeccak);
const strFea = feaHashKeccak.reduce(
(previousValue, currentValue) => previousValue + padZeros(currentValue.toString(16), 16),
'',
);
// 32 bytes for oldStateRoot
const strOldStateRoot = padZeros((Scalar.fromString(oldStateRoot, 16)).toString(16), 64);

// 32 bytes for oldAccInputHash
const strOldAccInputHash = padZeros((Scalar.fromString(oldAccInputHash, 16)).toString(16), 64);

// 8 bytes for oldNumBatch
const strOldNumBatch = padZeros(Scalar.e(oldNumBatch).toString(16), 16);

// 8 bytes for chainID
const strChainID = padZeros(Scalar.e(chainID).toString(16), 16);

// 32 bytes for newStateRoot
const strNewStateRoot = padZeros((Scalar.fromString(newStateRoot, 16)).toString(16), 64);

// 32 bytes for newAccInputHash
const strNewAccInputHash = padZeros((Scalar.fromString(newAccInputHash, 16)).toString(16), 64);

// 32 bytes for newLocalExitRoot
const strNewLocalExitRoot = padZeros((Scalar.fromString(newLocalExitRoot, 16)).toString(16), 64);

// 8 bytes for newNumBatch
const strNewNumBatch = padZeros(Scalar.e(newNumBatch).toString(16), 16);

// build final bytes sha256
const finalStr = strAggregatorAddress.concat(strFea);
const finalStr = strAggregatorAddress
.concat(strOldStateRoot)
.concat(strOldAccInputHash)
.concat(strOldNumBatch)
.concat(strChainID)
.concat(strNewStateRoot)
.concat(strNewAccInputHash)
.concat(strNewLocalExitRoot)
.concat(strNewNumBatch);

return sha256Snark(finalStr);
}
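
For orientation (an editorial sketch, not part of the diff): the preimage built above is aggregatorAddress (20 bytes) || oldStateRoot (32) || oldAccInputHash (32) || oldNumBatch (8) || chainID (8) || newStateRoot (32) || newAccInputHash (32) || newLocalExitRoot (32) || newNumBatch (8), hashed with sha256 and, per the JSDoc, reduced modulo FrSNARK. A minimal call with placeholder values:

    const { calculateSnarkInput } = require('./contract-utils');

    // Placeholder inputs for illustration only.
    const oldStateRoot = `0x${'00'.repeat(32)}`;
    const newStateRoot = `0x${'aa'.repeat(32)}`;
    const newLocalExitRoot = `0x${'bb'.repeat(32)}`;
    const oldAccInputHash = `0x${'cc'.repeat(32)}`;
    const newAccInputHash = `0x${'dd'.repeat(32)}`;
    const aggregatorAddress = `0x${'33'.repeat(20)}`;

    calculateSnarkInput(
        oldStateRoot, newStateRoot, newLocalExitRoot,
        oldAccInputHash, newAccInputHash,
        0,    // oldNumBatch
        1,    // newNumBatch
        1000, // chainID
        aggregatorAddress,
    ).then((inputSnark) => console.log(inputSnark)); // hex string below FrSNARK
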

/**
* Batch hash data
* @param {String} transactions - All raw transaction data concatenated
* @param {String} globalExitRoot - Global Exit Root
* @param {String} sequencerAddress - Sequencer address
* @returns {String} - Batch hash data
*/
function calculateBatchHashData(
transactions,
globalExitRoot,
sequencerAddress,
) {
const globalExitRootHex = `0x${Scalar.e(globalExitRoot).toString(16).padStart(64, '0')}`;

return ethers.utils.solidityKeccak256(
['bytes', 'bytes32', 'address'],
['bytes'],
[
transactions,
globalExitRootHex,
sequencerAddress,
],
);
}
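
Note that batchHashData now hashes only the raw transactions bytes; the global exit root and sequencer address enter through the accumulated input hash instead. A minimal sketch, assuming the single-argument form implied by the removed parameters above:

    const { calculateBatchHashData } = require('./contract-utils');

    // Placeholder batch: in practice this is the concatenation of the
    // custom RLP-encoded transactions of the batch.
    const transactions = `0x${'ee'.repeat(16)}`;
    const batchHashData = calculateBatchHashData(transactions);
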
@@ -160,7 +149,7 @@ function generateSolidityInputs(
}

module.exports = {
calculateStarkInput,
calculateAccInputHash,
calculateSnarkInput,
calculateBatchHashData,
generateSolidityInputs,
7 changes: 4 additions & 3 deletions src/mt-bridge-utils.js
@@ -38,16 +38,17 @@ function verifyMerkleProof(leaf, smtProof, index, root) {

/**
* Calculate leaf value
* @param {Number} leafType - Leaf Type
* @param {Number} originNetwork - Original network
* @param {String} originTokenAddress - Token address
* @param {String} originAddress - Token address
* @param {Number} destinationNetwork - Destination network
* @param {String} destinationAddress - Destination address
* @param {BigNumber} amount - Amount of tokens
* @param {BigNumber} metadataHash - Hash of the metadata
* @returns {String} - Leaf value
*/
function getLeafValue(originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash) {
return ethers.utils.solidityKeccak256(['uint32', 'address', 'uint32', 'address', 'uint256', 'bytes32'], [originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash]);
function getLeafValue(leafType, originNetwork, originAddress, destinationNetwork, destinationAddress, amount, metadataHash) {
return ethers.utils.solidityKeccak256(['uint8', 'uint32', 'address', 'uint32', 'address', 'uint256', 'bytes32'], [leafType, originNetwork, originAddress, destinationNetwork, destinationAddress, amount, metadataHash]);
}

module.exports = {
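
An editorial usage sketch of the extended leaf hash (not part of the commit), assuming getLeafValue stays exported from this module; every address and amount below is a placeholder.

    const ethers = require('ethers');
    const { getLeafValue } = require('./mt-bridge-utils');
    const { BRIDGE_LEAF_TYPE_ASSET } = require('./constants');

    const leaf = getLeafValue(
        BRIDGE_LEAF_TYPE_ASSET,        // new leading field: 0 = asset, 1 = message
        0,                             // originNetwork
        `0x${'11'.repeat(20)}`,        // originAddress (token address for asset leaves)
        1,                             // destinationNetwork
        `0x${'22'.repeat(20)}`,        // destinationAddress
        ethers.utils.parseEther('1'),  // amount
        ethers.constants.HashZero,     // metadataHash
    );
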
17 changes: 10 additions & 7 deletions src/processor-utils.js
@@ -239,15 +239,18 @@ function decodeCustomRawTxProverMethod(encodedTransactions) {
offset += decodedData.consumed;
txDecoded.data = decodedData.result;

// chainID READ
const decodedChainID = decodeNextShortStringRLP(encodedTxBytes, offset);
offset += decodedChainID.consumed;
txDecoded.chainID = decodedChainID.result;
// Don't decode chainId if tx is legacy
if (txListLength + headerLength !== offset) {
// chainID READ
const decodedChainID = decodeNextShortStringRLP(encodedTxBytes, offset);
offset += decodedChainID.consumed;
txDecoded.chainID = decodedChainID.result;

if ((encodedTxBytes[offset] !== 0x80) || encodedTxBytes[offset + 1] !== 0x80) {
throw new Error('The last 2 values should be 0x8080');
if ((encodedTxBytes[offset] !== 0x80) || encodedTxBytes[offset + 1] !== 0x80) {
throw new Error('The last 2 values should be 0x8080');
}
offset += 2;
}
offset += 2;

if (txListLength + headerLength !== offset) {
throw new Error('Invalid list length');
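
Editorial note on the guard above: pre-EIP-155 (legacy) transactions are signed without a chainId, so their custom batch encoding ends right after the data field, while EIP-155 transactions append chainId plus two 0x80 bytes (the empty r and s placeholders used for signing). Roughly, and assuming the usual legacy field order for the fields not shown in this hunk:

    // EIP-155 transaction tail inside the custom batch encoding:
    //   ... | nonce | gasPrice | gasLimit | to | value | data | chainId | 0x80 | 0x80
    // Pre-EIP-155 (legacy) transaction tail:
    //   ... | nonce | gasPrice | gasLimit | to | value | data
    // When offset already equals txListLength + headerLength after reading `data`,
    // the decoder skips the chainId and 0x8080 checks entirely.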