diff --git a/package.json b/package.json
index 4d8d3bfc9..b699686cc 100644
--- a/package.json
+++ b/package.json
@@ -8,6 +8,7 @@
     "@testing-library/react": "^12.1.4",
     "@testing-library/user-event": "^13.5.0",
     "@types/lodash": "^4.14.181",
+    "@types/pako": "^2.0.0",
     "@types/styled-components": "^5.1.24",
     "addressparser": "^1.0.1",
     "atob": "^2.1.2",
@@ -19,10 +20,12 @@
     "ethereumjs-abi": "^0.6.8",
     "ethers": "^5.7.1",
     "forge-std": "^1.1.2",
+    "js-untar": "^2.0.0",
     "libmime": "^5.1.0",
     "localforage": "^1.10.0",
     "lodash": "^4.17.21",
     "next": "^12.3.1",
+    "pako": "^2.1.0",
     "prettier": "^2.7.1",
     "prettier-plugin-solidity": "^1.0.0-beta.24",
     "react": "^17.0.2",
diff --git a/src/helpers/uncompress.ts b/src/helpers/uncompress.ts
new file mode 100644
index 000000000..ac06db175
--- /dev/null
+++ b/src/helpers/uncompress.ts
@@ -0,0 +1,39 @@
+import pako from 'pako';
+// @ts-ignore
+import untar from 'js-untar';
+
+// js-untar doesn't ship a .d.ts, so we declare the shape we use here.
+type TarFile = {
+  name: string,
+  buffer: ArrayBuffer
+}
+
+// Uncompresses a tarball containing a single .zkeyd file.
+// Returns that file; its contents are in the `buffer` field.
+const uncompressZkeydTarball = async (arrayBuffer: ArrayBuffer): Promise<TarFile> => {
+  console.log(`Started to uncompress tarball...!`);
+
+  // ungzip file
+  const output = pako.ungzip(arrayBuffer);
+  const buff = output.buffer;
+
+  // extract file(s) from tar
+  const files = await untar(buff);
+  console.log("files in tar file:", files.map((file: TarFile) => file.name));
+  // keep files whose names end in .zkey plus a single chunk character (.zkeyb, .zkeyc, ...)
+  const zkeydFiles = files.filter((file: TarFile) => file.name.match(/(.+)\.zkey.$/)?.[0]);
+  const fileNames = zkeydFiles.map((file: TarFile) => file.name);
+  console.log(fileNames.length, ".zkey* files in tar file:", fileNames);
+
+  if (zkeydFiles.length === 1) {
+    // exactly one matching file found in the tarball
+    const file = zkeydFiles[0];
+    return file;
+  } else if (zkeydFiles.length > 1) {
+    throw new Error("More than one .zkeyd file found in tarball");
+  } else {
+    throw new Error("No .zkey files found in tarball.");
+  }
+}
+
+export {uncompressZkeydTarball, type TarFile};
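
As a quick sanity check on the new helper, here is a minimal sketch of how it could be exercised from the browser. The chunk filename is a placeholder for illustration; the real names come from the download logic in zkp.ts below.

```ts
import { uncompressZkeydTarball, type TarFile } from "./uncompress";

// Hypothetical chunk tarball name, for illustration only.
const filename = "email.zkeyb.tar.gz";

async function fetchAndUnpack(): Promise<TarFile> {
  // Download the gzipped tarball as an ArrayBuffer...
  const resp = await fetch(`https://zkemail-zkey-chunks.s3.amazonaws.com/${filename}`);
  const compressed = await resp.arrayBuffer();
  // ...then gunzip + untar it and get back the single .zkey* entry.
  const file = await uncompressZkeydTarball(compressed);
  console.log(file.name, file.buffer.byteLength);
  return file;
}
```
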
diff --git a/src/helpers/zkp.ts b/src/helpers/zkp.ts
index 06450d3c9..b16f987b5 100644
--- a/src/helpers/zkp.ts
+++ b/src/helpers/zkp.ts
@@ -1,11 +1,11 @@
 import { vkey } from "./vkey";
+import localforage from 'localforage';
+import { uncompressZkeydTarball as uncompress } from "./uncompress";
 
-const localforage = require("localforage");
 const snarkjs = require("snarkjs");
-const tar = require('tar-stream')
-const zlib = require('zlib')
 
 const loadURL = "https://zkemail-zkey-chunks.s3.amazonaws.com/";
+// const loadURL = "/zkemail-zkey-chunks/";
 
 export async function downloadFromFilename(filename: string, compressed = false) {
   const link = loadURL + filename;
@@ -18,54 +18,23 @@ export async function downloadFromFilename(filename: string, compressed = false)
     if(!compressed){
       await localforage.setItem(filename, zkeyBuff);
     } else {
-      await uncompressAndStore(zkeyBuff, filename);
+      // uncompress the data
+      const zkeyUncompressed = await uncompress(zkeyBuff);
+      const rawFilename = filename.replace(/.tar.gz$/, "");
+      // store the uncompressed data
+      console.log("storing file in localforage", rawFilename)
+      await localforage.setItem(rawFilename, zkeyUncompressed);
+      console.log("stored file in localforage", rawFilename);
+      // await localforage.setItem(filename, zkeyBuff);
     }
     console.log(`Storage of ${filename} successful!`);
   } catch (e) {
     console.log(`Storage of ${filename} unsuccessful, make sure IndexedDB is enabled in your browser.`);
+    console.log(e);
   }
 }
 
 const zkeyExtension = ".tar.gz"
-
-// Un-targz the arrayBuffer into the filename without the .tar.gz on the end
-const uncompressAndStore = async function (arrayBuffer: ArrayBuffer, filename: string) {
-  console.log(`Started to uncompress ${filename}...!`);
-  const extract = tar.extract() // create a tar extract stream
-  const gunzip = zlib.createGunzip(arrayBuffer) // create a gunzip stream from the array buffer
-  gunzip.pipe(extract) // pipe the gunzip stream into the tar extract stream
-
-  // header is the tar header, stream is the content body (might be an empty stream), call next when you are done with this entry
-  extract.on('entry', function(header: any, stream: any, next: Function) {
-    // decompress the entry data
-    const extractedData: any = []
-    stream.on('data', function(chunk: any) {
-      extractedData.push(chunk)
-    })
-
-    // make sure to call next when the entry is fully processed
-    stream.on('end', function() {
-      next()
-
-      console.assert(filename.endsWith(zkeyExtension), `Filename doesn't end in ${zkeyExtension}`)
-      const rawFilename = filename.replace(/.tar.gz$/, "");
-      // save the extracted data to localForage
-      localforage.setItem(rawFilename, extractedData, function(err: Error) {
-        if (err) {
-          console.error(`Couldn't extract data from ${filename}:` + err.message)
-        } else {
-          console.log('Saved extracted file to localForage')
-        }
-      })
-    })
-  })
-
-  // all entries have been processed
-  extract.on('finish', function() {
-    console.log(`Finished extracting ${filename}`)
-  })
-}
-
 const zkeySuffix = ["b", "c", "d", "e", "f", "g", "h", "i", "j", "k"];
 
 export const downloadProofFiles = async function (filename: string) {
@@ -105,7 +74,8 @@ export async function generateProof(input: any, filename: string) {
   // TODO: figure out how to generate this s.t. it passes build
   console.log("generating proof for input");
   console.log(input);
-  const { proof, publicSignals } = await snarkjs.groth16.fullProve(input, `https://zkemail-zkey-chunks.s3.amazonaws.com/${filename}.wasm`, `${filename}.zkey`);
+  //const { proof, publicSignals } = await snarkjs.groth16.fullProve(input, `https://zkemail-zkey-chunks.s3.amazonaws.com/${filename}.wasm`, `${filename}.zkey`);
+  const { proof, publicSignals } = await snarkjs.groth16.fullProve(input, `${loadURL}${filename}.wasm`, `${filename}.zkey`);
   console.log(`Generated proof ${JSON.stringify(proof)}`);
 
   return {
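
For orientation, a rough sketch of how the updated flow would be driven from the client. The circuit name "email" and the input object are placeholders, and downloadProofFiles itself is not modified by this diff; only the storage path it relies on changes.

```ts
import { downloadProofFiles, generateProof } from "./zkp";

// Placeholder circuit input; the real object comes from witness preparation.
async function proveEmail(input: any) {
  // Download and cache the proving-key chunks (zkeySuffix runs b..k in this file);
  // on the compressed path each .tar.gz is unpacked and stored in localforage
  // under its raw chunk name.
  await downloadProofFiles("email");
  // generateProof points snarkjs at `${loadURL}${filename}.wasm` for the circuit
  // wasm and `${filename}.zkey` for the proving key, as in the hunk above.
  return generateProof(input, "email");
}
```
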
diff --git a/yarn.lock b/yarn.lock
index a9eb75122..cc22541e9 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3337,6 +3337,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@types/pako@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "@types/pako@npm:2.0.0"
+  checksum: 50240a036b5e6acabbf36ac4dca93ec9e619241f0404da8d401cdb427bec3029833324b8a04c4b1ae2ecbc33422fdec31dbf9f43653d9d07cafb82ace78dfccd
+  languageName: node
+  linkType: hard
+
 "@types/parse-json@npm:^4.0.0":
   version: 4.0.0
   resolution: "@types/parse-json@npm:4.0.0"
@@ -7666,6 +7673,7 @@ __metadata:
     "@types/atob": ^2.1.2
     "@types/lodash": ^4.14.181
    "@types/node": ^18.0.6
+    "@types/pako": ^2.0.0
     "@types/styled-components": ^5.1.24
     addressparser: ^1.0.1
     atob: ^2.1.2
@@ -7677,11 +7685,13 @@
     ethereumjs-abi: ^0.6.8
     ethers: ^5.7.1
     forge-std: ^1.1.2
+    js-untar: ^2.0.0
     libmime: ^5.1.0
     localforage: ^1.10.0
     lodash: ^4.17.21
     next: ^12.3.1
     nodemon: ^2.0.19
+    pako: ^2.1.0
     prettier: ^2.7.1
     prettier-plugin-solidity: ^1.0.0-beta.24
     react: ^17.0.2
@@ -11790,6 +11800,13 @@
   languageName: node
   linkType: hard
 
+"js-untar@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "js-untar@npm:2.0.0"
+  checksum: 36fbcd3cda9049dc7da968998e44e79b0d030ac05d64949d2143dc7cfbe78aa69d7c0a01888edbbca8dff7b5332afcd17d5f91449105165499bb26a95816092d
+  languageName: node
+  linkType: hard
+
 "js-yaml@npm:^3.13.1":
   version: 3.14.1
   resolution: "js-yaml@npm:3.14.1"
@@ -13707,6 +13724,13 @@
   languageName: node
   linkType: hard
 
+"pako@npm:^2.1.0":
+  version: 2.1.0
+  resolution: "pako@npm:2.1.0"
+  checksum: 71666548644c9a4d056bcaba849ca6fd7242c6cf1af0646d3346f3079a1c7f4a66ffec6f7369ee0dc88f61926c10d6ab05da3e1fca44b83551839e89edd75a3e
+  languageName: node
+  linkType: hard
+
 "pako@npm:~1.0.5":
   version: 1.0.11
   resolution: "pako@npm:1.0.11"