diff --git a/Cargo.lock b/Cargo.lock index bd70c8fef2c..829ca7c792c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2897,6 +2897,8 @@ dependencies = [ "num-bigint", "proptest", "serde", + "serde_json", + "serde_with", "thiserror", "tracing", ] diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 2e185c69461..72c95823553 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -66,10 +66,16 @@ pub struct CompileOptions { #[arg(long = "force")] pub force_compile: bool, - /// Emit debug information for the intermediate SSA IR + /// Emit debug information for the intermediate SSA IR to stdout #[arg(long, hide = true)] pub show_ssa: bool, + /// Emit the unoptimized SSA IR to file. + /// The IR will be dumped into the workspace target directory, + /// under `[compiled-package].ssa.json`. + #[arg(long, hide = true)] + pub emit_ssa: bool, + #[arg(long, hide = true)] pub show_brillig: bool, @@ -548,8 +554,11 @@ pub fn compile_no_check( // If user has specified that they want to see intermediate steps printed then we should // force compilation even if the program hasn't changed. - let force_compile = - force_compile || options.print_acir || options.show_brillig || options.show_ssa; + let force_compile = force_compile + || options.print_acir + || options.show_brillig + || options.show_ssa + || options.emit_ssa; if !force_compile && hashes_match { info!("Program matches existing artifact, returning early"); @@ -566,6 +575,7 @@ pub fn compile_no_check( } else { ExpressionWidth::default() }, + emit_ssa: if options.emit_ssa { Some(context.package_build_path.clone()) } else { None }, }; let SsaProgramArtifact { program, debug, warnings, names, error_types, .. 
} = diff --git a/compiler/noirc_evaluator/Cargo.toml b/compiler/noirc_evaluator/Cargo.toml index 52f154fd1f3..66c770b5064 100644 --- a/compiler/noirc_evaluator/Cargo.toml +++ b/compiler/noirc_evaluator/Cargo.toml @@ -19,6 +19,8 @@ thiserror.workspace = true num-bigint = "0.4" im.workspace = true serde.workspace = true +serde_json.workspace = true +serde_with = "3.2.0" tracing.workspace = true chrono = "0.4.37" diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 41dbf3b7272..032e3d07d7b 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -7,7 +7,12 @@ //! This module heavily borrows from Cranelift #![allow(dead_code)] -use std::collections::{BTreeMap, BTreeSet}; +use std::{ + collections::{BTreeMap, BTreeSet}, + fs::File, + io::Write, + path::{Path, PathBuf}, +}; use crate::errors::{RuntimeError, SsaReport}; use acvm::{ @@ -56,6 +61,9 @@ pub struct SsaEvaluatorOptions { /// Width of expressions to be used for ACIR pub expression_width: ExpressionWidth, + + /// Dump the unoptimized SSA to the supplied path if it exists + pub emit_ssa: Option<PathBuf>, } pub(crate) struct ArtifactsAndWarnings(Artifacts, Vec<SsaReport>); @@ -76,6 +84,7 @@ pub(crate) fn optimize_into_acir( options.enable_ssa_logging, options.force_brillig_output, options.print_codegen_timings, + &options.emit_ssa, )? .run_pass(Ssa::defunctionalize, "After Defunctionalization:") .run_pass(Ssa::remove_paired_rc, "After Removing Paired rc_inc & rc_decs:") @@ -346,8 +355,18 @@ impl SsaBuilder { print_ssa_passes: bool, force_brillig_runtime: bool, print_codegen_timings: bool, + emit_ssa: &Option<PathBuf>, ) -> Result<SsaBuilder, RuntimeError> { let ssa = ssa_gen::generate_ssa(program, force_brillig_runtime)?; + if let Some(emit_ssa) = emit_ssa { + let mut emit_ssa_dir = emit_ssa.clone(); + // We expect the full package artifact path to be passed in here, + // and attempt to create the target directory if it does not exist.
+ emit_ssa_dir.pop(); + create_named_dir(emit_ssa_dir.as_ref(), "target"); + let ssa_path = emit_ssa.with_extension("ssa.json"); + write_to_file(&serde_json::to_vec(&ssa).unwrap(), &ssa_path); + } Ok(SsaBuilder { print_ssa_passes, print_codegen_timings, ssa }.print("Initial SSA:")) } @@ -378,3 +397,23 @@ impl SsaBuilder { self } } + +fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { + std::fs::create_dir_all(named_dir) + .unwrap_or_else(|_| panic!("could not create the `{name}` directory")); + + PathBuf::from(named_dir) +} + +fn write_to_file(bytes: &[u8], path: &Path) { + let display = path.display(); + + let mut file = match File::create(path) { + Err(why) => panic!("couldn't create {display}: {why}"), + Ok(file) => file, + }; + + if let Err(why) = file.write_all(bytes) { + panic!("couldn't write to {display}: {why}"); + } +} diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index 50964e9161b..0cdeed6153c 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -6,6 +6,7 @@ use acvm::FieldElement; use fxhash::FxHashMap as HashMap; use noirc_frontend::ast; use noirc_frontend::hir_def::function::FunctionSignature; +use serde::{Deserialize, Serialize}; use super::FunctionBuilder; @@ -52,13 +53,13 @@ impl DataBusBuilder { } } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Serialize, Deserialize)] pub(crate) struct CallData { pub(crate) array_id: ValueId, pub(crate) index_map: HashMap, } -#[derive(Clone, Default, Debug)] +#[derive(Clone, Default, Debug, Serialize, Deserialize)] pub(crate) struct DataBus { pub(crate) call_data: Vec, pub(crate) return_data: Option, diff --git a/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs b/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs index 981afa3e380..a7c637dedd0 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs +++ 
b/compiler/noirc_evaluator/src/ssa/ir/basic_block.rs @@ -4,6 +4,7 @@ use super::{ map::Id, value::ValueId, }; +use serde::{Deserialize, Serialize}; /// A Basic block is a maximal collection of instructions /// such that there are only jumps at the end of block @@ -11,7 +12,7 @@ use super::{ /// /// This means that if one instruction is executed in a basic /// block, then all instructions are executed. ie single-entry single-exit. -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] pub(crate) struct BasicBlock { /// Parameters to the basic block. parameters: Vec, diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 994386f8197..34d7d595eb9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -17,12 +17,16 @@ use acvm::{acir::AcirField, FieldElement}; use fxhash::FxHashMap as HashMap; use iter_extended::vecmap; use noirc_errors::Location; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use serde_with::DisplayFromStr; /// The DataFlowGraph contains most of the actual data in a function including /// its blocks, instructions, and values. This struct is largely responsible for /// owning most data in a function and handing out Ids to this data that can be /// shared without worrying about ownership. 
-#[derive(Debug, Default, Clone)] +#[serde_as] +#[derive(Debug, Default, Clone, Serialize, Deserialize)] pub(crate) struct DataFlowGraph { /// All of the instructions in a function instructions: DenseMap<Instruction>, @@ -36,6 +40,7 @@ pub(crate) struct DataFlowGraph { /// Currently, we need to define them in a better way /// Call instructions require the func signature, but /// other instructions may need some more reading on my part + #[serde_as(as = "HashMap<DisplayFromStr, _>")] results: HashMap<InstructionId, Vec<ValueId>>, /// Storage for all of the values defined in this @@ -44,21 +49,25 @@ pub(crate) struct DataFlowGraph { /// Each constant is unique, attempting to insert the same constant /// twice will return the same ValueId. + #[serde(skip)] constants: HashMap<(FieldElement, Type), ValueId>, /// Contains each function that has been imported into the current function. /// A unique `ValueId` for each function's [`Value::Function`] is stored so any given FunctionId /// will always have the same ValueId within this function. + #[serde(skip)] functions: HashMap<FunctionId, ValueId>, /// Contains each intrinsic that has been imported into the current function. /// This map is used to ensure that the ValueId for any given intrinsic is always /// represented by only 1 ValueId within this function. + #[serde(skip)] intrinsics: HashMap<Intrinsic, ValueId>, /// Contains each foreign function that has been imported into the current function. /// This map is used to ensure that the ValueId for any given foreign function is always /// represented by only 1 ValueId within this function. + #[serde(skip)] foreign_functions: HashMap<String, ValueId>, /// All blocks in a function @@ -67,6 +76,7 @@ pub(crate) struct DataFlowGraph { /// Debugging information about which `ValueId`s have had their underlying `Value` substituted /// for that of another. This information is purely used for printing the SSA, and has no /// material effect on the SSA itself. + #[serde(skip)] replaced_value_ids: HashMap<ValueId, ValueId>, /// Source location of each instruction for debugging and issuing errors.
@@ -79,8 +89,10 @@ pub(crate) struct DataFlowGraph { /// /// Instructions inserted by internal SSA passes that don't correspond to user code /// may not have a corresponding location. + #[serde(skip)] locations: HashMap, + #[serde(skip)] pub(crate) data_bus: DataBus, } diff --git a/compiler/noirc_evaluator/src/ssa/ir/function.rs b/compiler/noirc_evaluator/src/ssa/ir/function.rs index c44824b464b..bae9f82e4f1 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/function.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/function.rs @@ -2,6 +2,7 @@ use std::collections::BTreeSet; use iter_extended::vecmap; use noirc_frontend::monomorphization::ast::InlineType; +use serde::{Deserialize, Serialize}; use super::basic_block::BasicBlockId; use super::dfg::DataFlowGraph; @@ -10,7 +11,7 @@ use super::map::Id; use super::types::Type; use super::value::ValueId; -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] pub(crate) enum RuntimeType { // A noir function, to be compiled in ACIR and executed by ACVM Acir(InlineType), @@ -37,7 +38,7 @@ impl RuntimeType { /// All functions outside of the current function are seen as external. /// To reference external functions its FunctionId can be used but this /// cannot be checked for correctness until inlining is performed. 
-#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub(crate) struct Function { /// The first basic block in the function entry_block: BasicBlockId, diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 8cbae732ef9..0573b1c9ce1 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -1,3 +1,4 @@ +use serde::{Deserialize, Serialize}; use std::hash::{Hash, Hasher}; use acvm::{ @@ -47,7 +48,7 @@ pub(crate) type InstructionId = Id; /// - Opcodes which have no function definition in the /// source code and must be processed by the IR. An example /// of this is println. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub(crate) enum Intrinsic { ArrayLen, AsSlice, @@ -169,13 +170,13 @@ impl Intrinsic { } /// The endian-ness of bits when encoding values as bits in e.g. ToBits or ToRadix -#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)] pub(crate) enum Endian { Big, Little, } -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. 
pub(crate) enum Instruction { @@ -753,7 +754,7 @@ pub(crate) fn error_selector_from_type(typ: &ErrorType) -> ErrorSelector { } } -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] pub(crate) enum ConstrainError { // These are errors which have been hardcoded during SSA gen Intrinsic(String), @@ -795,7 +796,7 @@ pub(crate) enum InstructionResultType { /// Since our IR needs to be in SSA form, it makes sense /// to split up instructions like this, as we are sure that these instructions /// will not be in the list of instructions for a basic block. -#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] pub(crate) enum TerminatorInstruction { /// Control flow /// diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index fba727ca226..03262be0a06 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -1,4 +1,5 @@ use acvm::{acir::AcirField, FieldElement}; +use serde::{Deserialize, Serialize}; use super::{ DataFlowGraph, Instruction, InstructionResultType, NumericType, SimplifyResult, Type, ValueId, @@ -11,7 +12,7 @@ use super::{ /// All binary operators are also only for numeric types. To implement /// e.g. equality for a compound type like a struct, one must add a /// separate Eq operation for each field and combine them later with And. -#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, Serialize, Deserialize)] pub(crate) enum BinaryOp { /// Addition of lhs + rhs. Add, @@ -64,7 +65,7 @@ impl std::fmt::Display for BinaryOp { } /// A binary instruction in the IR. 
-#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] pub(crate) struct Binary { /// Left hand side of the binary operation pub(crate) lhs: ValueId, diff --git a/compiler/noirc_evaluator/src/ssa/ir/map.rs b/compiler/noirc_evaluator/src/ssa/ir/map.rs index 3c3feabc390..1c9a31f0c99 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/map.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/map.rs @@ -1,8 +1,11 @@ use fxhash::FxHashMap as HashMap; +use serde::{Deserialize, Serialize}; use std::{ hash::Hash, + str::FromStr, sync::atomic::{AtomicUsize, Ordering}, }; +use thiserror::Error; /// A unique ID corresponding to a value of type T. /// This type can be used to retrieve a value of type T from @@ -12,8 +15,11 @@ use std::{ /// DenseMap or SparseMap. If an Id was created to correspond to one /// particular map type, users need to take care not to use it with /// another map where it will likely be invalid. +#[derive(Serialize, Deserialize)] pub(crate) struct Id { index: usize, + // If we do not skip this field it will simply serialize as `"_marker":null` which is useless extra data + #[serde(skip)] _marker: std::marker::PhantomData, } @@ -106,7 +112,58 @@ impl std::fmt::Display for Id { impl std::fmt::Display for Id { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "f{}", self.index) + write!(f, "i{}", self.index) + } +} + +#[derive(Error, Debug)] +pub(crate) enum IdDisplayFromStrErr { + #[error("Invalid id when deserializing SSA: {0}")] + InvalidId(String), +} + +/// The implementation of display and FromStr allows serializing and deserializing an Id to a string. +/// This is useful when used as key in a map that has to be serialized to JSON/TOML. 
+impl FromStr for Id<super::basic_block::BasicBlock> { + type Err = IdDisplayFromStrErr; + fn from_str(s: &str) -> Result<Self, Self::Err> { + id_from_str_helper::<super::basic_block::BasicBlock>(s, 'b') + } +} + +impl FromStr for Id<super::value::Value> { + type Err = IdDisplayFromStrErr; + fn from_str(s: &str) -> Result<Self, Self::Err> { + id_from_str_helper::<super::value::Value>(s, 'v') + } +} + +impl FromStr for Id<super::function::Function> { + type Err = IdDisplayFromStrErr; + fn from_str(s: &str) -> Result<Self, Self::Err> { + id_from_str_helper::<super::function::Function>(s, 'f') + } +} + +impl FromStr for Id<super::instruction::Instruction> { + type Err = IdDisplayFromStrErr; + fn from_str(s: &str) -> Result<Self, Self::Err> { + id_from_str_helper::<super::instruction::Instruction>(s, 'i') + } +} + +fn id_from_str_helper<T>(s: &str, value_prefix: char) -> Result<Id<T>, IdDisplayFromStrErr> { + if s.len() < 2 { + return Err(IdDisplayFromStrErr::InvalidId(s.to_string())); + } + + let index = &s[1..]; + let index = index.parse().map_err(|_| IdDisplayFromStrErr::InvalidId(s.to_string()))?; + + if s.chars().next().unwrap() == value_prefix { + Ok(Id::<T>::new(index)) + } else { + Err(IdDisplayFromStrErr::InvalidId(s.to_string())) } } @@ -115,7 +172,7 @@ impl std::fmt::Display for Id { /// A DenseMap is a Vec wrapper where each element corresponds to a unique ID that /// access to indices is provided. Since IDs must be stable and correspond /// to indices in the internal Vec, operations that would change element /// ordering like pop, remove, swap_remove, etc, are not possible. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct DenseMap<T> { storage: Vec<T>, } @@ -300,7 +357,7 @@ impl std::ops::Index<&K> for TwoWayMap { /// for types that have no single owner. /// /// This type wraps an AtomicUsize so it can safely be used across threads.
-#[derive(Debug)] +#[derive(Debug, Serialize, Deserialize)] pub(crate) struct AtomicCounter { next: AtomicUsize, _marker: std::marker::PhantomData, diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 56729a5cba9..7e62883f57c 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -1,3 +1,4 @@ +use serde::{Deserialize, Serialize}; use std::rc::Rc; use acvm::{acir::AcirField, FieldElement}; @@ -13,7 +14,7 @@ use crate::ssa::ssa_gen::SSA_WORD_SIZE; /// /// Fields do not have a notion of ordering, so this distinction /// is reasonable. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub enum NumericType { Signed { bit_size: u32 }, Unsigned { bit_size: u32 }, @@ -65,7 +66,7 @@ impl NumericType { } /// All types representable in the IR. -#[derive(Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)] pub(crate) enum Type { /// Represents numeric types in the IR, including field elements Numeric(NumericType), diff --git a/compiler/noirc_evaluator/src/ssa/ir/value.rs b/compiler/noirc_evaluator/src/ssa/ir/value.rs index c83609dec1f..795d45c75e9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -1,4 +1,5 @@ use acvm::FieldElement; +use serde::{Deserialize, Serialize}; use crate::ssa::ir::basic_block::BasicBlockId; @@ -13,7 +14,7 @@ pub(crate) type ValueId = Id; /// Value is the most basic type allowed in the IR. /// Transition Note: A Id is similar to `NodeId` in our previous IR. 
-#[derive(Debug, PartialEq, Eq, Hash, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] pub(crate) enum Value { /// This value was created due to an instruction /// diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs index 7a77aa76101..fe786da16ca 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs @@ -2,6 +2,8 @@ use std::{collections::BTreeMap, fmt::Display}; use acvm::acir::circuit::ErrorSelector; use iter_extended::btree_map; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; use crate::ssa::ir::{ function::{Function, FunctionId, RuntimeType}, @@ -10,15 +12,23 @@ use crate::ssa::ir::{ use noirc_frontend::hir_def::types::Type as HirType; /// Contains the entire SSA representation of the program. +#[serde_as] +#[derive(Serialize, Deserialize)] pub(crate) struct Ssa { + #[serde_as(as = "Vec<(_, _)>")] pub(crate) functions: BTreeMap, pub(crate) main_id: FunctionId, + #[serde(skip)] pub(crate) next_id: AtomicCounter, /// Maps SSA entry point function ID -> Final generated ACIR artifact index. /// There can be functions specified in SSA which do not act as ACIR entry points. /// This mapping is necessary to use the correct function pointer for an ACIR call, /// as the final program artifact will be a list of only entry point functions. + #[serde(skip)] pub(crate) entry_point_to_generated_index: BTreeMap, + // We can skip serializing this field as the error selector types end up as part of the + // ABI not the actual SSA IR. 
+ #[serde(skip)] pub(crate) error_selector_to_type: BTreeMap, } @@ -98,3 +108,43 @@ impl Display for Ssa { Ok(()) } } + +#[cfg(test)] +mod test { + use crate::ssa::ir::map::Id; + + use crate::ssa::ssa_gen::Ssa; + use crate::ssa::{ + function_builder::FunctionBuilder, + ir::{instruction::BinaryOp, types::Type}, + }; + + #[test] + fn serialization_roundtrip() { + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + let v0 = builder.add_parameter(Type::field()); + + let one = builder.field_constant(1u128); + let three = builder.field_constant(3u128); + + let v1 = builder.insert_binary(v0, BinaryOp::Add, one); + let v2 = builder.insert_binary(v1, BinaryOp::Mul, three); + builder.terminate_with_return(vec![v2]); + + let ssa = builder.finish(); + let serialized_ssa = &serde_json::to_string(&ssa).unwrap(); + let deserialized_ssa: Ssa = serde_json::from_str(serialized_ssa).unwrap(); + let actual_string = format!("{}", deserialized_ssa); + + let expected_string = "acir(inline) fn main f0 {\n \ + b0(v0: Field):\n \ + v3 = add v0, Field 1\n \ + v4 = mul v3, Field 3\n \ + return v4\n\ + }\n"; + assert_eq!(actual_string, expected_string); + } +} diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 6e91f2fdb61..e4f000778d1 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -19,6 +19,7 @@ use iter_extended::vecmap; use noirc_errors::Location; use std::borrow::Cow; use std::collections::{BTreeMap, HashMap}; +use std::path::PathBuf; use std::rc::Rc; use self::def_map::TestFunction; @@ -47,6 +48,8 @@ pub struct Context<'file_manager, 'parsed_files> { // Same as the file manager, we take ownership of the parsed files in the WASM context. // Parsed files is also read only. 
pub parsed_files: Cow<'parsed_files, ParsedFiles>, + + pub package_build_path: PathBuf, } #[derive(Debug, Copy, Clone)] @@ -66,6 +69,7 @@ impl Context<'_, '_> { file_manager: Cow::Owned(file_manager), debug_instrumenter: DebugInstrumenter::default(), parsed_files: Cow::Owned(parsed_files), + package_build_path: PathBuf::default(), } } @@ -81,6 +85,7 @@ impl Context<'_, '_> { file_manager: Cow::Borrowed(file_manager), debug_instrumenter: DebugInstrumenter::default(), parsed_files: Cow::Borrowed(parsed_files), + package_build_path: PathBuf::default(), } } diff --git a/compiler/noirc_frontend/src/monomorphization/ast.rs b/compiler/noirc_frontend/src/monomorphization/ast.rs index 47698c5b65c..f2ed9433e61 100644 --- a/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -10,6 +10,7 @@ use crate::{ ast::{BinaryOpKind, IntegerBitSize, Signedness, Visibility}, token::{Attributes, FunctionAttribute}, }; +use serde::{Deserialize, Serialize}; use super::HirType; @@ -207,7 +208,7 @@ pub type Parameters = Vec<(LocalId, /*mutable:*/ bool, /*name:*/ String, Type)>; /// Represents how an Acir function should be inlined. /// This type is only relevant for ACIR functions as we do not inline any Brillig functions -#[derive(Default, Clone, Copy, PartialEq, Eq, Debug, Hash)] +#[derive(Default, Clone, Copy, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)] pub enum InlineType { /// The most basic entry point can expect all its functions to be inlined. /// All function calls are expected to be inlined into a single ACIR. diff --git a/tooling/nargo/src/ops/compile.rs b/tooling/nargo/src/ops/compile.rs index d7c7cc2c123..cd9ccf67957 100644 --- a/tooling/nargo/src/ops/compile.rs +++ b/tooling/nargo/src/ops/compile.rs @@ -31,7 +31,9 @@ pub fn compile_workspace( // Compile all of the packages in parallel. 
let program_results: Vec> = binary_packages .par_iter() - .map(|package| compile_program(file_manager, parsed_files, package, compile_options, None)) + .map(|package| { + compile_program(file_manager, parsed_files, workspace, package, compile_options, None) + }) .collect(); let contract_results: Vec> = contract_packages .par_iter() @@ -57,6 +59,7 @@ pub fn compile_workspace( pub fn compile_program( file_manager: &FileManager, parsed_files: &ParsedFiles, + workspace: &Workspace, package: &Package, compile_options: &CompileOptions, cached_program: Option, @@ -64,6 +67,7 @@ pub fn compile_program( compile_program_with_debug_instrumenter( file_manager, parsed_files, + workspace, package, compile_options, cached_program, @@ -74,6 +78,7 @@ pub fn compile_program( pub fn compile_program_with_debug_instrumenter( file_manager: &FileManager, parsed_files: &ParsedFiles, + workspace: &Workspace, package: &Package, compile_options: &CompileOptions, cached_program: Option, @@ -82,6 +87,7 @@ pub fn compile_program_with_debug_instrumenter( let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); link_to_debug_crate(&mut context, crate_id); context.debug_instrumenter = debug_instrumenter; + context.package_build_path = workspace.package_build_path(package); noirc_driver::compile_main(&mut context, crate_id, compile_options, cached_program) } diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 3e3560c91bf..21cf9751a8b 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -187,6 +187,7 @@ fn compile_programs( let (program, warnings) = compile_program( file_manager, parsed_files, + workspace, package, compile_options, load_cached_program(package), diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index 311af9b9db0..0a593e09c17 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ 
b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -113,13 +113,21 @@ pub(crate) fn compile_bin_package_for_debugging( compile_program_with_debug_instrumenter( &workspace_file_manager, &parsed_files, + workspace, package, &compile_options, None, debug_state, ) } else { - compile_program(&workspace_file_manager, &parsed_files, package, &compile_options, None) + compile_program( + &workspace_file_manager, + &parsed_files, + workspace, + package, + &compile_options, + None, + ) }; report_errors( diff --git a/tooling/nargo_cli/src/cli/fs/program.rs b/tooling/nargo_cli/src/cli/fs/program.rs index caeaafd4ab3..323cd2c6a06 100644 --- a/tooling/nargo_cli/src/cli/fs/program.rs +++ b/tooling/nargo_cli/src/cli/fs/program.rs @@ -31,7 +31,6 @@ fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( ) -> PathBuf { create_named_dir(circuit_dir.as_ref(), "target"); let circuit_path = circuit_dir.as_ref().join(artifact_name).with_extension("json"); - write_to_file(&serde_json::to_vec(build_artifact).unwrap(), &circuit_path); circuit_path