feat(ssa): Simplify signed casts (noir-lang/noir#6166)
feat: visibility for modules (noir-lang/noir#6165)
AztecBot committed Sep 29, 2024
1 parent b0d1bab commit 03977f7
Showing 95 changed files with 1,911 additions and 1,091 deletions.
2 changes: 1 addition & 1 deletion .noir-sync-commit
@@ -1 +1 @@
164d29e4d1960d16fdeafe2cc8ea8144a769f7b2
eec3a6152493e56866ec5338ff52f823c530778e
2 changes: 1 addition & 1 deletion noir/noir-repo/.github/workflows/gates_report_brillig.yml
@@ -74,7 +74,7 @@ jobs:
- name: Compare Brillig bytecode size reports
id: brillig_bytecode_diff
uses: noir-lang/noir-gates-diff@3fb844067b25d1b59727ea600b614503b33503f4
uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48
with:
report: gates_report_brillig.json
header: |
8 changes: 0 additions & 8 deletions noir/noir-repo/.github/workflows/mirror-external_libs.yml

This file was deleted.

14 changes: 5 additions & 9 deletions noir/noir-repo/.github/workflows/test-js-packages.yml
@@ -519,16 +519,12 @@ jobs:
fail-fast: false
matrix:
project:
# Disabled as these are currently failing with many visibility errors
# - { repo: AztecProtocol/aztec-nr, path: ./ }
# - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts }
# Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml`
#- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits }
# Disabled as these are currently failing with many visibility errors
- { repo: AztecProtocol/aztec-nr, path: ./ }
- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts }
# Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml`
#- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits }
- { repo: zac-williamson/noir-edwards, path: ./, ref: 037e44b2ee8557c51f6aef9bb9d63ea9e32722d1 }
# TODO: Enable these once they're passing against master again.
# - { repo: zac-williamson/noir-bignum, path: ./, ref: 030c2acce1e6b97c44a3bbbf3429ed96f20d72d3 }
# - { repo: vlayer-xyz/monorepo, path: ./, ref: ee46af88c025863872234eb05d890e1e447907cb }
# - { repo: hashcloak/noir-bigint, path: ./, ref: 940ddba3a5201b508e7b37a2ef643551afcf5ed8 }
name: Check external repo - ${{ matrix.project.repo }}
steps:
- name: Checkout
1 change: 1 addition & 0 deletions noir/noir-repo/Cargo.lock


2 changes: 1 addition & 1 deletion noir/noir-repo/acvm-repo/acvm_js/build.sh
@@ -25,7 +25,7 @@ function run_if_available {
require_command jq
require_command cargo
require_command wasm-bindgen
#require_command wasm-opt
require_command wasm-opt

self_path=$(dirname "$(readlink -f "$0")")
pname=$(cargo read-manifest | jq -r '.name')
5 changes: 4 additions & 1 deletion noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs
@@ -13,7 +13,10 @@ mod schnorr;
use ark_ec::AffineRepr;
pub use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul};
pub use generator::generators::derive_generators;
pub use poseidon2::{field_from_hex, poseidon2_permutation, Poseidon2Config, POSEIDON2_CONFIG};
pub use poseidon2::{
field_from_hex, poseidon2_permutation, poseidon_hash, Poseidon2Config, Poseidon2Sponge,
POSEIDON2_CONFIG,
};

// Temporary hack: this ensures that we always use a bn254 field here
// without polluting the feature flags of the `acir_field` crate.
84 changes: 84 additions & 0 deletions noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs
@@ -543,6 +543,75 @@ impl<'a> Poseidon2<'a> {
}
}

/// Performs a poseidon hash with a sponge construction equivalent to the one in poseidon2.nr
pub fn poseidon_hash(inputs: &[FieldElement]) -> Result<FieldElement, BlackBoxResolutionError> {
let two_pow_64 = 18446744073709551616_u128.into();
let iv = FieldElement::from(inputs.len()) * two_pow_64;
let mut sponge = Poseidon2Sponge::new(iv, 3);
for input in inputs.iter() {
sponge.absorb(*input)?;
}
sponge.squeeze()
}

pub struct Poseidon2Sponge<'a> {
rate: usize,
poseidon: Poseidon2<'a>,
squeezed: bool,
cache: Vec<FieldElement>,
state: Vec<FieldElement>,
}

impl<'a> Poseidon2Sponge<'a> {
pub fn new(iv: FieldElement, rate: usize) -> Poseidon2Sponge<'a> {
let mut result = Poseidon2Sponge {
cache: Vec::with_capacity(rate),
state: vec![FieldElement::zero(); rate + 1],
squeezed: false,
rate,
poseidon: Poseidon2::new(),
};
result.state[rate] = iv;
result
}

fn perform_duplex(&mut self) -> Result<(), BlackBoxResolutionError> {
// zero-pad the cache
for _ in self.cache.len()..self.rate {
self.cache.push(FieldElement::zero());
}
// add the cache into sponge state
for i in 0..self.rate {
self.state[i] += self.cache[i];
}
self.state = self.poseidon.permutation(&self.state, 4)?;
Ok(())
}

pub fn absorb(&mut self, input: FieldElement) -> Result<(), BlackBoxResolutionError> {
assert!(!self.squeezed);
if self.cache.len() == self.rate {
// If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache
self.perform_duplex()?;
self.cache = vec![input];
} else {
// If we're absorbing, and the cache is not full, add the input into the cache
self.cache.push(input);
}
Ok(())
}

pub fn squeeze(&mut self) -> Result<FieldElement, BlackBoxResolutionError> {
assert!(!self.squeezed);
// If we're in absorb mode, apply sponge permutation to compress the cache.
self.perform_duplex()?;
self.squeezed = true;

// Pop one item off the top of the permutation and return it.
Ok(self.state[0])
}
}

#[cfg(test)]
mod test {
use acir::AcirField;
@@ -562,4 +631,19 @@ mod test {
];
assert_eq!(result, expected_result);
}

#[test]
fn hash_smoke_test() {
let fields = [
FieldElement::from(1u128),
FieldElement::from(2u128),
FieldElement::from(3u128),
FieldElement::from(4u128),
];
let result = super::poseidon_hash(&fields).expect("should hash successfully");
assert_eq!(
result,
field_from_hex("130bf204a32cac1f0ace56c78b731aa3809f06df2731ebcf6b3464a15788b1b9"),
);
}
}
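
Aside (not part of the commit): a minimal sketch of driving the new sponge API by hand and checking it agrees with the poseidon_hash wrapper above. It is written as if it lived in the test module of this file; the test name and the acir::FieldElement import path are assumptions.

#[test]
fn sponge_matches_poseidon_hash_sketch() {
    // Illustrative only: names come from the hunk above, import paths are assumed.
    use super::{poseidon_hash, Poseidon2Sponge};
    use acir::FieldElement;

    let inputs = [FieldElement::from(1u128), FieldElement::from(2u128)];

    // Same IV convention as `poseidon_hash`: the input length scaled by 2^64.
    let iv = FieldElement::from(inputs.len()) * FieldElement::from(18446744073709551616_u128);
    let mut sponge = Poseidon2Sponge::new(iv, 3);
    for input in &inputs {
        sponge.absorb(*input).expect("absorb should succeed");
    }
    let manual = sponge.squeeze().expect("squeeze should succeed");

    // Driving the sponge by hand should match the convenience wrapper.
    assert_eq!(manual, poseidon_hash(&inputs).expect("hash should succeed"));
}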
@@ -553,14 +553,7 @@ impl<'block> BrilligBlock<'block> {
let results = dfg.instruction_results(instruction_id);

let source = self.convert_ssa_single_addr_value(arguments[0], dfg);

let radix: u32 = dfg
.get_numeric_constant(arguments[1])
.expect("Radix should be known")
.try_to_u64()
.expect("Radix should fit in u64")
.try_into()
.expect("Radix should be u32");
let radix = self.convert_ssa_single_addr_value(arguments[1], dfg);

let target_array = self
.variables
@@ -595,13 +588,17 @@ impl<'block> BrilligBlock<'block> {
)
.extract_array();

let two = self.brillig_context.make_usize_constant_instruction(2_usize.into());

self.brillig_context.codegen_to_radix(
source,
target_array,
2,
two,
matches!(endianness, Endian::Big),
true,
);

self.brillig_context.deallocate_single_addr(two);
}

// `Intrinsic::AsWitness` is used to provide hints to acir-gen on optimal expression splitting.
@@ -1,7 +1,7 @@
//! This module analyzes the usage of constants in a given function and decides an allocation point for them.
//! The allocation point will be the common dominator of all the places where the constant is used.
//! By allocating in the common dominator, we can cache the constants for all subsequent uses.
use fxhash::FxHashMap as HashMap;
use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet};

use crate::ssa::ir::{
basic_block::BasicBlockId,
@@ -26,20 +26,23 @@ pub(crate) struct ConstantAllocation {
constant_usage: HashMap<ValueId, HashMap<BasicBlockId, Vec<InstructionLocation>>>,
allocation_points: HashMap<BasicBlockId, HashMap<InstructionLocation, Vec<ValueId>>>,
dominator_tree: DominatorTree,
blocks_within_loops: HashSet<BasicBlockId>,
}

impl ConstantAllocation {
pub(crate) fn from_function(func: &Function) -> Self {
let cfg = ControlFlowGraph::with_function(func);
let post_order = PostOrder::with_function(func);
let dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order);
let mut dominator_tree = DominatorTree::with_cfg_and_post_order(&cfg, &post_order);
let blocks_within_loops = find_all_blocks_within_loops(func, &cfg, &mut dominator_tree);
let mut instance = ConstantAllocation {
constant_usage: HashMap::default(),
allocation_points: HashMap::default(),
dominator_tree,
blocks_within_loops,
};
instance.collect_constant_usage(func);
instance.decide_allocation_points();
instance.decide_allocation_points(func);

instance
}
@@ -95,16 +98,16 @@ impl ConstantAllocation {
}
}

fn decide_allocation_points(&mut self) {
fn decide_allocation_points(&mut self, func: &Function) {
for (constant_id, usage_in_blocks) in self.constant_usage.iter() {
let block_ids: Vec<_> = usage_in_blocks.iter().map(|(block_id, _)| *block_id).collect();

let common_dominator = self.common_dominator(&block_ids);
let allocation_point = self.decide_allocation_point(*constant_id, &block_ids, func);

// If the common dominator is one of the places where it's used, we take the first usage in the common dominator.
// Otherwise, we allocate it at the terminator of the common dominator.
// If the allocation point is one of the places where it's used, we take the first usage in the allocation point.
// Otherwise, we allocate it at the terminator of the allocation point.
let location = if let Some(locations_in_common_dominator) =
usage_in_blocks.get(&common_dominator)
usage_in_blocks.get(&allocation_point)
{
*locations_in_common_dominator
.first()
@@ -114,29 +117,105 @@ impl ConstantAllocation {
};

self.allocation_points
.entry(common_dominator)
.entry(allocation_point)
.or_default()
.entry(location)
.or_default()
.push(*constant_id);
}
}

fn common_dominator(&self, block_ids: &[BasicBlockId]) -> BasicBlockId {
if block_ids.len() == 1 {
return block_ids[0];
}

let mut common_dominator = block_ids[0];
fn decide_allocation_point(
&self,
constant_id: ValueId,
blocks_where_is_used: &[BasicBlockId],
func: &Function,
) -> BasicBlockId {
// Find the common dominator of all the blocks where the constant is used.
let common_dominator = if blocks_where_is_used.len() == 1 {
blocks_where_is_used[0]
} else {
let mut common_dominator = blocks_where_is_used[0];

for block_id in blocks_where_is_used.iter().skip(1) {
common_dominator =
self.dominator_tree.common_dominator(common_dominator, *block_id);
}

for block_id in block_ids.iter().skip(1) {
common_dominator = self.dominator_tree.common_dominator(common_dominator, *block_id);
common_dominator
};
// If the value only contains constants, it's safe to hoist outside of any loop
if func.dfg.is_constant(constant_id) {
self.exit_loops(common_dominator)
} else {
common_dominator
}
}

common_dominator
/// Returns the nearest dominator that is outside of any loop.
fn exit_loops(&self, block: BasicBlockId) -> BasicBlockId {
let mut current_block = block;
while self.blocks_within_loops.contains(&current_block) {
current_block = self
.dominator_tree
.immediate_dominator(current_block)
.expect("No dominator found when trying to allocate a constant outside of a loop");
}
current_block
}
}
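
Aside (not part of the commit): a standalone sketch of the hoisting walk that exit_loops performs, with hypothetical u32 block ids and an explicit immediate-dominator map standing in for the crate's DominatorTree.

use std::collections::{HashMap, HashSet};

/// Simplified stand-in for `exit_loops`: walk up the immediate-dominator chain
/// until the current block is no longer inside any loop.
fn hoist_out_of_loops(
    mut block: u32,
    blocks_within_loops: &HashSet<u32>,
    immediate_dominators: &HashMap<u32, u32>,
) -> u32 {
    while blocks_within_loops.contains(&block) {
        block = *immediate_dominators
            .get(&block)
            .expect("no dominator found while hoisting a constant out of a loop");
    }
    block
}

fn main() {
    // Dominator chain 3 -> 2 -> 1 -> 0, where blocks 2 and 3 sit inside a loop.
    let idom = HashMap::from([(3, 2), (2, 1), (1, 0)]);
    let in_loops = HashSet::from([2, 3]);
    // A purely constant value used in block 3 is allocated in block 1,
    // the nearest dominator outside the loop.
    assert_eq!(hoist_out_of_loops(3, &in_loops, &idom), 1);
}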

pub(crate) fn is_constant_value(id: ValueId, dfg: &DataFlowGraph) -> bool {
matches!(&dfg[dfg.resolve(id)], Value::NumericConstant { .. } | Value::Array { .. })
}

/// For a given function, finds all the blocks that are within loops
fn find_all_blocks_within_loops(
func: &Function,
cfg: &ControlFlowGraph,
dominator_tree: &mut DominatorTree,
) -> HashSet<BasicBlockId> {
let mut blocks_in_loops = HashSet::default();
for block_id in func.reachable_blocks() {
let block = &func.dfg[block_id];
let successors = block.successors();
for successor_id in successors {
if dominator_tree.dominates(successor_id, block_id) {
blocks_in_loops.extend(find_blocks_in_loop(successor_id, block_id, cfg));
}
}
}

blocks_in_loops
}

/// Return each block that is in a loop starting in the given header block.
/// Expects back_edge_start -> header to be the back edge of the loop.
fn find_blocks_in_loop(
header: BasicBlockId,
back_edge_start: BasicBlockId,
cfg: &ControlFlowGraph,
) -> HashSet<BasicBlockId> {
let mut blocks = HashSet::default();
blocks.insert(header);

let mut insert = |block, stack: &mut Vec<BasicBlockId>| {
if !blocks.contains(&block) {
blocks.insert(block);
stack.push(block);
}
};

// Starting from the back edge of the loop, each predecessor of this block until
// the header is within the loop.
let mut stack = vec![];
insert(back_edge_start, &mut stack);

while let Some(block) = stack.pop() {
for predecessor in cfg.predecessors(block) {
insert(predecessor, &mut stack);
}
}

blocks
}
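
Aside (not part of the commit): a self-contained sketch of the back-edge walk that find_blocks_in_loop performs, using hypothetical u32 block ids and a predecessor map in place of the crate's ControlFlowGraph. Starting from the block that jumps back to the header, walking predecessors until the header is reached collects every block inside the loop.

use std::collections::{HashMap, HashSet};

/// Simplified stand-in for `find_blocks_in_loop`: given the back edge
/// `back_edge_start -> header`, every block reachable backwards from
/// `back_edge_start` before hitting the header belongs to the loop.
fn blocks_in_loop(
    header: u32,
    back_edge_start: u32,
    predecessors: &HashMap<u32, Vec<u32>>,
) -> HashSet<u32> {
    let mut blocks = HashSet::from([header, back_edge_start]);
    let mut stack = vec![back_edge_start];

    while let Some(block) = stack.pop() {
        for &pred in predecessors.get(&block).into_iter().flatten() {
            // The header is already in `blocks`, so the walk stops there and
            // never escapes to blocks that precede the loop.
            if blocks.insert(pred) {
                stack.push(pred);
            }
        }
    }
    blocks
}

fn main() {
    // CFG: 0 -> 1 (header) -> 2 -> 3 -> 1 (back edge), plus loop exit 1 -> 4.
    let preds = HashMap::from([(1, vec![0, 3]), (2, vec![1]), (3, vec![2]), (4, vec![1])]);
    assert_eq!(blocks_in_loop(1, 3, &preds), HashSet::from([1, 2, 3]));
}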
