From 35a486e2796f3ab37fc3c541a1210f40d01ed23a Mon Sep 17 00:00:00 2001
From: bing
Date: Wed, 5 Jun 2024 20:59:44 +0800
Subject: [PATCH] chore: fix all lints

There are a lot of changes - I'll briefly summarize what I did:

I ran, as much as possible, the following:

```sh
cargo fix --all --allow-dirty
cargo fmt
cargo clippy --fix --all --allow-dirty
```

Some parts I had to fix manually, or they were out of scope for the current PR, so I added lint allowances + TODOs instead. An example of such 'out of scope' fixes is the `unused_must_use`s in `r1cs/snarkjs.rs` - these would require additional work on error handling, which probably belongs in its own PR. For these cases, I prefer lint allowances since they make it easy to find the spots we want to fix later on.

Note that some of the lint allowances seem sane, so I didn't add TODOs for those - only for the ones I felt would improve code quality if removed.
---
 src/backends/kimchi/asm.rs | 4 ++-
 src/backends/kimchi/builtin.rs | 2 +-
 src/backends/kimchi/mod.rs | 37 ++++++++++++-------
 src/backends/kimchi/prover.rs | 5 +--
 src/backends/mod.rs | 14 +++++---
 src/backends/r1cs/mod.rs | 63 +++++++++++++++++++--------------
 src/backends/r1cs/snarkjs.rs | 32 ++++++++++-------
 src/circuit_writer/fn_env.rs | 46 ++++++++++++++----------
 src/circuit_writer/mod.rs | 11 ++++--
 src/circuit_writer/writer.rs | 21 +++++------
 src/cli/cmd_build_and_check.rs | 38 ++++++++++----------
 src/cli/cmd_new_and_init.rs | 14 ++++----
 src/cli/cmd_prove_and_verify.rs | 12 +++----
 src/cli/manifest.rs | 2 +-
 src/cli/mod.rs | 2 +-
 src/cli/packages.rs | 16 +++++----
 src/compiler.rs | 18 +++++++---
 src/constants.rs | 4 +++
 src/error.rs | 3 +-
 src/imports.rs | 6 ++--
 src/inputs.rs | 21 +++++------
 src/lexer/mod.rs | 23 ++++++++----
 src/lexer/tokens.rs | 15 ++++----
 src/lib.rs | 3 +-
 src/name_resolution/context.rs | 4 +--
 src/name_resolution/mod.rs | 4 +--
 src/negative_tests.rs | 12 +++----
 src/parser/expr.rs | 4 +--
 src/parser/mod.rs | 18 +++++-----
 src/parser/structs.rs | 7 ++--
 src/parser/types.rs | 62 +++++++++++++++++++-----------
 src/serialization.rs | 10 +++---
 src/stdlib/crypto.rs | 2 ++
 src/stdlib/mod.rs | 6 ++--
 src/syntax.rs | 9 +++--
 src/tests/examples.rs | 24 ++++++-------
 src/tests/modules.rs | 8 ++---
 src/type_checker/checker.rs | 37 ++++++++++---------
 src/type_checker/fn_env.rs | 14 ++++++--
 src/type_checker/mod.rs | 26 ++++++++++----
 src/utils.rs | 5 +--
 src/var.rs | 11 ++++--
 src/witness.rs | 3 +-
 43 files changed, 405 insertions(+), 273 deletions(-)

diff --git a/src/backends/kimchi/asm.rs b/src/backends/kimchi/asm.rs
index da1c6907e..46d54a9cf 100644
--- a/src/backends/kimchi/asm.rs
+++ b/src/backends/kimchi/asm.rs
@@ -22,7 +22,7 @@
 //! 
use std::collections::{HashMap, HashSet}; -use std::fmt::Write; use std::hash::Hash; use crate::helpers::PrettyField; @@ -38,6 +37,7 @@ pub fn extract_vars_from_coeffs(vars: &mut OrderedHashSet, coeffs: & } } +#[must_use] pub fn parse_coeffs(vars: &OrderedHashSet, coeffs: &[VestaField]) -> Vec { let mut coeffs: Vec<_> = coeffs .iter() @@ -90,10 +90,12 @@ where self.map[value] } + #[must_use] pub fn len(&self) -> usize { self.ordered.len() } + #[must_use] pub fn is_empty(&self) -> bool { self.ordered.is_empty() } diff --git a/src/backends/kimchi/builtin.rs b/src/backends/kimchi/builtin.rs index 566eabd18..36a4a4fe7 100644 --- a/src/backends/kimchi/builtin.rs +++ b/src/backends/kimchi/builtin.rs @@ -172,7 +172,7 @@ pub fn poseidon( let vars = final_row .iter() .flatten() - .cloned() + .copied() .map(ConstOrCell::Cell) .collect(); diff --git a/src/backends/kimchi/mod.rs b/src/backends/kimchi/mod.rs index 5d96e2197..409d145c4 100644 --- a/src/backends/kimchi/mod.rs +++ b/src/backends/kimchi/mod.rs @@ -47,9 +47,9 @@ pub struct Witness(Vec<[VestaField; NUM_REGISTERS]>); pub struct GeneratedWitness { /// contains all the witness values pub all_witness: Witness, - /// contains the public inputs, which are also part of the all_witness + /// contains the public inputs, which are also part of the `all_witness` pub full_public_inputs: Vec, - /// contains the public outputs, which are also part of the all_witness + /// contains the public outputs, which are also part of the `all_witness` pub public_outputs: Vec, } @@ -103,8 +103,11 @@ pub struct KimchiVesta { impl Witness { /// kimchi uses a transposed witness + #[must_use] pub fn to_kimchi_witness(&self) -> [Vec; NUM_REGISTERS] { - let transposed = vec![Vec::with_capacity(self.0.len()); NUM_REGISTERS]; + let transposed = (0..NUM_REGISTERS) + .map(|_| Vec::with_capacity(self.0.len())) + .collect::>(); let mut transposed: [_; NUM_REGISTERS] = transposed.try_into().unwrap(); for row in &self.0 { for (col, field) in row.iter().enumerate() { @@ -114,23 +117,29 @@ impl Witness { transposed } + #[must_use] pub fn len(&self) -> usize { self.0.len() } + #[must_use] pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn debug(&self) { for (row, values) in self.0.iter().enumerate() { - let values = values.iter().map(|v| v.pretty()).join(" | "); + let values = values + .iter() + .map(super::super::helpers::PrettyField::pretty) + .join(" | "); println!("{row} - {values}"); } } } impl KimchiVesta { + #[must_use] pub fn new(double_generic_gate_optimization: bool) -> Self { Self { next_variable: 0, @@ -191,7 +200,7 @@ impl KimchiVesta { .entry(var.index) .and_modify(|w| match w { Wiring::NotWired(old_cell) => { - *w = Wiring::Wired(vec![old_cell.clone(), annotated_cell.clone()]) + *w = Wiring::Wired(vec![old_cell.clone(), annotated_cell.clone()]); } Wiring::Wired(ref mut cells) => { cells.push(annotated_cell.clone()); @@ -292,7 +301,7 @@ impl Backend for KimchiVesta { let zero = VestaField::zero(); - let _ = &self.add_generic_gate( + let () = &self.add_generic_gate( label.unwrap_or("hardcode a constant"), vec![Some(var)], vec![VestaField::one(), zero, zero, zero, value.neg()], @@ -302,6 +311,7 @@ impl Backend for KimchiVesta { var } + #[allow(unused_variables)] fn finalize_circuit( &mut self, public_output: Option>, @@ -322,7 +332,7 @@ impl Backend for KimchiVesta { // for sanity check, we make sure that every cellvar created has ended up in a gate let mut written_vars = HashSet::new(); - for row in self.witness_table.iter() { + for row in &self.witness_table { 
row.iter().flatten().for_each(|cvar| { written_vars.insert(cvar.index); }); @@ -349,9 +359,10 @@ impl Backend for KimchiVesta { } // kimchi hack - if self.gates.len() <= 2 { - panic!("the circuit is either too small or does not constrain anything (TODO: better error)"); - } + assert!( + self.gates.len() > 2, + "the circuit is either too small or does not constrain anything (TODO: better error)" + ); // store the return value in the public input that was created for that ^ if let Some(public_output) = public_output { @@ -488,7 +499,7 @@ impl Backend for KimchiVesta { } fn generate_asm(&self, sources: &Sources, debug: bool) -> String { - let mut res = "".to_string(); + let mut res = String::new(); // version res.push_str(&crate::utils::noname_version()); @@ -496,7 +507,7 @@ impl Backend for KimchiVesta { // vars let mut vars: OrderedHashSet = OrderedHashSet::default(); - for Gate { coeffs, .. } in self.gates.iter() { + for Gate { coeffs, .. } in &self.gates { extract_vars_from_coeffs(&mut vars, coeffs); } @@ -516,7 +527,7 @@ impl Backend for KimchiVesta { for (row, (Gate { typ, coeffs }, debug_info)) in self.gates.iter().zip(&self.debug_info).enumerate() { - println!("gate {:?}", row); + println!("gate {row:?}"); // gate # if debug { writeln!(res, "╭{s}", s = "─".repeat(80)).unwrap(); diff --git a/src/backends/kimchi/prover.rs b/src/backends/kimchi/prover.rs index a2818892b..fa60ef23a 100644 --- a/src/backends/kimchi/prover.rs +++ b/src/backends/kimchi/prover.rs @@ -62,6 +62,7 @@ pub struct VerifierIndex { // Setup // +#[allow(clippy::type_complexity)] impl KimchiVesta { pub fn compile_to_indexes( &self, @@ -247,12 +248,12 @@ mod tests { #[test] fn test_public_output_constraint() -> miette::Result<()> { - let code = r#"fn main(pub public_input: Field, private_input: Field) -> Field { + let code = r"fn main(pub public_input: Field, private_input: Field) -> Field { let xx = private_input + public_input; assert_eq(xx, 2); let yy = xx + 6; return yy; - }"#; + }"; let mut sources = Sources::new(); let mut tast = TypeChecker::new(); diff --git a/src/backends/mod.rs b/src/backends/mod.rs index 10ec127d3..d5741542f 100644 --- a/src/backends/mod.rs +++ b/src/backends/mod.rs @@ -1,4 +1,4 @@ -use std::{fmt::Debug, hash::Hash, str::FromStr}; +use std::{fmt::Debug, str::FromStr}; use ark_ff::{Field, Zero}; use num_bigint::BigUint; @@ -9,7 +9,6 @@ use crate::{ error::{Error, ErrorKind, Result}, helpers::PrettyField, imports::FnHandle, - parser::FunctionDef, var::{Value, Var}, witness::WitnessEnv, }; @@ -32,6 +31,8 @@ pub trait BackendField: /// It is intended to make it opaque to the frondend. pub trait BackendVar: Clone + Debug + PartialEq + Eq {} +// TODO: Fix this by `Box`ing `KimchiVesta`. +#[allow(clippy::large_enum_variant)] pub enum BackendKind { KimchiVesta(KimchiVesta), R1csBls12_381(R1CS), @@ -39,14 +40,17 @@ pub enum BackendKind { } impl BackendKind { + #[must_use] pub fn new_kimchi_vesta(use_double_generic: bool) -> Self { Self::KimchiVesta(KimchiVesta::new(use_double_generic)) } + #[must_use] pub fn new_r1cs_bls12_381() -> Self { Self::R1csBls12_381(R1CS::new()) } + #[must_use] pub fn new_r1cs_bn254() -> Self { Self::R1csBn254(R1CS::new()) } @@ -57,8 +61,8 @@ pub trait Backend: Clone { /// The circuit field / scalar field that the circuit is written on. type Field: BackendField; - /// The CellVar type for the backend. - /// Different backend is allowed to have different CellVar types. + /// The `CellVar` type for the backend. 
+ /// Different backend is allowed to have different `CellVar` types. type Var: BackendVar; /// The generated witness type for the backend. Each backend may define its own witness format to be generated. @@ -122,7 +126,7 @@ pub trait Backend: Clone { span: Span, ) -> Self::Var; - /// Backends should implement this function to load and compute the value of a CellVar. + /// Backends should implement this function to load and compute the value of a `CellVar`. fn compute_var( &self, env: &mut WitnessEnv, diff --git a/src/backends/r1cs/mod.rs b/src/backends/r1cs/mod.rs index d7b9efeb6..9ce0a9b1a 100644 --- a/src/backends/r1cs/mod.rs +++ b/src/backends/r1cs/mod.rs @@ -1,3 +1,7 @@ +// TODO: There is a bunch of places where there are unused vars. +// Remove this lint allowance when fixed. +#![allow(unused_variables)] + pub mod builtin; pub mod snarkjs; @@ -10,8 +14,6 @@ use serde::{Deserialize, Serialize}; use crate::constants::Span; use crate::error::{Error, ErrorKind, Result}; -use crate::helpers::PrettyField; -use crate::parser::FunctionDef; use crate::{circuit_writer::DebugInfo, var::Value}; use super::{Backend, BackendField, BackendVar}; @@ -39,10 +41,10 @@ impl CellVar { impl BackendVar for LinearCombination {} /// Linear combination of variables and constants. -/// For example, the linear combination is represented as a * f_a + b * f_b + f_c -/// f_a and f_b are the coefficients of a and b respectively. -/// a and b are represented by CellVar. -/// The constant f_c is represented by the constant field, will always multiply with the variable at index 0 which is always 1. +/// For example, the linear combination is represented as a * `f_a` + b * `f_b` + `f_c` +/// `f_a` and `f_b` are the coefficients of a and b respectively. +/// a and b are represented by `CellVar`. +/// The constant `f_c` is represented by the constant field, will always multiply with the variable at index 0 which is always 1. #[derive(Clone, Debug, PartialEq, Eq)] pub struct LinearCombination where @@ -57,7 +59,7 @@ impl LinearCombination where F: BackendField, { - /// Convert to a CellVar. + /// Convert to a `CellVar`. /// It should /// - be used when the linear combination is a single variable. /// - panic if the linear combination is not a single variable or has a non-zero constant. @@ -95,6 +97,7 @@ where } /// Create a linear combination to represent constant zero. + #[allow(dead_code)] fn zero(span: Span) -> Self { LinearCombination { terms: HashMap::new(), @@ -104,6 +107,7 @@ where } /// Create a linear combination from a list of vars + #[allow(dead_code)] fn from_vars(vars: Vec, span: Span) -> Self { let terms = vars.into_iter().map(|var| (var, F::one())).collect(); LinearCombination { @@ -152,8 +156,8 @@ where } } - /// Enforces a constraint for the multiplication of two CellVars. - /// The constraint reduces the multiplication to a new CellVar variable, + /// Enforces a constraint for the multiplication of two `CellVars`. + /// The constraint reduces the multiplication to a new `CellVar` variable, /// which represents: self * other = res. fn mul(&self, cs: &mut R1CS, other: &Self, span: Span) -> Self { let res = cs.new_internal_var(Value::Mul(self.clone(), other.clone()), span); @@ -162,12 +166,12 @@ where res } - /// Enforces a constraint for the equality of two CellVars. + /// Enforces a constraint for the equality of two `CellVars`. /// It needs to constraint: self * 1 = other. 
fn assert_eq(&self, cs: &mut R1CS, other: &Self, span: Span) { let one_cvar = LinearCombination::one(span); - cs.enforce_constraint(self, &one_cvar, other, span) + cs.enforce_constraint(self, &one_cvar, other, span); } } @@ -207,7 +211,7 @@ where } } -/// R1CS backend with bls12_381 field. +/// R1CS backend with `bls12_381` field. #[derive(Clone)] pub struct R1CS where @@ -226,10 +230,20 @@ where finalized: bool, } +impl Default for R1CS +where + F: BackendField, +{ + fn default() -> Self { + Self::new() + } +} + impl R1CS where F: BackendField, { + #[must_use] pub fn new() -> Self { Self { constraints: Vec::new(), @@ -243,6 +257,7 @@ where } /// Returns the number of constraints. + #[must_use] pub fn num_constraints(&self) -> usize { self.constraints.len() } @@ -290,7 +305,7 @@ where } #[derive(Debug)] -/// An intermediate struct for SnarkjsExporter to reorder the witness and convert to snarkjs format. +/// An intermediate struct for `SnarkjsExporter` to reorder the witness and convert to snarkjs format. pub struct GeneratedWitness where F: BackendField, @@ -316,8 +331,8 @@ where self.new_internal_var(Value::Constant(F::one()), Span::default()); } - /// Create a new CellVar and record in witness_vector vector. - /// The underlying type of CellVar is always WitnessVar. + /// Create a new `CellVar` and record in `witness_vector` vector. + /// The underlying type of `CellVar` is always `WitnessVar`. fn new_internal_var( &mut self, val: crate::var::Value, @@ -348,7 +363,7 @@ where /// Final checks for generating the circuit. /// todo: we might need to rethink about this interface - /// - we might just need the returned_cells argument, as the backend can record the public outputs itself? + /// - we might just need the `returned_cells` argument, as the backend can record the public outputs itself? fn finalize_circuit( &mut self, public_output: Option>, @@ -532,7 +547,7 @@ where let (a, b, c) = (&fmt_lcs[0], &fmt_lcs[1], &fmt_lcs[2]); // format an entire constraint - res.push_str(&format!("{} == ({}) * ({})\n", c, a, b)); + res.push_str(&format!("{c} == ({a}) * ({b})\n")); if debug { // link the constraint to the source code @@ -580,7 +595,7 @@ where fn assert_eq_const(&mut self, x: &LinearCombination, cst: F, span: Span) { let c = LinearCombination::from_const(cst, span); - x.assert_eq(self, &c, span) + x.assert_eq(self, &c, span); } fn assert_eq_var( @@ -589,7 +604,7 @@ where rhs: &LinearCombination, span: Span, ) { - lhs.assert_eq(self, rhs, span) + lhs.assert_eq(self, rhs, span); } /// Adds the public input cell vars. @@ -620,13 +635,9 @@ where #[cfg(test)] mod tests { - use crate::{ - backends::{ - r1cs::{R1csBls12381Field, R1CS}, - Backend, BackendKind, - }, - lexer::Token, - parser::{types::FnSig, FunctionDef, ParserCtx}, + use crate::backends::{ + r1cs::{R1csBls12381Field, R1CS}, + Backend, BackendKind, }; use ark_ff::One; use rstest::rstest; diff --git a/src/backends/r1cs/snarkjs.rs b/src/backends/r1cs/snarkjs.rs index bc49e1175..3ab357a1c 100644 --- a/src/backends/r1cs/snarkjs.rs +++ b/src/backends/r1cs/snarkjs.rs @@ -1,3 +1,7 @@ +// TODO: The impls in this file return a `noname::error::Result`, but writers return +// std::io::Error. Remove this allowance once we properly handle errors. 
+#![allow(unused_must_use)] + use crate::backends::BackendField; use crate::error::Result; use constraint_writers::r1cs_writer::{ConstraintSection, HeaderData, R1CSWriter}; @@ -28,10 +32,11 @@ impl SnarkjsLinearCombination { let mut terms = self.terms.clone(); // add the constant term with var indexed at 0 - if terms.insert(0, self.constant.clone()).is_some() { - // sanity check - panic!("The first var should be preserved for constant term"); - } + // sanity check + assert!( + terms.insert(0, self.constant.clone()).is_none(), + "The first var should be preserved for constant term" + ); terms } @@ -55,16 +60,20 @@ where r1cs_backend: R1CS, } +// TODO: The impls in this file return a `noname::error::Result`, but writers return +// std::io::Error. Remove this allowance once we properly handle errors. +#[allow(unused_results)] impl SnarkjsExporter where F: BackendField, { + #[must_use] pub fn new(r1cs_backend: R1CS) -> SnarkjsExporter { SnarkjsExporter { r1cs_backend } } /// Restructure the linear combination to align with the snarkjs format. - /// - convert the factors to BigInt. + /// - convert the factors to `BigInt`. fn restructure_lc(&self, lc: &LinearCombination) -> SnarkjsLinearCombination { let terms = lc .terms @@ -208,9 +217,10 @@ impl WitnessWriter { // Write the file type (magic string) as bytes let file_type_bytes = file_type.as_bytes(); - if file_type_bytes.len() != 4 { - panic!("File type must be 4 characters long"); - } + assert!( + file_type_bytes.len() == 4, + "File type must be 4 characters long" + ); writer.write_all(file_type_bytes); // Write the version as a 32-bit unsigned integer in little endian @@ -277,15 +287,13 @@ impl WitnessWriter { // Start writing the second section self.start_write_section(2); - /// Write the witness values to the file - /// Each witness value occupies the same number of bytes as the prime field for value in witness { - self.write_big_int(value.clone(), field_n_bytes as usize); + self.write_big_int(value.clone(), field_n_bytes); } self.end_write_section(); } - /// Write a BigInt to the file + /// Write a `BigInt` to the file fn write_big_int(&mut self, value: BigInt, size: usize) { let bytes = value.to_bytes_le().1; diff --git a/src/circuit_writer/fn_env.rs b/src/circuit_writer/fn_env.rs index a94708839..4212428ef 100644 --- a/src/circuit_writer/fn_env.rs +++ b/src/circuit_writer/fn_env.rs @@ -26,10 +26,12 @@ where } impl VarInfo { + #[must_use] pub fn new(var: Var, mutable: bool, typ: Option) -> Self { Self { var, mutable, typ } } + #[must_use] pub fn reassign(&self, var: Var) -> Self { Self { var, @@ -38,6 +40,7 @@ impl VarInfo { } } + #[must_use] pub fn reassign_range(&self, var: Var, start: usize, len: usize) -> Self { // sanity check assert_eq!(var.len(), len); @@ -78,7 +81,8 @@ where } impl FnEnv { - /// Creates a new FnEnv + /// Creates a new `FnEnv` + #[must_use] pub fn new() -> Self { Self { current_scope: 0, @@ -99,7 +103,7 @@ impl FnEnv { // (we don't need to keep them around to detect shadowing, // as we already did that in type checker) let mut to_delete = vec![]; - for (name, (scope, _)) in self.vars.iter() { + for (name, (scope, _)) in &self.vars { if *scope > self.current_scope { to_delete.push(name.clone()); } @@ -131,14 +135,16 @@ impl FnEnv { /// Retrieves type information on a variable, given a name. /// If the variable is not in scope, return false. // TODO: return an error no? 
+ #[must_use] pub fn get_local_var(&self, var_name: &str) -> VarInfo { let (scope, var_info) = self .vars .get(var_name) .unwrap_or_else(|| panic!("type checking bug: local variable `{var_name}` not found")); - if !self.is_in_scope(*scope) { - panic!("type checking bug: local variable `{var_name}` not in scope"); - } + assert!( + self.is_in_scope(*scope), + "type checking bug: local variable `{var_name}` not in scope" + ); var_info.clone() } @@ -150,19 +156,21 @@ impl FnEnv { .get(var_name) .expect("type checking bug: local variable for reassigning not found"); - if !self.is_in_scope(*scope) { - panic!("type checking bug: local variable for reassigning not in scope"); - } + assert!( + self.is_in_scope(*scope), + "type checking bug: local variable for reassigning not in scope" + ); - if !var_info.mutable { - panic!("type checking bug: local variable for reassigning is not mutable"); - } + assert!( + var_info.mutable, + "type checking bug: local variable for reassigning is not mutable" + ); let var_info = var_info.reassign(var); self.vars.insert(var_name.to_string(), (*scope, var_info)); } - /// Same as [Self::reassign_var], but only reassigns a specific range of the variable. + /// Same as [`Self::reassign_var`], but only reassigns a specific range of the variable. pub fn reassign_var_range(&mut self, var_name: &str, var: Var, start: usize, len: usize) { // get the scope first, we don't want to modify that let (scope, var_info) = self @@ -170,13 +178,15 @@ impl FnEnv { .get(var_name) .expect("type checking bug: local variable for reassigning not found"); - if !self.is_in_scope(*scope) { - panic!("type checking bug: local variable for reassigning not in scope"); - } + assert!( + self.is_in_scope(*scope), + "type checking bug: local variable for reassigning not in scope" + ); - if !var_info.mutable { - panic!("type checking bug: local variable for reassigning is not mutable"); - } + assert!( + var_info.mutable, + "type checking bug: local variable for reassigning is not mutable" + ); let var_info = var_info.reassign_range(var, start, len); self.vars.insert(var_name.to_string(), (*scope, var_info)); diff --git a/src/circuit_writer/mod.rs b/src/circuit_writer/mod.rs index ec935ae9b..5e6243830 100644 --- a/src/circuit_writer/mod.rs +++ b/src/circuit_writer/mod.rs @@ -1,3 +1,7 @@ +// TODO: There is a bunch of places where there are unused vars. +// Remove this lint allowance when fixed. +#![allow(unused_variables)] + use crate::{ backends::Backend, constants::Span, @@ -98,7 +102,7 @@ impl CircuitWriter { } // - fn_env.add_local_var(var_name, var_info) + fn_env.add_local_var(var_name, var_info); } pub fn get_local_var( @@ -117,7 +121,7 @@ impl CircuitWriter { fn_env.get_local_var(var_name) } - /// Retrieves the [FnInfo] for the `main()` function. + /// Retrieves the [`FnInfo`] for the `main()` function. /// This function should only be called if we know there's a main function, /// if there's no main function it'll panic. 
pub fn main_info(&self) -> Result<&FnInfo> { @@ -215,7 +219,7 @@ impl CircuitWriter { Ok(CompiledCircuit::new(circuit_writer)) } - /// A wrapper for the backend generate_witness + /// A wrapper for the backend `generate_witness` pub fn generate_witness( &self, witness_env: &mut WitnessEnv, @@ -223,6 +227,7 @@ impl CircuitWriter { self.backend.generate_witness(witness_env) } + #[allow(clippy::type_complexity)] fn handle_arg( &mut self, arg: &FnArg, diff --git a/src/circuit_writer/writer.rs b/src/circuit_writer/writer.rs index 7b0d714da..c1a9ec0c1 100644 --- a/src/circuit_writer/writer.rs +++ b/src/circuit_writer/writer.rs @@ -35,7 +35,7 @@ pub enum GateKind { impl From for kimchi::circuits::gate::GateType { fn from(gate_kind: GateKind) -> Self { - use kimchi::circuits::gate::GateType::*; + use kimchi::circuits::gate::GateType::{Generic, Poseidon, Zero}; match gate_kind { GateKind::Zero => Zero, GateKind::DoubleGeneric => Generic, @@ -56,6 +56,7 @@ pub struct Gate { } impl Gate { + #[must_use] pub fn to_kimchi_gate(&self, row: usize) -> kimchi::circuits::gate::CircuitGate { kimchi::circuits::gate::CircuitGate { typ: self.typ.into(), @@ -99,7 +100,7 @@ impl PartialEq for AnnotatedCell { impl PartialOrd for AnnotatedCell { fn partial_cmp(&self, other: &Self) -> Option { - self.cell.partial_cmp(&other.cell) + Some(self.cmp(other)) } } @@ -235,7 +236,7 @@ impl CircuitWriter { module, name: struct_name, } => { - let qualified = FullyQualified::new(module, &struct_name); + let qualified = FullyQualified::new(module, struct_name); let struct_info = self .struct_info(&qualified) .ok_or(self.error(ErrorKind::UnexpectedError("struct not found"), span))? @@ -340,7 +341,8 @@ impl CircuitWriter { vars.push(var_info); } - let res = match &fn_info.kind { + // + match &fn_info.kind { // assert() <-- for example FnKind::BuiltIn(_sig, handle) => { let res = handle(self, &vars, expr.span); @@ -352,13 +354,10 @@ impl CircuitWriter { FnKind::Native(func) => { // module::fn_name(args) // ^^^^^^ - self.compile_native_function_call(&func, vars) + self.compile_native_function_call(func, vars) .map(|r| r.map(VarOrRef::Var)) } - }; - - // - res + } } ExprKind::FieldAccess { lhs, rhs } => { @@ -570,9 +569,7 @@ impl CircuitWriter { ExprKind::BigInt(b) => { let biguint = BigUint::from_str_radix(b, 10).expect("failed to parse number."); - let ff = B::Field::try_from(biguint).map_err(|_| { - self.error(ErrorKind::CannotConvertToField(b.to_string()), expr.span) - })?; + let ff = B::Field::from(biguint); let res = VarOrRef::Var(Var::new_constant(ff, expr.span)); Ok(Some(res)) diff --git a/src/cli/cmd_build_and_check.rs b/src/cli/cmd_build_and_check.rs index e1e0cda1f..705284669 100644 --- a/src/cli/cmd_build_and_check.rs +++ b/src/cli/cmd_build_and_check.rs @@ -54,7 +54,7 @@ static SUPPORTED_BACKENDS: Lazy = Lazy::new(|| { "Supported backends: `{}`", BACKEND_OPT_MAP .keys() - .map(|s| s.to_string()) + .map(|s| (*s).to_string()) .collect::>() .join(", ") ) @@ -92,7 +92,7 @@ pub fn cmd_build(args: CmdBuild) -> miette::Result<()> { .path .unwrap_or_else(|| std::env::current_dir().unwrap().try_into().unwrap()); - let (sources, prover_index, verifier_index) = build(&curr_dir, args.asm, args.debug)?; + let (_sources, _prover_index, verifier_index) = build(&curr_dir, args.asm, args.debug)?; // create COMPILED_DIR let compiled_path = curr_dir.join(COMPILED_DIR); @@ -122,14 +122,11 @@ pub fn cmd_build(args: CmdBuild) -> miette::Result<()> { let verifier_params = args .verifier_params .unwrap_or(compiled_path.join("verifier.nope")); - 
std::fs::write( - &verifier_params, - rmp_serde::to_vec(&verifier_index).unwrap(), - ) - .into_diagnostic() - .wrap_err(format!( - "could not write prover params to `{prover_params}`" - ))?; + std::fs::write(verifier_params, rmp_serde::to_vec(&verifier_index).unwrap()) + .into_diagnostic() + .wrap_err(format!( + "could not write prover params to `{prover_params}`" + ))?; println!("successfully built"); @@ -158,13 +155,14 @@ pub fn cmd_check(args: CmdCheck) -> miette::Result<()> { fn produce_all_asts(path: &PathBuf) -> miette::Result<(Sources, TypeChecker)> { // find manifest - let manifest = validate_package_and_get_manifest(&path, false)?; + let manifest = validate_package_and_get_manifest(path, false)?; // get all dependencies - get_deps_of_package(&manifest); + // TODO: Handle error properly + let _ = get_deps_of_package(&manifest); // produce dependency graph - let is_lib = is_lib(&path); + let is_lib = is_lib(path); let this = if is_lib { Some(UserRepo::new(&manifest.package.name)) @@ -396,10 +394,10 @@ pub fn cmd_run(args: CmdRun) -> miette::Result<()> { unimplemented!("kimchi-vesta backend is not yet supported for this command") } BackendKind::R1csBls12_381(r1cs) => { - run_r1cs_backend(r1cs, &curr_dir, public_inputs, private_inputs)? + run_r1cs_backend(r1cs, &curr_dir, public_inputs, private_inputs)?; } BackendKind::R1csBn254(r1cs) => { - run_r1cs_backend(r1cs, &curr_dir, public_inputs, private_inputs)? + run_r1cs_backend(r1cs, &curr_dir, public_inputs, private_inputs)?; } } @@ -433,13 +431,13 @@ where snarkjs_exporter.gen_wtns_file(&wtns_output_path.clone().into_string(), generated_witness); // display the info for the generated files - println!("Snarkjs R1CS file generated at: {}", r1cs_output_path); - println!("Snarkjs Witness file generated at: {}", wtns_output_path); + println!("Snarkjs R1CS file generated at: {r1cs_output_path}"); + println!("Snarkjs Witness file generated at: {wtns_output_path}"); Ok(()) } -fn test_r1cs_backend( +fn test_r1cs_backend( r1cs: R1CS, path: &PathBuf, public_inputs: JsonInputs, @@ -457,7 +455,7 @@ where let asm = compiled_circuit.asm(&sources, debug); - println!("{}", asm); + println!("{asm}"); Ok(()) } @@ -465,7 +463,7 @@ where fn typecheck_file(path: &PathBuf) -> miette::Result<(TypeChecker, Sources)> { let code = std::fs::read_to_string(path) .into_diagnostic() - .wrap_err_with(|| format!("could not read file: `{}` (are you sure it exists?)", path))?; + .wrap_err_with(|| format!("could not read file: `{path}` (are you sure it exists?)"))?; let mut sources = Sources::new(); let mut tast = TypeChecker::::new(); diff --git a/src/cli/cmd_new_and_init.rs b/src/cli/cmd_new_and_init.rs index 178bf0f4c..17ddcc827 100644 --- a/src/cli/cmd_new_and_init.rs +++ b/src/cli/cmd_new_and_init.rs @@ -1,16 +1,16 @@ use camino::Utf8PathBuf as PathBuf; use miette::{IntoDiagnostic, Result, WrapErr}; -const MAIN_CONTENT: &str = r#"fn main(pub xx: Field, yy: Field) { +const MAIN_CONTENT: &str = r"fn main(pub xx: Field, yy: Field) { let zz = yy + 1; assert_eq(zz, xx); } -"#; +"; -const LIB_CONTENT: &str = r#"fn add(xx: Field, yy: Field) -> Field { +const LIB_CONTENT: &str = r"fn add(xx: Field, yy: Field) -> Field { return xx + yy; } -"#; +"; #[derive(clap::Parser)] pub struct CmdNew { @@ -143,13 +143,11 @@ fn get_git_user() -> String { .output() .expect("failed to execute git command"); - if !output.status.success() { - panic!("failed to get git user name"); - } + assert!(output.status.success(), "failed to get git user name"); let output = 
String::from_utf8(output.stdout).expect("couldn't parse git output as utf8"); let username = output.trim().to_owned(); - username.replace(" ", "_").to_lowercase() + username.replace(' ', "_").to_lowercase() } diff --git a/src/cli/cmd_prove_and_verify.rs b/src/cli/cmd_prove_and_verify.rs index 27152333e..91d6fcac5 100644 --- a/src/cli/cmd_prove_and_verify.rs +++ b/src/cli/cmd_prove_and_verify.rs @@ -62,7 +62,7 @@ pub fn cmd_prove(args: CmdProve) -> miette::Result<()> { "proof created at path `{proof_path}`. You can use `noname --verify` to verify it. Note that you will need to pass the same JSON-encoded public inputs as you did when creating the proof. (If you didn't use the `--public-inputs` flag, then you don't need to pass any public inputs.)", ); } else { - println!("proof created at path `{proof_path}`. Since running the proof produced a public output `{public:?}`, you will need to also pass the expected public output to the verifier (who can run `noname verify --public-output '{public:?}'`).", public=public_output); + println!("proof created at path `{proof_path}`. Since running the proof produced a public output `{public_output:?}`, you will need to also pass the expected public output to the verifier (who can run `noname verify --public-output '{public_output:?}'`)."); } // @@ -93,13 +93,13 @@ pub fn cmd_verify(args: CmdVerify) -> miette::Result<()> { .path .unwrap_or_else(|| std::env::current_dir().unwrap().try_into().unwrap()); - let (_sources, _prover_index, verifier_index) = build(&curr_dir, false, false)?; + let (_sources, _prover_index, _verifier_index) = build(&curr_dir, false, false)?; // parse inputs - let mut public_inputs = parse_inputs(&args.public_inputs).unwrap(); + let _public_inputs = parse_inputs(&args.public_inputs).unwrap(); if let Some(public_output) = &args.public_output { - let public_output = parse_inputs(public_output).unwrap(); + let _public_output = parse_inputs(public_output).unwrap(); // TODO: add it to the public input todo!(); @@ -114,7 +114,7 @@ pub fn cmd_verify(args: CmdVerify) -> miette::Result<()> { miette::bail!("proof does not exist at path `{proof_path}`. Perhaps pass the correct path via the `--proof-path` flag?"); } - let proof = rmp_serde::from_read(std::fs::File::open(&proof_path).unwrap()) + rmp_serde::from_read(std::fs::File::open(&proof_path).unwrap()) .into_diagnostic() .wrap_err(format!( "could not deserialize the given proof at `{proof_path}`" @@ -129,6 +129,4 @@ pub fn cmd_verify(args: CmdVerify) -> miette::Result<()> { .wrap_err("Failed to verify proof")?; */ // - - Ok(()) } diff --git a/src/cli/manifest.rs b/src/cli/manifest.rs index c304f485d..3ce0271f8 100644 --- a/src/cli/manifest.rs +++ b/src/cli/manifest.rs @@ -19,7 +19,7 @@ pub struct Package { impl Manifest { pub(crate) fn dependencies(&self) -> Vec { - self.package.dependencies.clone().unwrap_or(vec![]) + self.package.dependencies.clone().unwrap_or_default() } } diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 84fb80ee8..8e90c1489 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -13,5 +13,5 @@ pub use cmd_prove_and_verify::{cmd_prove, cmd_verify, CmdProve, CmdVerify}; /// The directory under the user home directory containing all noname-related files. pub const NONAME_DIRECTORY: &str = ".noname"; -/// The directory under [NONAME_DIRECTORY] containing all package-related files. +/// The directory under [`NONAME_DIRECTORY`] containing all package-related files. 
pub const PACKAGE_DIRECTORY: &str = "packages"; diff --git a/src/cli/packages.rs b/src/cli/packages.rs index 03060c6d2..dd68a70c0 100644 --- a/src/cli/packages.rs +++ b/src/cli/packages.rs @@ -21,7 +21,7 @@ pub struct UserRepo { impl UserRepo { pub(crate) fn new(arg: &str) -> Self { - let mut args = arg.split("/"); + let mut args = arg.split('/'); let user = args.next().unwrap().to_string(); let repo = args.next().unwrap().to_string(); assert!(args.next().is_none()); @@ -137,6 +137,9 @@ impl DependencyGraph { Ok(node) } + // TODO: Fix usage of self in `from_*` fn. + // Either this should be renamed, or `self` shouldn't be used. + #[allow(clippy::wrong_self_convention)] pub(crate) fn from_leaves_to_roots(&self) -> Vec { let mut res = vec![]; @@ -200,6 +203,7 @@ pub fn get_dep(dep: &UserRepo) -> Result { } /// Returns the dependencies of a package (given it's manifest). +#[must_use] pub fn get_deps_of_package(manifest: &Manifest) -> Vec { manifest .dependencies() @@ -222,7 +226,7 @@ pub fn get_dep_code(dep: &UserRepo) -> Result { let path = path_to_package(dep); let lib_file = path.join("src").join("lib.no"); - let lib_content = std::fs::read_to_string(&lib_file) + let lib_content = std::fs::read_to_string(lib_file) .into_diagnostic() .wrap_err_with(|| format!("could not read file `{path}`"))?; @@ -264,6 +268,7 @@ pub fn download_from_github(dep: &UserRepo) -> Result<()> { Ok(()) } +#[must_use] pub fn is_lib(path: &PathBuf) -> bool { path.join("src").join("lib.no").exists() } @@ -335,7 +340,7 @@ mod tests { childs.push(child.clone()); // make sure that each child has their own presence in the cache - dep_graph.cached_manifests.entry(child).or_insert(vec![]); + dep_graph.cached_manifests.entry(child).or_default(); } dep_graph.cached_manifests.insert(parent, childs); @@ -347,10 +352,7 @@ mod tests { let mut libs = vec![]; for dep_str in deps_str { let dep = dep(dep_str); - dep_graph - .cached_manifests - .entry(dep.clone()) - .or_insert(vec![]); + dep_graph.cached_manifests.entry(dep.clone()).or_default(); libs.push(dep); } diff --git a/src/compiler.rs b/src/compiler.rs index bbcc7da9f..4994f9f3f 100644 --- a/src/compiler.rs +++ b/src/compiler.rs @@ -1,8 +1,8 @@ //! This module is a wrapper API around noname. //! It is important that user-facing features use the functions here, //! as they attach the correct filename and source code to errors. -//! It does that by transforming our [Error] type into a [miette::Error] type for all functions here. -//! (via the [IntoMiette] trait that we define here.) +//! It does that by transforming our [Error] type into a [`miette::Error`] type for all functions here. +//! (via the [`IntoMiette`] trait that we define here.) use std::collections::HashMap; @@ -24,7 +24,14 @@ pub struct Sources { pub map: HashMap, } +impl Default for Sources { + fn default() -> Self { + Self::new() + } +} + impl Sources { + #[must_use] pub fn new() -> Self { let mut map = HashMap::new(); map.insert( @@ -40,6 +47,7 @@ impl Sources { self.id } + #[must_use] pub fn get(&self, id: &usize) -> Option<&(String, String)> { self.map.get(id) } @@ -66,7 +74,7 @@ impl IntoMiette for Result { .expect("couldn't find source") .clone(); let report: miette::Report = err.into(); - return Err(report.with_source_code(NamedSource::new(filename, source))); + Err(report.with_source_code(NamedSource::new(filename, source))) } } } @@ -84,7 +92,7 @@ pub fn typecheck_next_file( .into_miette(sources) } -/// This should not be used directly. Check [get_tast] instead. +/// This should not be used directly. 
Check [`get_tast`] instead. pub fn typecheck_next_file_inner( typechecker: &mut TypeChecker, this_module: Option, @@ -116,7 +124,7 @@ pub fn get_nast( let code = &sources.map[&filename_id].1; // lexer - let tokens = Token::parse(filename_id, &code)?; + let tokens = Token::parse(filename_id, code)?; if std::env::var("NONAME_VERBOSE").is_ok() { println!("lexer succeeded"); } diff --git a/src/constants.rs b/src/constants.rs index 467c22495..20d2cdca1 100644 --- a/src/constants.rs +++ b/src/constants.rs @@ -24,6 +24,7 @@ impl From for miette::SourceSpan { } impl Span { + #[must_use] pub fn new(filename_id: usize, start: usize, len: usize) -> Self { Self { filename_id, @@ -32,14 +33,17 @@ impl Span { } } + #[must_use] pub fn is_empty(&self) -> bool { self.start == 0 } + #[must_use] pub fn end(&self) -> usize { self.start + self.len } + #[must_use] pub fn merge_with(self, other: Self) -> Self { assert_eq!(self.filename_id, other.filename_id); let real_len = other.end() - self.start; diff --git a/src/error.rs b/src/error.rs index 8922e00dc..cb819b2b0 100644 --- a/src/error.rs +++ b/src/error.rs @@ -27,7 +27,8 @@ pub struct Error { } impl Error { - /// Creates a new [Error] from an [ErrorKind]. + /// Creates a new [Error] from an [`ErrorKind`]. + #[must_use] pub fn new(label: &'static str, kind: ErrorKind, span: Span) -> Self { Self { label, kind, span } } diff --git a/src/imports.rs b/src/imports.rs index 9006d2d14..a028c051d 100644 --- a/src/imports.rs +++ b/src/imports.rs @@ -62,11 +62,11 @@ impl std::fmt::Debug for BuiltinModule { /// * `&[Var]`: take an unbounded list of variables, this is because built-ins can take any number of arguments, and different built-ins might take different types of arguments /// * `Span`: take a span to return user-friendly errors /// * `-> Result>`: return a `Result` with an `Option` of a `Var`. This is because built-ins can return a variable, or they can return nothing. If they return nothing, then the `Option` will be `None`. If they return a variable, then the `Option` will be `Some(Var)`. -pub type FnHandle = fn( +pub type FnHandle = fn( &mut CircuitWriter, - &[VarInfo], + &[VarInfo<::Field, ::Var>], Span, -) -> Result>>; +) -> Result::Field, ::Var>>>; /// The different types of a noname function. 
#[derive(Clone, Serialize, Deserialize)] diff --git a/src/inputs.rs b/src/inputs.rs index baaa95fdb..5b1df955c 100644 --- a/src/inputs.rs +++ b/src/inputs.rs @@ -76,9 +76,7 @@ impl CompiledCircuit { } (TyKind::Array(el_typ, size), Value::Array(values)) => { - if values.len() != (*size as usize) { - panic!("wrong size of array"); - } + assert!(values.len() == (*size as usize), "wrong size of array"); let mut res = vec![]; for value in values { let el = self.parse_single_input(value, el_typ)?; @@ -103,9 +101,10 @@ impl CompiledCircuit { let fields = &struct_info.fields; // make sure that they're the same length - if fields.len() != map.len() { - panic!("wrong number of fields in struct (TODO: better error)"); - } + assert!( + fields.len() == map.len(), + "wrong number of fields in struct (TODO: better error)" + ); // parse each field let mut res = vec![]; @@ -122,12 +121,10 @@ impl CompiledCircuit { Ok(res) } - (expected, observed) => { - return Err(ParsingError::MismatchJsonArgument( - expected.clone(), - observed, - )); - } + (expected, observed) => Err(ParsingError::MismatchJsonArgument( + expected.clone(), + observed, + )), } } } diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs index 4fd60c0b5..a1f403338 100644 --- a/src/lexer/mod.rs +++ b/src/lexer/mod.rs @@ -20,6 +20,7 @@ pub struct LexerCtx { } impl LexerCtx { + #[must_use] pub fn new(filename_id: usize) -> Self { Self { offset: 0, @@ -27,10 +28,12 @@ impl LexerCtx { } } + #[must_use] pub fn error(&self, kind: ErrorKind, span: Span) -> Error { Error::new("lexer", kind, span) } + #[must_use] pub fn span(&self, start: usize, len: usize) -> Span { Span::new(self.filename_id, start, len) } @@ -69,6 +72,7 @@ pub enum Keyword { } impl Keyword { + #[must_use] pub fn parse(s: &str) -> Option { match s { "use" => Some(Self::Use), @@ -109,7 +113,7 @@ impl Display for Keyword { Self::Const => "const", }; - write!(f, "{}", desc) + write!(f, "{desc}") } } @@ -152,7 +156,12 @@ pub enum TokenKind { impl Display for TokenKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - use TokenKind::*; + use TokenKind::{ + Ampersand, BigInt, Colon, Comma, Comment, Dot, DoubleAmpersand, DoubleColon, DoubleDot, + DoubleEqual, DoublePipe, Equal, Exclamation, Greater, Hex, Identifier, Keyword, + LeftBracket, LeftCurlyBracket, LeftParen, Less, Minus, Pipe, Plus, Question, + RightArrow, RightBracket, RightCurlyBracket, RightParen, SemiColon, Slash, Star, + }; let desc = match self { Keyword(_) => "keyword (use, let, etc.)", Identifier(_) => { @@ -191,7 +200,7 @@ impl Display for TokenKind { // TokenType::Literal => "`\"something\"", }; - write!(f, "{}", desc) + write!(f, "{desc}") } } @@ -432,22 +441,22 @@ impl Token { mod tests { use super::*; - const CODE: &str = r#"use crypto::poseidon; + const CODE: &str = r"use crypto::poseidon; fn main(public_input: [fel; 3], private_input: [fel; 3]) -> [fel; 8] { let digest = poseidon(private_input); assert(digest == public_input); } -"#; +"; #[test] fn test_lexer() { match Token::parse(0, CODE) { Ok(root) => { - println!("{:#?}", root); + println!("{root:#?}"); } Err(e) => { - println!("{:?}", e); + println!("{e:?}"); } } } diff --git a/src/lexer/tokens.rs b/src/lexer/tokens.rs index 430c5018d..34af7ddd5 100644 --- a/src/lexer/tokens.rs +++ b/src/lexer/tokens.rs @@ -1,4 +1,4 @@ -//! Since [std::iter::Peekable] in Rust advances the iterator, +//! Since [`std::iter::Peekable`] in Rust advances the iterator, //! I can't use it for peeking tokens. //! 
I haven't found a better way than implementing a wrapper //! that allows me to peek... @@ -18,6 +18,7 @@ pub struct Tokens { } impl Tokens { + #[must_use] pub fn new(tokens: Vec) -> Self { Self { peeked: None, @@ -33,12 +34,12 @@ impl Tokens { } else { // otherwise get from iterator and store in peeked let token = self.inner.next(); - self.peeked = token.clone(); + self.peeked.clone_from(&token); token } } - /// Like next() except that it also stores the last seen token in the given context + /// Like `next()` except that it also stores the last seen token in the given context /// (useful for debugging) pub fn bump(&mut self, ctx: &mut ParserCtx) -> Option { if let Some(token) = self.peeked.take() { @@ -47,18 +48,18 @@ impl Tokens { } else { let token = self.inner.next(); if token.is_some() { - ctx.last_token = token.clone(); + ctx.last_token.clone_from(&token); } token } } - /// Like [Self::bump] but errors with `err` pointing to the latest token + /// Like [`Self::bump`] but errors with `err` pointing to the latest token pub fn bump_err(&mut self, ctx: &mut ParserCtx, err: ErrorKind) -> Result { self.bump(ctx) .ok_or_else(|| ctx.error(err, ctx.last_span())) } - /// Like [Self::bump] but errors if the token is not `typ` + /// Like [`Self::bump`] but errors if the token is not `typ` pub fn bump_expected(&mut self, ctx: &mut ParserCtx, typ: TokenKind) -> Result { let token = self.bump_err(ctx, ErrorKind::MissingToken)?; if token.kind == typ { @@ -74,7 +75,7 @@ impl Tokens { Some(Token { kind: TokenKind::Identifier(value), span, - }) => Ok(Ident { value: value, span }), + }) => Ok(Ident { value, span }), Some(token) => Err(ctx.error(kind, token.span)), None => Err(ctx.error(kind, ctx.last_span())), } diff --git a/src/lib.rs b/src/lib.rs index dad2d58d4..de92103f5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -51,7 +51,7 @@ pub mod helpers { let bigint: num_bigint::BigUint = (*self).into(); let inv: num_bigint::BigUint = self.neg().into(); // gettho way of splitting the field into positive and negative elements if inv < bigint { - format!("-{}", inv) + format!("-{inv}") } else { bigint.to_string() } @@ -62,6 +62,7 @@ pub mod helpers { impl PrettyField for R1csBls12381Field {} impl PrettyField for R1csBn254Field {} + #[must_use] pub fn poseidon(input: [VestaField; 2]) -> VestaField { let mut sponge: ArithmeticSponge = ArithmeticSponge::new(fp_kimchi::static_params()); diff --git a/src/name_resolution/context.rs b/src/name_resolution/context.rs index cffeb9c1c..7a8c63ab5 100644 --- a/src/name_resolution/context.rs +++ b/src/name_resolution/context.rs @@ -32,8 +32,8 @@ impl NameResCtx { Error::new("name resolution", kind, span) } - /// Resolves a single [ModulePath]. - /// `force` is set to `true` if it is expected that the [ModulePath] is set to [ModulePath::Local]. + /// Resolves a single [`ModulePath`]. + /// `force` is set to `true` if it is expected that the [`ModulePath`] is set to [`ModulePath::Local`]. /// This is usually the case for things like struct or function definitions. 
pub(crate) fn resolve(&self, module: &mut ModulePath, local: bool) -> Result<()> { match module { diff --git a/src/name_resolution/mod.rs b/src/name_resolution/mod.rs index 116ddb771..97b2b2bbd 100644 --- a/src/name_resolution/mod.rs +++ b/src/name_resolution/mod.rs @@ -86,7 +86,7 @@ mod tests { use super::*; - const CODE: &str = r#" + const CODE: &str = r" use user::repo; const some_cst = 0; @@ -99,7 +99,7 @@ mod tests { let aa = Thing { bb: yy }; return repo::thing(xx, aa); } -"#; +"; #[test] fn test_name_res() { diff --git a/src/negative_tests.rs b/src/negative_tests.rs index 2acc77cb0..43fa68446 100644 --- a/src/negative_tests.rs +++ b/src/negative_tests.rs @@ -8,11 +8,11 @@ use crate::{ #[test] fn test_return() { // no return expected - let code = r#" + let code = r" fn thing(xx: Field) { return xx; } - "#; + "; let mut tast = TypeChecker::::new(); let res = typecheck_next_file_inner( @@ -30,11 +30,11 @@ fn test_return() { #[test] fn test_return_expected() { // return expected - let code = r#" + let code = r" fn thing(xx: Field) -> Field { let yy = xx + 1; } - "#; + "; let mut tast = TypeChecker::::new(); let res = typecheck_next_file_inner( @@ -52,11 +52,11 @@ fn test_return_expected() { #[test] fn test_return_mismatch() { // return type mismatch - let code = r#" + let code = r" fn thing(xx: Field) -> Field { return true; } - "#; + "; let mut tast = TypeChecker::::new(); let res = typecheck_next_file_inner( diff --git a/src/parser/expr.rs b/src/parser/expr.rs index 816a878f4..197821ad6 100644 --- a/src/parser/expr.rs +++ b/src/parser/expr.rs @@ -206,7 +206,7 @@ impl Expr { } // true or false - TokenKind::Keyword(Keyword::True) | TokenKind::Keyword(Keyword::False) => { + TokenKind::Keyword(Keyword::True | Keyword::False) => { let is_true = matches!(token.kind, TokenKind::Keyword(Keyword::True)); Expr::new(ctx, ExprKind::Bool(is_true), span) @@ -343,7 +343,7 @@ impl Expr { lhs.parse_rhs(ctx, tokens) } - /// an expression is sometimes unfinished when we parse it with [Self::parse], + /// an expression is sometimes unfinished when we parse it with [`Self::parse`], /// we use this function to see if the expression we just parsed (`self`) is actually part of a bigger expression fn parse_rhs(self, ctx: &mut ParserCtx, tokens: &mut Tokens) -> Result { // we peek into what's next to see if there's an expression that uses diff --git a/src/parser/mod.rs b/src/parser/mod.rs index da765cbe3..28a0700da 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -62,6 +62,7 @@ pub struct ParserCtx { } impl ParserCtx { + #[must_use] pub fn new(filename_id: usize, node_id: usize) -> Self { Self { node_id, @@ -70,6 +71,7 @@ impl ParserCtx { } } + #[must_use] pub fn error(&self, kind: ErrorKind, span: Span) -> Error { Error::new("parser", kind, span) } @@ -81,12 +83,12 @@ impl ParserCtx { } // TODO: I think I don't need this, I should always be able to use the last token I read if I don't see anything, otherwise maybe just write -1 to say "EOF" + #[must_use] pub fn last_span(&self) -> Span { let span = self .last_token .as_ref() - .map(|token| token.span) - .unwrap_or(Span::new(self.filename_id, 0, 0)); + .map_or(Span::new(self.filename_id, 0, 0), |token| token.span); Span::new(self.filename_id, span.end(), 0) } } @@ -197,28 +199,28 @@ mod tests { #[test] fn fn_signature() { - let code = r#"main(pub public_input: [Fel; 3], private_input: [Fel; 3]) -> [Fel; 3] { return public_input; }"#; + let code = r"main(pub public_input: [Fel; 3], private_input: [Fel; 3]) -> [Fel; 3] { return public_input; }"; let 
tokens = &mut Token::parse(0, code).unwrap(); let ctx = &mut ParserCtx::default(); let parsed = FunctionDef::parse(ctx, tokens).unwrap(); - println!("{:?}", parsed); + println!("{parsed:?}"); } #[test] fn statement_assign() { - let code = r#"let digest = poseidon(private_input);"#; + let code = r"let digest = poseidon(private_input);"; let tokens = &mut Token::parse(0, code).unwrap(); let ctx = &mut ParserCtx::default(); let parsed = Stmt::parse(ctx, tokens).unwrap(); - println!("{:?}", parsed); + println!("{parsed:?}"); } #[test] fn statement_assert() { - let code = r#"assert(digest == public_input);"#; + let code = r"assert(digest == public_input);"; let tokens = &mut Token::parse(0, code).unwrap(); let ctx = &mut ParserCtx::default(); let parsed = Stmt::parse(ctx, tokens).unwrap(); - println!("{:?}", parsed); + println!("{parsed:?}"); } } diff --git a/src/parser/structs.rs b/src/parser/structs.rs index 38c5f62bf..f145b12b7 100644 --- a/src/parser/structs.rs +++ b/src/parser/structs.rs @@ -115,9 +115,10 @@ impl CustomType { pub fn parse(ctx: &mut ParserCtx, tokens: &mut Tokens) -> Result { let ty_name = tokens.bump_ident(ctx, ErrorKind::InvalidType)?; - if !is_type(&ty_name.value) { - panic!("type name should start with uppercase letter (TODO: better error"); - } + assert!( + is_type(&ty_name.value), + "type name should start with uppercase letter (TODO: better error" + ); // make sure that this type is allowed if !matches!( diff --git a/src/parser/types.rs b/src/parser/types.rs index c0168fd2c..a590fca29 100644 --- a/src/parser/types.rs +++ b/src/parser/types.rs @@ -1,4 +1,8 @@ -use std::{fmt::Display, str::FromStr}; +use std::{ + fmt::Display, + hash::{Hash, Hasher}, + str::FromStr, +}; use ark_ff::Field; use serde::{Deserialize, Serialize}; @@ -153,7 +157,7 @@ pub struct Ty { /// The module preceding structs, functions, or variables. // TODO: Hash should probably be implemented manually, right now two alias might have different span and so this will give different hashes -#[derive(Default, Debug, Clone, Serialize, Deserialize, Hash, Eq)] +#[derive(Default, Debug, Clone, Serialize, Deserialize, Eq)] pub enum ModulePath { #[default] /// This is a local type, not imported from another module. @@ -167,6 +171,16 @@ pub enum ModulePath { Absolute(UserRepo), } +impl Hash for ModulePath { + fn hash(&self, state: &mut H) { + match &self { + Self::Local => self.hash(state), + Self::Alias(i) => i.hash(state), + Self::Absolute(u) => u.hash(state), + } + } +} + // TODO: do we want to implement this on Ident instead? 
impl PartialEq for ModulePath { fn eq(&self, other: &Self) -> bool { @@ -206,6 +220,7 @@ pub enum TyKind { } impl TyKind { + #[must_use] pub fn match_expected(&self, expected: &TyKind) -> bool { match (self, expected) { (TyKind::BigInt, TyKind::Field) => true, @@ -224,6 +239,7 @@ impl TyKind { } } + #[must_use] pub fn same_as(&self, other: &TyKind) -> bool { match (self, other) { (TyKind::BigInt, TyKind::Field) | (TyKind::Field, TyKind::BigInt) => true, @@ -260,17 +276,18 @@ impl Display for TyKind { name = name, module = module.value ), - ModulePath::Local => write!(f, "a `{}` struct", name), + ModulePath::Local => write!(f, "a `{name}` struct"), }, TyKind::Field => write!(f, "Field"), TyKind::BigInt => write!(f, "BigInt"), - TyKind::Array(ty, size) => write!(f, "[{}; {}]", ty, size), + TyKind::Array(ty, size) => write!(f, "[{ty}; {size}]"), TyKind::Bool => write!(f, "Bool"), } } } impl Ty { + #[must_use] pub fn reserved_types(module: ModulePath, name: Ident) -> TyKind { match name.value.as_ref() { "Field" | "Bool" if !matches!(module, ModulePath::Local) => { @@ -349,7 +366,7 @@ impl Ty { .map_err(|_e| ctx.error(ErrorKind::InvalidArraySize, siz.span))?, _ => { return Err(ctx.error( - ErrorKind::ExpectedToken(TokenKind::BigInt("".to_string())), + ErrorKind::ExpectedToken(TokenKind::BigInt(String::new())), siz.span, )); } @@ -408,6 +425,7 @@ pub struct Ident { } impl Ident { + #[must_use] pub fn new(value: String, span: Span) -> Self { Self { value, span } } @@ -421,7 +439,7 @@ impl Ident { }), _ => Err(ctx.error( - ErrorKind::ExpectedToken(TokenKind::Identifier("".to_string())), + ErrorKind::ExpectedToken(TokenKind::Identifier(String::new())), token.span, )), } @@ -435,10 +453,12 @@ pub enum AttributeKind { } impl AttributeKind { + #[must_use] pub fn is_public(&self) -> bool { matches!(self, Self::Pub) } + #[must_use] pub fn is_constant(&self) -> bool { matches!(self, Self::Const) } @@ -451,10 +471,12 @@ pub struct Attribute { } impl Attribute { + #[must_use] pub fn is_public(&self) -> bool { self.kind.is_public() } + #[must_use] pub fn is_constant(&self) -> bool { self.kind.is_constant() } @@ -517,18 +539,14 @@ pub struct FnArg { } impl FnArg { + #[must_use] pub fn is_public(&self) -> bool { - self.attribute - .as_ref() - .map(|attr| attr.is_public()) - .unwrap_or(false) + self.attribute.as_ref().is_some_and(Attribute::is_public) } + #[must_use] pub fn is_constant(&self) -> bool { - self.attribute - .as_ref() - .map(|attr| attr.is_constant()) - .unwrap_or(false) + self.attribute.as_ref().is_some_and(Attribute::is_constant) } } @@ -577,6 +595,7 @@ impl FuncOrMethod { } impl FunctionDef { + #[must_use] pub fn is_main(&self) -> bool { self.sig.name.value == "main" } @@ -690,12 +709,10 @@ impl FunctionDef { } else { attr.span.merge_with(arg_typ.span) } + } else if &arg_name.value == "self" { + arg_name.span } else { - if &arg_name.value == "self" { - arg_name.span - } else { - arg_name.span.merge_with(arg_typ.span) - } + arg_name.span.merge_with(arg_typ.span) }; let arg = FnArg { @@ -814,6 +831,7 @@ impl FunctionDef { } // TODO: enforce snake_case? +#[must_use] pub fn is_valid_fn_name(name: &str) -> bool { if let Some(first_char) = name.chars().next() { // first character is not a number @@ -828,6 +846,7 @@ pub fn is_valid_fn_name(name: &str) -> bool { } // TODO: enforce CamelCase? 
+#[must_use] pub fn is_valid_fn_type(name: &str) -> bool { if let Some(first_char) = name.chars().next() { // first character is not a number or alpha @@ -867,6 +886,7 @@ pub struct Range { } impl Range { + #[must_use] pub fn range(&self) -> std::ops::Range { self.start..self.end } @@ -979,7 +999,7 @@ impl Stmt { } _ => { return Err(ctx.error( - ErrorKind::ExpectedToken(TokenKind::BigInt("".to_string())), + ErrorKind::ExpectedToken(TokenKind::BigInt(String::new())), ctx.last_span(), )) } @@ -1003,7 +1023,7 @@ impl Stmt { } _ => { return Err(ctx.error( - ErrorKind::ExpectedToken(TokenKind::BigInt("".to_string())), + ErrorKind::ExpectedToken(TokenKind::BigInt(String::new())), ctx.last_span(), )) } diff --git a/src/serialization.rs b/src/serialization.rs index ffa638a27..e374d9130 100644 --- a/src/serialization.rs +++ b/src/serialization.rs @@ -1,5 +1,5 @@ //! This adds a few utility functions for serializing and deserializing -//! [arkworks](http://arkworks.rs/) types that implement [CanonicalSerialize] and [CanonicalDeserialize]. +//! [arkworks](http://arkworks.rs/) types that implement [`CanonicalSerialize`] and [`CanonicalDeserialize`]. use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use serde_with::Bytes; @@ -13,10 +13,10 @@ pub mod ser { //! Simply use the following attribute on your field: //! `#[serde(with = "o1_utils::serialization::ser") attribute"]` - use super::*; + use super::{Bytes, CanonicalDeserialize, CanonicalSerialize}; use serde_with::{DeserializeAs, SerializeAs}; - /// You can use this to serialize an arkworks type with serde and the "serialize_with" attribute. + /// You can use this to serialize an arkworks type with serde and the "`serialize_with`" attribute. /// See pub fn serialize(val: impl CanonicalSerialize, serializer: S) -> Result where @@ -29,7 +29,7 @@ pub mod ser { Bytes::serialize_as(&bytes, serializer) } - /// You can use this to deserialize an arkworks type with serde and the "deserialize_with" attribute. + /// You can use this to deserialize an arkworks type with serde and the "`deserialize_with`" attribute. /// See pub fn deserialize<'de, T, D>(deserializer: D) -> Result where @@ -45,7 +45,7 @@ pub mod ser { // Serialization with [serde_with] // -/// You can use [SerdeAs] with [serde_with] in order to serialize and deserialize types that implement [CanonicalSerialize] and [CanonicalDeserialize], +/// You can use [`SerdeAs`] with [`serde_with`] in order to serialize and deserialize types that implement [`CanonicalSerialize`] and [`CanonicalDeserialize`], /// or containers of types that implement these traits (Vec, arrays, etc.) 
/// Simply add annotations like `#[serde_as(as = "o1_utils::serialization::SerdeAs")]` /// See diff --git a/src/stdlib/crypto.rs b/src/stdlib/crypto.rs index d1e5c230b..8e077e19e 100644 --- a/src/stdlib/crypto.rs +++ b/src/stdlib/crypto.rs @@ -10,6 +10,7 @@ const POSEIDON_FN: &str = "poseidon(input: [Field; 2]) -> [Field; 3]"; pub const CRYPTO_SIGS: &[&str] = &[POSEIDON_FN]; +#[must_use] pub fn get_crypto_fn(name: &str) -> Option> { let ctx = &mut ParserCtx::default(); let mut tokens = Token::parse(0, name).unwrap(); @@ -27,6 +28,7 @@ pub fn get_crypto_fn(name: &str) -> Option> { } /// a function returns crypto functions +#[must_use] pub fn crypto_fns() -> Vec> { CRYPTO_SIGS .iter() diff --git a/src/stdlib/mod.rs b/src/stdlib/mod.rs index c115fdc07..a17a7ddc5 100644 --- a/src/stdlib/mod.rs +++ b/src/stdlib/mod.rs @@ -46,6 +46,7 @@ pub static BUILTIN_FN_NAMES: Lazy> = Lazy::new(|| { .collect() }); +#[must_use] pub fn get_builtin_fn(name: &str) -> Option> { let ctx = &mut ParserCtx::default(); let mut tokens = Token::parse(0, name).unwrap(); @@ -64,6 +65,7 @@ pub fn get_builtin_fn(name: &str) -> Option> { } /// a function returns builtin functions +#[must_use] pub fn builtin_fns() -> Vec> { BUILTIN_SIGS .iter() @@ -121,10 +123,10 @@ fn assert_eq( // a const and a var (ConstOrCell::Const(cst), ConstOrCell::Cell(cvar)) | (ConstOrCell::Cell(cvar), ConstOrCell::Const(cst)) => { - compiler.backend.assert_eq_const(cvar, *cst, span) + compiler.backend.assert_eq_const(cvar, *cst, span); } (ConstOrCell::Cell(lhs), ConstOrCell::Cell(rhs)) => { - compiler.backend.assert_eq_var(lhs, rhs, span) + compiler.backend.assert_eq_var(lhs, rhs, span); } } diff --git a/src/syntax.rs b/src/syntax.rs index ed23177d3..41bce02fc 100644 --- a/src/syntax.rs +++ b/src/syntax.rs @@ -1,16 +1,18 @@ //! A number of helper function to check the syntax of some types. /// Returns true if the given string is a number in decimal. +#[must_use] pub fn is_numeric(s: &str) -> bool { s.chars().all(|c| c.is_ascii_digit()) } /// Returns true if the given string is an hexadecimal string (0x...) +#[must_use] pub fn is_hexadecimal(s: &str) -> bool { let mut s = s.chars(); let s0 = s.next(); let s1 = s.next(); - if matches!((s0, s1), (Some('0'), Some('x') | Some('X'))) { + if matches!((s0, s1), (Some('0'), Some('x' | 'X'))) { s.all(|c| c.is_ascii_hexdigit()) } else { false @@ -18,6 +20,7 @@ pub fn is_hexadecimal(s: &str) -> bool { } /// Returns true if the given string is an identifier or type +#[must_use] pub fn is_identifier_or_type(s: &str) -> bool { let mut chars = s.chars(); let first_letter = chars.next().unwrap(); @@ -30,6 +33,7 @@ pub fn is_identifier_or_type(s: &str) -> bool { /// Returns true if the given string is an identifier /// (starts with a lowercase letter) +#[must_use] pub fn is_identifier(s: &str) -> bool { let mut chars = s.chars(); let first_letter = chars.next().unwrap(); @@ -42,12 +46,13 @@ pub fn is_identifier(s: &str) -> bool { /// Returns true if the given string is a type /// (first letter is an uppercase) +#[must_use] pub fn is_type(s: &str) -> bool { let mut chars = s.chars(); let first_char = chars.next().unwrap(); // first char is an uppercase letter // rest are lowercase alphanumeric - first_char.is_alphabetic() && first_char.is_uppercase() && chars.all(|c| (c.is_alphanumeric())) + first_char.is_alphabetic() && first_char.is_uppercase() && chars.all(char::is_alphanumeric) // TODO: check camel case? 
} diff --git a/src/tests/examples.rs b/src/tests/examples.rs index bf86255fb..f96b476b6 100644 --- a/src/tests/examples.rs +++ b/src/tests/examples.rs @@ -211,7 +211,7 @@ fn test_lc_return(#[case] backend: BackendKind) -> miette::Result<()> { fn test_poseidon(#[case] backend: BackendKind) -> miette::Result<()> { let private_inputs = r#"{"private_input": ["1", "1"]}"#; let private_input = [1.into(), 1.into()]; - let digest = crate::helpers::poseidon(private_input.clone()); + let digest = crate::helpers::poseidon(private_input); let digest_dec = digest.to_dec_string(); assert_eq!( "3654913405619483358804575553468071097765421484960111776885779739261304758583", @@ -242,7 +242,7 @@ fn test_bool(#[case] backend: BackendKind) -> miette::Result<()> { #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_mutable(#[case] backend: BackendKind) -> miette::Result<()> { let private_inputs = r#"{"xx": "2", "yy": "3"}"#; - let public_inputs = r#"{}"#; + let public_inputs = r"{}"; test_file("mutable", public_inputs, private_inputs, vec![], backend)?; @@ -265,7 +265,7 @@ fn test_for_loop(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_array(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"public_input": ["1", "2"]}"#; test_file("array", public_inputs, private_inputs, vec![], backend)?; @@ -277,7 +277,7 @@ fn test_array(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_equals(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"xx": ["3", "3"]}"#; test_file("equals", public_inputs, private_inputs, vec![], backend)?; @@ -289,7 +289,7 @@ fn test_equals(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_types(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"xx": "1", "yy": "2"}"#; test_file("types", public_inputs, private_inputs, vec![], backend)?; @@ -301,7 +301,7 @@ fn test_types(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_const(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"player": "1"}"#; let expected_public_output = vec!["2"]; @@ -320,7 +320,7 @@ fn test_const(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_functions(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"one": "1"}"#; test_file("functions", public_inputs, private_inputs, vec![], backend)?; @@ -332,7 +332,7 @@ fn test_functions(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] 
#[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_methods(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"xx": "1"}"#; test_file("methods", public_inputs, private_inputs, vec![], backend)?; @@ -344,7 +344,7 @@ fn test_methods(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_types_array(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"xx": "1", "yy": "4"}"#; test_file( @@ -362,7 +362,7 @@ fn test_types_array(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_iterate(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"bedroom_holes": "2"}"#; let expected_public_output = vec!["4"]; @@ -381,7 +381,7 @@ fn test_iterate(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_assignment(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"xx": "2"}"#; test_file("assignment", public_inputs, private_inputs, vec![], backend)?; @@ -393,7 +393,7 @@ fn test_assignment(#[case] backend: BackendKind) -> miette::Result<()> { #[case::kimchi_vesta(BackendKind::KimchiVesta(KimchiVesta::new(false)))] #[case::r1cs(BackendKind::R1csBls12_381(R1CS::new()))] fn test_if_else(#[case] backend: BackendKind) -> miette::Result<()> { - let private_inputs = r#"{}"#; + let private_inputs = r"{}"; let public_inputs = r#"{"xx": "1"}"#; test_file("if_else", public_inputs, private_inputs, vec![], backend)?; diff --git a/src/tests/modules.rs b/src/tests/modules.rs index 471a14e9b..259ed9355 100644 --- a/src/tests/modules.rs +++ b/src/tests/modules.rs @@ -26,7 +26,7 @@ fn Lol.new() -> Lol { } "; -const LIB: &str = r#" +const LIB: &str = r" use mimoo::liblib; // test a library's type that links to its own type @@ -60,9 +60,9 @@ fn new_liblib() -> liblib::Lol { fn test_liblib(ff: Field, lol: liblib::Lol) { lol.match(ff); } -"#; +"; -const MAIN: &str = r#" +const MAIN: &str = r" use mimoo::lib; fn main(pub xx: Field, yy: Field) { @@ -79,7 +79,7 @@ fn main(pub xx: Field, yy: Field) { let lol = lib::new_liblib(); lib::test_liblib(1, lol); } -"#; +"; #[test] fn test_simple_module() -> miette::Result<()> { diff --git a/src/type_checker/checker.rs b/src/type_checker/checker.rs index a3d989ec9..4ce728a9a 100644 --- a/src/type_checker/checker.rs +++ b/src/type_checker/checker.rs @@ -27,6 +27,7 @@ where } impl FnInfo { + #[must_use] pub fn sig(&self) -> &FnSig { match &self.kind { FnKind::BuiltIn(sig, _) => sig, @@ -117,7 +118,7 @@ impl TypeChecker { args, } => { // retrieve the function signature - let qualified = FullyQualified::new(&module, &fn_name.value); + let qualified = FullyQualified::new(module, &fn_name.value); let fn_info = self.fn_info(&qualified).ok_or_else(|| { self.error( ErrorKind::UndefinedFunction(fn_name.value.clone()), @@ -186,7 +187,7 @@ impl TypeChecker { // note: the only way to check that atm is to check in the constants hashmap // this is 
because we don't differentiate const vars from normal variables // (perhaps we should) - let qualified = FullyQualified::new(&module, &name.value); + let qualified = FullyQualified::new(module, &name.value); if let Some(_cst_info) = self.const_info(&qualified) { return Err(self.error( ErrorKind::UnexpectedError("cannot assign to an external variable"), @@ -198,7 +199,7 @@ impl TypeChecker { } // `array[idx] = ` - ExprKind::ArrayAccess { array, idx } => { + ExprKind::ArrayAccess { array, .. } => { // get variable behind array let array_node = self .compute_type(array, typed_fn_env)? @@ -210,7 +211,7 @@ impl TypeChecker { } // `struct.field = ` - ExprKind::FieldAccess { lhs, rhs } => { + ExprKind::FieldAccess { lhs, .. } => { // get variable behind lhs let lhs_node = self .compute_type(lhs, typed_fn_env)? @@ -237,9 +238,10 @@ impl TypeChecker { // and is of the same type as the rhs let rhs_typ = self.compute_type(rhs, typed_fn_env)?.unwrap(); - if !rhs_typ.typ.match_expected(&lhs_node.typ) { - panic!("lhs type doesn't match rhs type (TODO: replace with error)"); - } + assert!( + rhs_typ.typ.match_expected(&lhs_node.typ), + "lhs type doesn't match rhs type (TODO: replace with error)" + ); None } @@ -442,9 +444,10 @@ impl TypeChecker { .expect("can't compute type of first branch of `if/else`"); // make sure that the type of then_ and else_ match - if then_node.typ != else_node.typ { - panic!("`if` branch and `else` branch must have matching types"); - } + assert!( + !(then_node.typ != else_node.typ), + "`if` branch and `else` branch must have matching types" + ); // Some(ExprTyInfo::new_anon(then_node.typ)) @@ -521,9 +524,10 @@ impl TypeChecker { let mut return_typ = None; for stmt in stmts { - if return_typ.is_some() { - panic!("early return detected: we don't allow that for now (TODO: return error"); - } + assert!( + return_typ.is_none(), + "early return detected: we don't allow that for now (TODO: return error" + ); return_typ = self.check_stmt(typed_fn_env, stmt)?; } @@ -583,9 +587,10 @@ impl TypeChecker { .store_type(var.value.clone(), TypeInfo::new(TyKind::BigInt, var.span))?; // ensure start..end makes sense - if range.end < range.start { - panic!("end can't be smaller than start (TODO: better error)"); - } + assert!( + range.end >= range.start, + "end can't be smaller than start (TODO: better error)" + ); // check block self.check_block(typed_fn_env, body, None)?; diff --git a/src/type_checker/fn_env.rs b/src/type_checker/fn_env.rs index 8e6e6930a..26fe5d50b 100644 --- a/src/type_checker/fn_env.rs +++ b/src/type_checker/fn_env.rs @@ -8,7 +8,7 @@ use crate::{ parser::types::TyKind, }; -/// Some type information on local variables that we want to track in the [TypedFnEnv] environment. +/// Some type information on local variables that we want to track in the [`TypedFnEnv`] environment. #[derive(Debug, Clone)] pub struct TypeInfo { /// If the variable can be mutated or not. 
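For reference, the `panic!` → `assert!` rewrites in `checker.rs` above are clippy's `manual_assert` fix; here is a minimal standalone sketch of that pattern, with plain integers standing in for the project's types (nothing below is part of the diff):

```rust
// Sketch only: illustrates the clippy::manual_assert rewrite seen above.
fn check_branches(then_ty: u32, else_ty: u32) {
    // Before the fix (flagged by clippy::manual_assert):
    //
    //     if then_ty != else_ty {
    //         panic!("`if` branch and `else` branch must have matching types");
    //     }
    //
    // After: clippy negates the original condition and keeps the message,
    // which is why the patch ends up with `assert!(!(a != b), ...)`.
    assert!(
        !(then_ty != else_ty),
        "`if` branch and `else` branch must have matching types"
    );
}

fn main() {
    check_branches(1, 1); // ok; check_branches(1, 2) would panic with the message above
}
```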
@@ -29,6 +29,7 @@ pub struct TypeInfo { } impl TypeInfo { + #[must_use] pub fn new(typ: TyKind, span: Span) -> Self { Self { mutable: false, @@ -39,6 +40,7 @@ impl TypeInfo { } } + #[must_use] pub fn new_mut(typ: TyKind, span: Span) -> Self { Self { mutable: true, @@ -46,6 +48,7 @@ impl TypeInfo { } } + #[must_use] pub fn new_cst(typ: TyKind, span: Span) -> Self { Self { constant: true, @@ -68,7 +71,8 @@ pub struct TypedFnEnv { } impl TypedFnEnv { - /// Creates a new TypeEnv + /// Creates a new `TypeEnv` + #[must_use] pub fn new() -> Self { Self::default() } @@ -83,7 +87,7 @@ impl TypedFnEnv { self.current_scope.checked_sub(1).expect("scope bug"); // disable variables as we exit the scope - for (_name, (scope, type_info)) in self.vars.iter_mut() { + for (scope, type_info) in self.vars.values_mut() { if *scope > self.current_scope { type_info.disabled = true; } @@ -91,6 +95,7 @@ impl TypedFnEnv { } /// Returns true if a scope is a prefix of our scope. + #[must_use] pub fn is_in_scope(&self, prefix_scope: usize) -> bool { self.current_scope >= prefix_scope } @@ -111,10 +116,12 @@ impl TypedFnEnv { } } + #[must_use] pub fn get_type(&self, ident: &str) -> Option<&TyKind> { self.get_type_info(ident).map(|type_info| &type_info.typ) } + #[must_use] pub fn mutable(&self, ident: &str) -> Option { self.get_type_info(ident).map(|type_info| type_info.mutable) } @@ -122,6 +129,7 @@ impl TypedFnEnv { /// Retrieves type information on a variable, given a name. /// If the variable is not in scope, return false. // TODO: return an error no? + #[must_use] pub fn get_type_info(&self, ident: &str) -> Option<&TypeInfo> { if let Some((scope, type_info)) = self.vars.get(ident) { if self.is_in_scope(*scope) && !type_info.disabled { diff --git a/src/type_checker/mod.rs b/src/type_checker/mod.rs index ff0cb9d95..ad82c9c35 100644 --- a/src/type_checker/mod.rs +++ b/src/type_checker/mod.rs @@ -45,11 +45,15 @@ pub struct FullyQualified { } impl FullyQualified { + #[must_use] pub fn local(name: String) -> Self { Self { module: None, name } } - pub fn new(module: &ModulePath, name: &String) -> Self { + #[must_use] + // TODO: Pass in `String`, instead of `&str`, so we don't hide an + // allocation within. + pub fn new(module: &ModulePath, name: &str) -> Self { let module = match module { ModulePath::Local => None, ModulePath::Alias(_) => unreachable!(), @@ -57,7 +61,7 @@ impl FullyQualified { }; Self { module, - name: name.clone(), + name: name.to_string(), } } } @@ -69,7 +73,7 @@ where B: Backend, { /// the functions present in the scope - /// contains at least the set of builtin functions (like assert_eq) + /// contains at least the set of builtin functions (like `assert_eq`) functions: HashMap>, /// Custom structs type information and ASTs for methods. @@ -78,7 +82,7 @@ where /// Constants declared in this module. constants: HashMap>, - /// Mapping from node id to TyKind. + /// Mapping from node id to `TyKind`. /// This can be used by the circuit-writer when it needs type information. // TODO: I think we should get rid of this if we can node_types: HashMap, @@ -103,7 +107,7 @@ impl TypeChecker { } pub(crate) fn const_info(&self, qualified: &FullyQualified) -> Option<&ConstInfo> { - self.constants.get(&qualified) + self.constants.get(qualified) } /// Returns the number of field elements contained in the given type. 
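The `FullyQualified::new` change above (taking `&str` instead of `&String`) is the usual fix for clippy's `ptr_arg` lint. A small illustrative sketch with a made-up `qualify` helper, not the project's actual API:

```rust
// Sketch only: shows why `&str` parameters are preferred over `&String`.
fn qualify(module: Option<&str>, name: &str) -> String {
    match module {
        Some(m) => format!("{m}::{name}"),
        // this `to_string()` is the kind of hidden allocation the TODO in the hunk mentions
        None => name.to_string(),
    }
}

fn main() {
    // `&str` accepts string literals and `String`s (via deref) alike:
    let owned = String::from("new_house");
    assert_eq!(qualify(Some("lib"), &owned), "lib::new_house");
    assert_eq!(qualify(None, "main"), "main");
}
```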
@@ -112,7 +116,7 @@ impl TypeChecker { match typ { TyKind::Field => 1, TyKind::Custom { module, name } => { - let qualified = FullyQualified::new(&module, &name); + let qualified = FullyQualified::new(module, name); let struct_info = self .struct_info(&qualified) .expect("bug in the type checker: cannot find struct info"); @@ -132,8 +136,15 @@ impl TypeChecker { } } +impl Default for TypeChecker { + fn default() -> Self { + Self::new() + } +} + impl TypeChecker { // TODO: we can probably lazy const this + #[must_use] pub fn new() -> Self { let mut type_checker = Self { functions: HashMap::new(), @@ -172,6 +183,7 @@ impl TypeChecker { type_checker } + #[must_use] pub fn error(&self, kind: ErrorKind, span: Span) -> Error { Error::new("type-checker", kind, span) } @@ -301,7 +313,7 @@ impl TypeChecker { } // save the function in the typed global env - let fn_kind = FnKind::Native(function.clone()); + let fn_kind = FnKind::::Native(function.clone()); let fn_info = FnInfo { kind: fn_kind, span: function.span, diff --git a/src/utils.rs b/src/utils.rs index 4622f420f..275433d12 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,5 +1,6 @@ use std::fmt::Write; +#[must_use] pub fn noname_version() -> String { format!("@ noname.{}\n\n", env!("CARGO_PKG_VERSION")) } @@ -19,7 +20,7 @@ pub fn display_source( res.push('\n'); // display filename - writeln!(res, "│ FILE: {}", file).unwrap(); + writeln!(res, "│ FILE: {file}").unwrap(); writeln!(res, "│{s}", s = "─".repeat(80)).unwrap(); // source @@ -61,7 +62,7 @@ fn find_exact_line(source: &str, span: crate::constants::Span) -> (usize, usize, pub fn title(res: &mut String, s: &str) { writeln!(res, "╭{s}╮", s = "─".repeat(s.len())).unwrap(); - writeln!(res, "│{s}│", s = s).unwrap(); + writeln!(res, "│{s}│").unwrap(); writeln!(res, "╰{s}╯", s = "─".repeat(s.len())).unwrap(); writeln!(res).unwrap(); } diff --git a/src/var.rs b/src/var.rs index 747821d6c..08663ccf6 100644 --- a/src/var.rs +++ b/src/var.rs @@ -13,7 +13,8 @@ use crate::{ }; /// The signature of a hint function -pub type HintFn = dyn Fn(&B, &mut WitnessEnv) -> Result; +pub type HintFn = + dyn Fn(&B, &mut WitnessEnv<::Field>) -> Result<::Field>; /// A variable's actual value in the witness can be computed in different ways. 
#[derive(Clone, Serialize, Deserialize)] @@ -120,6 +121,7 @@ where } impl Var { + #[must_use] pub fn new(cvars: Vec>, span: Span) -> Self { Self { cvars, span } } @@ -147,19 +149,22 @@ impl Var { pub fn new_constant_typ(cst_info: &ConstInfo, span: Span) -> Self { let ConstInfo { value, typ: _ } = cst_info; - let cvars = value.into_iter().cloned().map(ConstOrCell::Const).collect(); + let cvars = value.iter().copied().map(ConstOrCell::Const).collect(); Self { cvars, span } } + #[must_use] pub fn len(&self) -> usize { self.cvars.len() } + #[must_use] pub fn is_empty(&self) -> bool { self.cvars.is_empty() } + #[must_use] pub fn get(&self, idx: usize) -> Option<&ConstOrCell> { if idx < self.cvars.len() { Some(&self.cvars[idx]) @@ -168,6 +173,7 @@ impl Var { } } + #[must_use] pub fn constant(&self) -> Option { if self.cvars.len() == 1 { self.cvars[0].cst() @@ -176,6 +182,7 @@ impl Var { } } + #[must_use] pub fn range(&self, start: usize, len: usize) -> &[ConstOrCell] { &self.cvars[start..(start + len)] } diff --git a/src/witness.rs b/src/witness.rs index ed47813ed..16e9d9ecb 100644 --- a/src/witness.rs +++ b/src/witness.rs @@ -5,7 +5,7 @@ use itertools::chain; //use serde::{Deserialize, Serialize}; use crate::{ - backends::{Backend, BackendVar}, + backends::Backend, circuit_writer::CircuitWriter, compiler::Sources, error::{Error, ErrorKind, Result}, @@ -28,6 +28,7 @@ impl WitnessEnv { assert!(self.var_values.insert(name, val).is_none()); } + #[must_use] pub fn get_external(&self, name: &str) -> Vec { // TODO: return an error instead of crashing self.var_values.get(name).unwrap().clone()
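Most of the remaining additions in this section are `#[must_use]` annotations, the fix clippy's `must_use_candidate` lint suggests for side-effect-free getters. A standalone sketch with a hypothetical `Values` type, showing what the attribute buys:

```rust
// Sketch only: hypothetical type demonstrating the effect of #[must_use].
pub struct Values(Vec<u64>);

impl Values {
    /// Calling a side-effect-free getter and discarding the result is almost
    /// always a bug; `#[must_use]` turns that into an `unused_must_use` warning.
    #[must_use]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

fn main() {
    let vals = Values(vec![1, 2, 3]);
    assert_eq!(vals.len(), 3);
    assert!(!vals.is_empty());
    // A bare `vals.len();` statement would now warn under `unused_must_use`.
}
```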