This repository has been archived by the owner on Oct 20, 2024. It is now read-only.

Add support for a padded directive #305

Draft · wants to merge 3 commits into main
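This PR adds a `padded` directive for macro bodies: the lexer gains a `TokenKind::Padded` keyword, and the parser's new `parse_padded` routine reads `#define padded(N) { ... }` blocks, rejects bodies larger than the declared size with a new `InvalidPaddedSize` error, and appends `STOP` opcodes until the block contains `N` statements. The sketch below is a minimal round trip assembled from the tests in this diff; it is not part of the PR itself, the macro name and literals are illustrative, and it assumes the same `Lexer`/`Parser` entry points the tests use.

```rust
use huff_lexer::Lexer;
use huff_parser::Parser;
use huff_utils::prelude::*;

fn main() {
    // Illustrative source mirroring the tests added below: a padded(7) block
    // whose body parses to 5 statements (0x00 mstore, then 0x01 0x02 add).
    let source =
        "#define macro HELLO_WORLD() = takes(3) returns(0) {\n #define padded(7) {\n 0x00 mstore\n 0x01 0x02 add \n}\n}";
    let flattened_source = FullFileSource { source, file: None, spans: vec![] };
    let lexer = Lexer::new(flattened_source.source);
    let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
    let mut parser = Parser::new(tokens, None);

    // parse_padded appends STOP opcodes until the block reaches the declared size,
    // so the macro body ends up with 7 statements (5 parsed + 2 STOPs).
    let contract = parser.parse().unwrap();
    assert_eq!(contract.macros[0].statements.len(), 7);
}
```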
4 changes: 4 additions & 0 deletions huff_lexer/src/lib.rs
@@ -187,6 +187,7 @@ impl<'a> Lexer<'a> {
TokenKind::Indexed,
TokenKind::View,
TokenKind::Pure,
TokenKind::Padded,
// First check for packed jump table
TokenKind::JumpTablePacked,
// Match with jump table if not
@@ -195,6 +196,9 @@
];
for kind in keys.into_iter() {
if self.context == Context::MacroBody {
if word == "padded" {
found_kind = Some(TokenKind::Padded)
}
break
}
let key = kind.to_string();
15 changes: 15 additions & 0 deletions huff_lexer/tests/padded.rs
@@ -0,0 +1,15 @@
use huff_lexer::Lexer;
use huff_utils::prelude::*;

#[cfg(test)]
use std::println as info;

#[test]
fn padded_with_simple_body() {
let source =
"#define macro HELLO_WORLD() = takes(3) returns(0) {\n #define padded(32) {\n 0x00 mstore\n 0x01 0x02 add \n} 0x69 0x69 return\n}";
let flattened_source = FullFileSource { source, file: None, spans: vec![] };
let lexer = Lexer::new(flattened_source.source);
let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
info!("{:#?}", tokens);
}
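As a hedged companion to the smoke test above (not included in this PR), the lexer change is meant to guarantee that the bare word `padded` is keyword-matched while lexing a macro body, so the token stream should contain a `TokenKind::Padded` entry. The macro name and body below are illustrative:

```rust
use huff_lexer::Lexer;
use huff_utils::prelude::*;

#[test]
fn padded_keyword_is_lexed_inside_macro_body() {
    // Illustrative source with the same shape as the test above.
    let source =
        "#define macro PAD_ME() = takes(0) returns(0) {\n #define padded(4) {\n 0x01 0x02 add \n}\n}";
    let flattened_source = FullFileSource { source, file: None, spans: vec![] };
    let lexer = Lexer::new(flattened_source.source);
    let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();

    // "padded" is only keyword-matched in macro-body context, so the stream
    // should contain a TokenKind::Padded token rather than only an Ident.
    assert!(tokens.iter().any(|t| t.kind == TokenKind::Padded));
}
```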
49 changes: 47 additions & 2 deletions huff_parser/src/lib.rs
@@ -8,7 +8,7 @@ use huff_utils::{
ast::*,
error::*,
files,
prelude::{bytes32_to_string, hash_bytes, str_to_bytes32, Span},
prelude::{bytes32_to_string, hash_bytes, str_to_bytes32, Opcode, Span},
token::{Token, TokenKind},
types::*,
};
@@ -536,7 +536,7 @@ impl Parser {

/// Parse the body of a macro.
///
/// Only HEX, OPCODES, labels, builtins, and MACRO calls should be authorized.
/// Only HEX, OPCODES, labels, builtins, MACRO calls, and padded blocks should be authorized.
pub fn parse_body(&mut self) -> Result<Vec<Statement>, ParserError> {
let mut statements: Vec<Statement> = Vec::new();
self.match_kind(TokenKind::OpenBrace)?;
@@ -677,7 +677,20 @@
span: AstSpan(curr_spans),
});
}
TokenKind::Padded => {
let padded_statements = self.parse_padded()?;
tracing::info!(target: "parser", "PARSING MACRO BODY : [PADDED CODE BLOCK]");
statements.extend(padded_statements);
}
kind => {
if let TokenKind::Define = kind {
// allow for define within a macro body only if it's followed by "padded"
let expected_padded = self.peek().unwrap();
if expected_padded.kind == TokenKind::Padded {
self.consume();
continue
}
}
tracing::error!(target: "parser", "TOKEN MISMATCH - MACRO BODY: {}", kind);
return Err(ParserError {
kind: ParserErrorKind::InvalidTokenInMacroBody(kind),
@@ -692,6 +705,38 @@
Ok(statements)
}

/// Parse a padded code block.
pub fn parse_padded(&mut self) -> Result<Vec<Statement>, ParserError> {
// consume the Padded token
self.consume();

// parse the padded code block's size
let padded_block_size = self.parse_single_arg()?;

// parse the padded code block's body
let mut padded_statements = self.parse_body()?;

if padded_statements.len() > padded_block_size {
return Err(ParserError {
kind: ParserErrorKind::InvalidPaddedSize(
padded_block_size,
padded_statements.len(),
),
hint: Some("Ensure the padded block's declared size is >= its body's size".to_string()),
spans: AstSpan(vec![self.current_token.span.clone()]),
})
}

while padded_statements.len() < padded_block_size {
padded_statements.push(Statement {
ty: StatementType::Opcode(Opcode::Stop),
span: AstSpan(vec![]), // TODO what span do I put here?
});
}

Ok(padded_statements)
}

/// Parse the body of a label.
///
/// ## Examples
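For the error path of `parse_padded` above: when a padded block's body parses to more statements than the declared size, the parser should return the new `InvalidPaddedSize(declared, actual)` error instead of padding. A hedged negative-path sketch, not one of this PR's tests; the macro name and body are illustrative:

```rust
use huff_lexer::Lexer;
use huff_parser::Parser;
use huff_utils::{error::ParserErrorKind, prelude::*};

fn main() {
    // padded(2) with a 3-statement body (two literals + ADD) overflows the declared size.
    let source =
        "#define macro TOO_BIG() = takes(0) returns(0) {\n #define padded(2) {\n 0x00 0x01 add \n}\n}";
    let flattened_source = FullFileSource { source, file: None, spans: vec![] };
    let lexer = Lexer::new(flattened_source.source);
    let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
    let mut parser = Parser::new(tokens, None);

    // Expect ParserErrorKind::InvalidPaddedSize(declared = 2, actual = 3).
    match parser.parse() {
        Err(e) => assert!(matches!(e.kind, ParserErrorKind::InvalidPaddedSize(2, 3))),
        Ok(_) => panic!("expected an InvalidPaddedSize error"),
    }
}
```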
82 changes: 82 additions & 0 deletions huff_parser/tests/padded.rs
@@ -0,0 +1,82 @@
use huff_lexer::Lexer;
use huff_parser::*;
use huff_utils::{evm::Opcode, prelude::*};

#[test]
fn macro_with_simple_body() {
let source =
"#define macro HELLO_WORLD() = takes(3) returns(0) {\n #define padded(7) {\n 0x00 mstore\n 0x01 0x02 add \n}\n}";
let flattened_source = FullFileSource { source, file: None, spans: vec![] };
let lexer = Lexer::new(flattened_source.source);
let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
let mut parser = Parser::new(tokens, None);

// Grab the first macro
let macro_definition = parser.parse().unwrap().macros[0].clone();

// TODO fix expected spans
let expected = MacroDefinition {
name: "HELLO_WORLD".to_string(),
decorator: None,
parameters: vec![],
statements: vec![
Statement {
ty: StatementType::Literal(str_to_bytes32("00")),
span: AstSpan(vec![Span { start: 54, end: 55, file: None }]),
},
Statement {
ty: StatementType::Opcode(Opcode::Mstore),
span: AstSpan(vec![Span { start: 57, end: 62, file: None }]),
},
Statement {
ty: StatementType::Literal(str_to_bytes32("01")),
span: AstSpan(vec![Span { start: 67, end: 68, file: None }]),
},
Statement {
ty: StatementType::Literal(str_to_bytes32("02")),
span: AstSpan(vec![Span { start: 72, end: 73, file: None }]),
},
Statement {
ty: StatementType::Opcode(Opcode::Add),
span: AstSpan(vec![Span { start: 75, end: 77, file: None }]),
},
Statement {
ty: StatementType::Opcode(Opcode::Stop),
span: AstSpan(vec![]), // TODO: what span should this use?
},
Statement {
ty: StatementType::Opcode(Opcode::Stop),
span: AstSpan(vec![]), // TODO: what span should this use?
},
],
takes: 3,
returns: 0,
span: AstSpan(vec![
Span { start: 0, end: 6, file: None },
Span { start: 8, end: 12, file: None },
Span { start: 14, end: 24, file: None },
Span { start: 25, end: 25, file: None },
Span { start: 26, end: 26, file: None },
Span { start: 28, end: 28, file: None },
Span { start: 30, end: 34, file: None },
Span { start: 35, end: 35, file: None },
Span { start: 36, end: 36, file: None },
Span { start: 37, end: 37, file: None },
Span { start: 39, end: 45, file: None },
Span { start: 46, end: 46, file: None },
Span { start: 47, end: 47, file: None },
Span { start: 48, end: 48, file: None },
Span { start: 50, end: 50, file: None },
Span { start: 54, end: 55, file: None },
Span { start: 57, end: 62, file: None },
Span { start: 67, end: 68, file: None },
Span { start: 72, end: 73, file: None },
Span { start: 75, end: 77, file: None },
Span { start: 79, end: 79, file: None },
]),
outlined: false,
test: false,
};
assert_eq!(macro_definition, expected);
assert_eq!(parser.current_token.kind, TokenKind::Eof);
}
5 changes: 5 additions & 0 deletions huff_utils/src/error.rs
@@ -63,6 +63,8 @@ pub enum ParserErrorKind {
InvalidDecoratorFlag(String),
/// Invalid decorator flag argument
InvalidDecoratorFlagArg(TokenKind),
/// Invalid padded code block size
InvalidPaddedSize(usize, usize),
}

/// A Lexing Error
@@ -488,6 +490,9 @@ impl fmt::Display for CompilerError {
pe.spans.error(pe.hint.as_ref())
)
}
ParserErrorKind::InvalidPaddedSize(declared_size, actual_size) => {
write!(f, "\nError: Invalid padded code block size: declared size : {} , actual size {}", declared_size, actual_size)
}
},
CompilerError::PathBufRead(os_str) => {
write!(
3 changes: 3 additions & 0 deletions huff_utils/src/token.rs
@@ -63,6 +63,8 @@ pub enum TokenKind {
Indexed,
/// "FREE_STORAGE_POINTER()" keyword
FreeStoragePointer,
/// "padded" keyword
Padded,
/// An Identifier
Ident(String),
/// Equal Sign
@@ -166,6 +168,7 @@ impl fmt::Display for TokenKind {
TokenKind::Takes => "takes",
TokenKind::Returns => "returns",
TokenKind::FreeStoragePointer => "FREE_STORAGE_POINTER()",
TokenKind::Padded => "padded",
TokenKind::Ident(s) => return write!(f, "{s}"),
TokenKind::Assign => "=",
TokenKind::OpenParen => "(",