
Commit

fix: Fix proc-macro server not accounting for string delimiters correctly
Veykril committed Feb 22, 2024
1 parent 543d7e9 commit cdfb73a
Showing 6 changed files with 85 additions and 55 deletions.
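Note on the change as a whole: the server previously interned string-like literal symbols together with their delimiters, so a token such as "hello bridge" came back out as ""hello bridge"" (see the updated test expectations at the bottom). The fix strips the delimiters when a literal is parsed and re-adds them when it is stringified. A minimal, self-contained sketch of the stripping step, using hard-coded offsets instead of the real rustc_lexer kinds; the offsets mirror the (kind, start_offset, end_offset) triples introduced in rust_analyzer_span.rs and token_id.rs below:

// Sketch only: trim a literal's opening and closing delimiters, given how many
// characters each side contributes (0 for the closing side when unterminated).
fn strip_delimiters(text: &str, start_offset: usize, end_offset: usize) -> &str {
    &text[start_offset..text.len() - end_offset]
}

fn main() {
    assert_eq!(strip_delimiters("\"hello bridge\"", 1, 1), "hello bridge");
    // r##"raw"##: `r` plus two `#` plus `"` in front (4), `"` plus two `#` behind (3)
    assert_eq!(strip_delimiters("r##\"raw\"##", 4, 3), "raw");
    // c"null": `c"` in front (2), `"` behind (1)
    assert_eq!(strip_delimiters("c\"null\"", 2, 1), "null");
}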
6 changes: 3 additions & 3 deletions crates/proc-macro-srv/src/proc_macros.rs
@@ -64,7 +64,7 @@ impl ProcMacros {
 &bridge::server::SameThread,
 S::make_server(call_site, def_site, mixed_site),
 parsed_body,
- false,
+ cfg!(debug_assertions),
 );
 return res
 .map(|it| it.into_subtree(call_site))
@@ -75,7 +75,7 @@ impl ProcMacros {
 &bridge::server::SameThread,
 S::make_server(call_site, def_site, mixed_site),
 parsed_body,
- false,
+ cfg!(debug_assertions),
 );
 return res
 .map(|it| it.into_subtree(call_site))
@@ -87,7 +87,7 @@ impl ProcMacros {
 S::make_server(call_site, def_site, mixed_site),
 parsed_attributes,
 parsed_body,
- false,
+ cfg!(debug_assertions),
 );
 return res
 .map(|it| it.into_subtree(call_site))
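Note: the only change in this file is the final boolean passed to the bridge client's run call, which was hard-coded to false and now tracks the build profile. Assuming that argument is the bridge's force_show_panics flag (the diff itself does not name it), expander panics are only surfaced verbosely in debug builds of the server, as in this trivial sketch:

// Sketch: cfg!(debug_assertions) expands to a compile-time bool, so the flag
// is true for debug builds of the proc-macro server and false for release ones.
fn force_show_panics() -> bool {
    cfg!(debug_assertions)
}

fn main() {
    println!("force_show_panics = {}", force_show_panics());
}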
9 changes: 8 additions & 1 deletion crates/proc-macro-srv/src/server.rs
@@ -93,7 +93,14 @@ impl<S> LiteralFormatter<S> {
 let hashes = get_hashes_str(n);
 f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
 }
- _ => f(&[symbol, suffix]),
+ bridge::LitKind::CStr => f(&["c\"", symbol, "\"", suffix]),
+ bridge::LitKind::CStrRaw(n) => {
+ let hashes = get_hashes_str(n);
+ f(&["cr", hashes, "\"", symbol, "\"", hashes, suffix])
+ }
+ bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => {
+ f(&[symbol, suffix])
+ }
 })
 }

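Note: LiteralFormatter now matches every bridge::LitKind explicitly instead of using a catch-all _ arm, so C strings get their c"…" and cr#"…"# delimiters back, and only integers, floats, and error literals pass through verbatim. A standalone sketch of the same reassembly, with a simplified Kind enum standing in for bridge::LitKind and hashes(n) for get_hashes_str (the real code hands the pieces to f as a slice of &str parts instead of allocating a String):

// Sketch: rebuild the source form of a literal from its undelimited symbol.
#[allow(dead_code)]
enum Kind { Str, StrRaw(u8), CStr, CStrRaw(u8), Integer }

fn hashes(n: u8) -> String { "#".repeat(n as usize) }

fn render(kind: Kind, symbol: &str, suffix: &str) -> String {
    match kind {
        Kind::Str => format!("\"{symbol}\"{suffix}"),
        Kind::StrRaw(n) => format!("r{0}\"{symbol}\"{0}{suffix}", hashes(n)),
        Kind::CStr => format!("c\"{symbol}\"{suffix}"),
        Kind::CStrRaw(n) => format!("cr{0}\"{symbol}\"{0}{suffix}", hashes(n)),
        Kind::Integer => format!("{symbol}{suffix}"),
    }
}

fn main() {
    assert_eq!(render(Kind::CStr, "null", ""), "c\"null\"");
    assert_eq!(render(Kind::StrRaw(2), "raw", ""), "r##\"raw\"##");
    assert_eq!(render(Kind::Integer, "1", "u16"), "1u16");
}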
43 changes: 25 additions & 18 deletions crates/proc-macro-srv/src/server/rust_analyzer_span.rs
@@ -97,22 +97,33 @@ impl server::FreeFunctions for RaSpanServer {
 }

 let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- let kind = match kind {
- LiteralKind::Int { .. } => LitKind::Integer,
- LiteralKind::Float { .. } => LitKind::Float,
- LiteralKind::Char { .. } => LitKind::Char,
- LiteralKind::Byte { .. } => LitKind::Byte,
- LiteralKind::Str { .. } => LitKind::Str,
- LiteralKind::ByteStr { .. } => LitKind::ByteStr,
- LiteralKind::CStr { .. } => LitKind::CStr,
- LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
- LiteralKind::RawByteStr { n_hashes } => {
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
- }
- LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ let (kind, start_offset, end_offset) = match kind {
+ LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+ LiteralKind::RawStr { n_hashes } => (
+ LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ 2 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawByteStr { n_hashes } => (
+ LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawCStr { n_hashes } => (
+ LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
 };

 let (lit, suffix) = s.split_at(suffix_start as usize);
+ let lit = &lit[start_offset..lit.len() - end_offset];
 let suffix = match suffix {
 "" | "_" => None,
 suffix => Some(Symbol::intern(self.interner, suffix)),
@@ -248,12 +259,8 @@ impl server::TokenStream for RaSpanServer {
 }
 tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
 bridge::TokenTree::Literal(bridge::Literal {
- // FIXME: handle literal kinds
- kind: bridge::LitKind::Integer, // dummy
- symbol: Symbol::intern(self.interner, &lit.text),
- // FIXME: handle suffixes
- suffix: None,
 span: lit.span,
+ ..server::FreeFunctions::literal_from_str(self, &lit.text).unwrap()
 })
 }
 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
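Note: the core of literal_from_str is the new (kind, start_offset, end_offset) triple: start_offset counts the characters of the opening delimiter, end_offset those of the closing one, and `terminated as usize` drops the closing offset to 0 when the lexer saw no terminator. The second hunk then reuses literal_from_str when converting a token tree back into a bridge::Literal, overriding only the span. The raw-literal offset arithmetic, worked through as a small sketch:

// Sketch of the offset arithmetic for raw literals; n is the number of '#'s.
// For r##"raw"##: opening is r + ## + "  -> 2 + n = 4 chars,
//                 closing is " + ##      -> 1 + n = 3 chars.
fn raw_str_offsets(n: usize) -> (usize, usize) {
    (2 + n, 1 + n)
}

// Raw byte/C strings have one extra prefix character (b or c), hence 3 + n.
fn raw_byte_or_c_str_offsets(n: usize) -> (usize, usize) {
    (3 + n, 1 + n)
}

fn main() {
    let src = "r##\"raw\"##";
    let (start, end) = raw_str_offsets(2);
    assert_eq!(&src[start..src.len() - end], "raw");

    let src = "br#\"bytes\"#";
    let (start, end) = raw_byte_or_c_str_offsets(1);
    assert_eq!(&src[start..src.len() - end], "bytes");
}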
45 changes: 27 additions & 18 deletions crates/proc-macro-srv/src/server/token_id.rs
@@ -89,22 +89,34 @@ impl server::FreeFunctions for TokenIdServer {
 }

 let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- let kind = match kind {
- LiteralKind::Int { .. } => LitKind::Integer,
- LiteralKind::Float { .. } => LitKind::Float,
- LiteralKind::Char { .. } => LitKind::Char,
- LiteralKind::Byte { .. } => LitKind::Byte,
- LiteralKind::Str { .. } => LitKind::Str,
- LiteralKind::ByteStr { .. } => LitKind::ByteStr,
- LiteralKind::CStr { .. } => LitKind::CStr,
- LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()),
- LiteralKind::RawByteStr { n_hashes } => {
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default())
- }
- LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+
+ let (kind, start_offset, end_offset) = match kind {
+ LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+ LiteralKind::RawStr { n_hashes } => (
+ LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ 2 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawByteStr { n_hashes } => (
+ LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawCStr { n_hashes } => (
+ LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
 };

 let (lit, suffix) = s.split_at(suffix_start as usize);
+ let lit = &lit[start_offset..lit.len() - end_offset];
 let suffix = match suffix {
 "" | "_" => None,
 suffix => Some(Symbol::intern(self.interner, suffix)),
@@ -233,12 +245,9 @@ impl server::TokenStream for TokenIdServer {
 }
 tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
 bridge::TokenTree::Literal(bridge::Literal {
- // FIXME: handle literal kinds
- kind: bridge::LitKind::Integer, // dummy
- symbol: Symbol::intern(self.interner, &lit.text),
- // FIXME: handle suffixes
- suffix: None,
 span: lit.span,
+ ..server::FreeFunctions::literal_from_str(self, &lit.text)
+ .unwrap_or_else(|_| panic!("`{}`", lit.text))
 })
 }
 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
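Note: token_id.rs mirrors the span-aware server above; the only substantive difference is that its token-stream conversion panics with the offending literal text instead of a bare unwrap, which makes failures easier to diagnose. A hypothetical miniature of that struct-update pattern (Literal and literal_from_str here are stand-ins, not the real bridge types):

// Hypothetical miniature: parse everything from the text, then override just the span.
#[derive(Debug)]
struct Literal { kind: &'static str, symbol: String, suffix: Option<String>, span: u32 }

fn literal_from_str(s: &str) -> Result<Literal, ()> {
    // Stand-in parser: treat a leading digit as an integer, anything else as an error.
    if s.chars().next().is_some_and(|c| c.is_ascii_digit()) {
        Ok(Literal { kind: "Integer", symbol: s.to_owned(), suffix: None, span: 0 })
    } else {
        Err(())
    }
}

fn main() {
    let text = "42";
    let lit = Literal {
        span: 7,
        ..literal_from_str(text).unwrap_or_else(|_| panic!("`{text}`"))
    };
    println!("{lit:?}");
}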
11 changes: 3 additions & 8 deletions crates/proc-macro-srv/src/server/token_stream.rs
@@ -115,22 +115,17 @@ pub(super) mod token_stream {
 }
 }

- type LexError = String;
-
 /// Attempts to break the string into tokens and parse those tokens into a token stream.
 /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
 /// or characters not existing in the language.
 /// All tokens in the parsed stream get `Span::call_site()` spans.
 ///
 /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
 /// change these errors into `LexError`s later.
- #[rustfmt::skip]
- impl<S: tt::Span> /*FromStr for*/ TokenStream<S> {
- // type Err = LexError;
-
- pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, LexError> {
+ impl<S: tt::Span> TokenStream<S> {
+ pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
 let subtree =
- mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
+ mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;

 Ok(TokenStream::with_subtree(subtree))
 }
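Note: with the commented-out FromStr scaffolding and the LexError alias gone, from_str is a plain inherent method returning Result<TokenStream<S>, String>, and the ok_or("lexing error")? line relies on the standard From<&str> for String conversion through the ? operator. A reduced sketch of that error-handling shape, where parse_to_tokens stands in for mbe::parse_to_token_tree_static_span:

// Sketch of the error shape in from_str: the parser returns an Option, which
// ok_or plus `?` turn into a Result<_, String> (&str converts via From).
fn parse_to_tokens(src: &str) -> Option<Vec<String>> {
    // Stand-in for the real parser: reject unbalanced parentheses.
    let balanced = src.matches('(').count() == src.matches(')').count();
    balanced.then(|| src.split_whitespace().map(str::to_owned).collect())
}

fn from_str(src: &str) -> Result<Vec<String>, String> {
    let tokens = parse_to_tokens(src).ok_or("lexing error")?;
    Ok(tokens)
}

fn main() {
    assert!(from_str("fn main ( )").is_ok());
    assert_eq!(from_str("fn main (").unwrap_err(), "lexing error");
}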
26 changes: 19 additions & 7 deletions crates/proc-macro-srv/src/tests/mod.rs
@@ -169,7 +169,7 @@ fn test_fn_like_mk_idents() {
 fn test_fn_like_macro_clone_literals() {
 assert_expand(
 "fn_like_clone_tokens",
- r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###,
+ r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
 expect![[r###"
 SUBTREE $$ 1 1
 LITERAL 1u16 1
@@ -181,11 +181,17 @@ fn test_fn_like_macro_clone_literals() {
 PUNCH , [alone] 1
 LITERAL 3.14f32 1
 PUNCH , [alone] 1
- LITERAL ""hello bridge"" 1
+ LITERAL "hello bridge" 1
 PUNCH , [alone] 1
- LITERAL ""suffixed""suffix 1
+ LITERAL "suffixed"suffix 1
 PUNCH , [alone] 1
- LITERAL r##"r##"raw"##"## 1"###]],
+ LITERAL r##"raw"## 1
+ PUNCH , [alone] 1
+ LITERAL 'a' 1
+ PUNCH , [alone] 1
+ LITERAL b'b' 1
+ PUNCH , [alone] 1
+ LITERAL c"null" 1"###]],
 expect![[r###"
 SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
 LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
@@ -197,11 +203,17 @@ fn test_fn_like_macro_clone_literals() {
 PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
 LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
 PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
- LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
 PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
- LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL "suffixed"suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
 PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
- LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]],
+ LITERAL r##"raw"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 73..74, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL 'a' SpanData { range: 75..78, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 78..79, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL b'b' SpanData { range: 80..84, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ PUNCH , [alone] SpanData { range: 84..85, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+ LITERAL c"null" SpanData { range: 86..93, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]],
 );
 }
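Note: the new expectations encode exactly what the fix changes. Each cloned literal now prints with its delimiters once (no more ""hello bridge"" or nested r##"…"##), and the input gains char, byte, and C-string cases. The span ranges in the second expect block are plain byte offsets into the new input string, which can be checked by hand:

// Sketch: the updated span expectations are just the byte ranges of each
// literal in the test input, e.g. c"null" is 7 bytes starting at offset 86.
fn main() {
    let input = r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###;
    assert_eq!(&input[63..73], r###"r##"raw"##"###);
    assert_eq!(&input[75..78], "'a'");
    assert_eq!(&input[86..93], "c\"null\"");
}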
