Make entrypoints more efficient part 2 #5621

Merged · 3 commits · Apr 15, 2023

185 changes: 143 additions & 42 deletions tokio-macros/src/entry.rs
@@ -1,7 +1,7 @@
use proc_macro::TokenStream;
use proc_macro2::Span;
use proc_macro2::{Span, TokenStream, TokenTree};
use quote::{quote, quote_spanned, ToTokens};
use syn::{parse::Parser, Ident, Path};
use syn::parse::{Parse, ParseStream, Parser};
use syn::{braced, Attribute, Ident, Path, Signature, Visibility};

// syn::AttributeArgs does not implement syn::Parse
type AttributeArgs = syn::punctuated::Punctuated<syn::Meta, syn::Token![,]>;
@@ -230,7 +230,7 @@ fn parse_bool(bool: syn::Lit, span: Span, field: &str) -> Result<bool, syn::Erro
}

fn build_config(
input: syn::ItemFn,
input: &ItemFn,
args: AttributeArgs,
is_test: bool,
rt_multi_thread: bool,
@@ -324,18 +324,13 @@ fn build_config(
config.build()
}

fn parse_knobs(mut input: syn::ItemFn, is_test: bool, config: FinalConfig) -> TokenStream {
fn parse_knobs(mut input: ItemFn, is_test: bool, config: FinalConfig) -> TokenStream {
input.sig.asyncness = None;

// If type mismatch occurs, the current rustc points to the last statement.
let (last_stmt_start_span, last_stmt_end_span) = {
let mut last_stmt = input
.block
.stmts
.last()
.map(ToTokens::into_token_stream)
.unwrap_or_default()
.into_iter();
let mut last_stmt = input.stmts.last().cloned().unwrap_or_default().into_iter();

// `Span` on stable Rust has a limitation that only points to the first
// token, not the whole tokens. We can work around this limitation by
// using the first/last span of the tokens like
@@ -373,10 +368,8 @@ fn parse_knobs(mut input: syn::ItemFn, is_test: bool, config: FinalConfig) -> To
quote! {}
};

let body = &input.block;
let brace_token = input.block.brace_token;
let body_ident = quote! { body };
let block_expr = quote_spanned! {last_stmt_end_span=>
let last_block = quote_spanned! {last_stmt_end_span=>
#[allow(clippy::expect_used, clippy::diverging_sub_expression)]
{
return #rt
Expand All @@ -387,6 +380,8 @@ fn parse_knobs(mut input: syn::ItemFn, is_test: bool, config: FinalConfig) -> To
}
};

let body = input.body();

// For test functions pin the body to the stack and use `Pin<&mut dyn
// Future>` to reduce the amount of `Runtime::block_on` (and related
// functions) copies we generate during compilation due to the generic
@@ -415,25 +410,11 @@ fn parse_knobs(mut input: syn::ItemFn, is_test: bool, config: FinalConfig) -> To
}
};

input.block = syn::parse2(quote! {
{
#body
#block_expr
}
})
.expect("Parsing failure");
input.block.brace_token = brace_token;

let result = quote! {
#header
#input
};

result.into()
input.into_tokens(header, body, last_block)
}

fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream {
tokens.extend(TokenStream::from(error.into_compile_error()));
tokens.extend(error.into_compile_error());
tokens
}

@@ -442,7 +423,7 @@ pub(crate) fn main(args: TokenStream, item: TokenStream, rt_multi_thread: bool)
// If any of the steps for this macro fail, we still want to expand to an item that is as close
// to the expected output as possible. This helps out IDEs such that completions and other
// related features keep working.
let input: syn::ItemFn = match syn::parse(item.clone()) {
let input: ItemFn = match syn::parse2(item.clone()) {
Ok(it) => it,
Err(e) => return token_stream_with_error(item, e),
};
@@ -452,8 +433,8 @@ pub(crate) fn main(args: TokenStream, item: TokenStream, rt_multi_thread: bool)
Err(syn::Error::new_spanned(&input.sig.ident, msg))
} else {
AttributeArgs::parse_terminated
.parse(args)
.and_then(|args| build_config(input.clone(), args, false, rt_multi_thread))
.parse2(args)
.and_then(|args| build_config(&input, args, false, rt_multi_thread))
};

match config {
@@ -466,25 +447,145 @@ pub(crate) fn test(args: TokenStream, item: TokenStream, rt_multi_thread: bool)
// If any of the steps for this macro fail, we still want to expand to an item that is as close
// to the expected output as possible. This helps out IDEs such that completions and other
// related features keep working.
let input: syn::ItemFn = match syn::parse(item.clone()) {
let input: ItemFn = match syn::parse2(item.clone()) {
Ok(it) => it,
Err(e) => return token_stream_with_error(item, e),
};
let config = if let Some(attr) = input
.attrs
.iter()
.find(|attr| attr.meta.path().is_ident("test"))
{
let config = if let Some(attr) = input.attrs().find(|attr| attr.meta.path().is_ident("test")) {
let msg = "second test attribute is supplied";
Err(syn::Error::new_spanned(attr, msg))
} else {
AttributeArgs::parse_terminated
.parse(args)
.and_then(|args| build_config(input.clone(), args, true, rt_multi_thread))
.parse2(args)
.and_then(|args| build_config(&input, args, true, rt_multi_thread))
};

match config {
Ok(config) => parse_knobs(input, true, config),
Err(e) => token_stream_with_error(parse_knobs(input, true, DEFAULT_ERROR_CONFIG), e),
}
}

struct ItemFn {
outer_attrs: Vec<Attribute>,
vis: Visibility,
sig: Signature,
brace_token: syn::token::Brace,
inner_attrs: Vec<Attribute>,
stmts: Vec<proc_macro2::TokenStream>,
}

impl ItemFn {
/// Access all attributes of the function item.
fn attrs(&self) -> impl Iterator<Item = &Attribute> {
self.outer_attrs.iter().chain(self.inner_attrs.iter())
}

/// Get the body of the function item in a manner so that it can be
/// conveniently used with the `quote!` macro.
fn body(&self) -> Body<'_> {
Body {
brace_token: self.brace_token,
stmts: &self.stmts,
}
}

/// Convert our local function item into a token stream.
fn into_tokens(
self,
header: proc_macro2::TokenStream,
body: proc_macro2::TokenStream,
last_block: proc_macro2::TokenStream,
) -> TokenStream {
let mut tokens = proc_macro2::TokenStream::new();
header.to_tokens(&mut tokens);

// Outer attributes are simply streamed as-is.
for attr in self.outer_attrs {
attr.to_tokens(&mut tokens);
}

// Inner attributes require extra care: since they're not supported on
// blocks (which is what we expand into), we instead lift them
// outside of the function. This matches the behaviour of `syn`.
for mut attr in self.inner_attrs {
attr.style = syn::AttrStyle::Outer;
attr.to_tokens(&mut tokens);
}

self.vis.to_tokens(&mut tokens);
self.sig.to_tokens(&mut tokens);

self.brace_token.surround(&mut tokens, |tokens| {
body.to_tokens(tokens);
last_block.to_tokens(tokens);
});

tokens
}
}

impl Parse for ItemFn {
#[inline]
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
// This parse implementation has been largely lifted from `syn`, with
// the exception of:
// * We don't have access to the plumbing necessary to parse inner
// attributes in-place.
// * We do our own statement parsing to avoid recursively parsing
// entire statements and only look for the parts we're interested in.

let outer_attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let sig: Signature = input.parse()?;

let content;
let brace_token = braced!(content in input);
let inner_attrs = Attribute::parse_inner(&content)?;

let mut buf = proc_macro2::TokenStream::new();
let mut stmts = Vec::new();

while !content.is_empty() {
if let Some(semi) = content.parse::<Option<syn::Token![;]>>()? {
semi.to_tokens(&mut buf);
stmts.push(buf);
buf = proc_macro2::TokenStream::new();
continue;
}

// Parse a single token tree and extend our current buffer with it.
// This avoids parsing the entire content of the sub-tree.
buf.extend([content.parse::<TokenTree>()?]);
}

if !buf.is_empty() {
stmts.push(buf);
}

Ok(Self {
outer_attrs,
vis,
sig,
brace_token,
inner_attrs,
stmts,
})
}
}

struct Body<'a> {
brace_token: syn::token::Brace,
// Statements, with terminating `;`.
stmts: &'a [TokenStream],
}

impl ToTokens for Body<'_> {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
self.brace_token.surround(tokens, |tokens| {
for stmt in self.stmts {
stmt.to_tokens(tokens);
}
})
}
}
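
The most interesting part of the `entry.rs` change is the hand-rolled `Parse` impl: instead of letting `syn` recursively parse every statement of the function body into `syn::Stmt`, the body is consumed one `TokenTree` at a time and split on top-level `;`. The sketch below is a minimal, standalone reproduction of that idea, not the PR's code; the names `split_stmts` and the example body are made up for illustration, and it assumes the `proc-macro2`, `quote`, and `syn` crates.

```rust
// Standalone sketch of the statement-splitting idea from `impl Parse for ItemFn`:
// split a body into top-level statements by scanning for `;` at the `TokenTree`
// level, without recursing into nested blocks.
use proc_macro2::{TokenStream, TokenTree};
use quote::ToTokens;
use syn::parse::{ParseStream, Parser};

fn split_stmts(input: ParseStream<'_>) -> syn::Result<Vec<TokenStream>> {
    let mut stmts = Vec::new();
    let mut buf = TokenStream::new();

    while !input.is_empty() {
        // A `;` at this level terminates the current statement buffer.
        if let Some(semi) = input.parse::<Option<syn::Token![;]>>()? {
            semi.to_tokens(&mut buf);
            stmts.push(std::mem::take(&mut buf));
            continue;
        }
        // Otherwise take one token tree verbatim. Braced and parenthesized
        // groups arrive as a single tree, so a `;` inside a nested block or
        // closure never splits the outer statement.
        buf.extend([input.parse::<TokenTree>()?]);
    }

    // A trailing expression without `;` still counts as the last statement.
    if !buf.is_empty() {
        stmts.push(buf);
    }
    Ok(stmts)
}

fn main() -> syn::Result<()> {
    let body: TokenStream =
        "let mut v = vec![1, 2]; v.iter().for_each(|x| { let _ = x; }); v.len()"
            .parse()
            .unwrap();
    let stmts = split_stmts.parse2(body)?;
    // Three top-level statements; the `;` inside the closure body is ignored.
    assert_eq!(stmts.len(), 3);
    Ok(())
}
```

Because the splitter never builds `syn::Stmt` values, the macro avoids re-parsing every expression in the user's function, which appears to be the point of the `ItemFn` rewrite above.
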
8 changes: 4 additions & 4 deletions tokio-macros/src/lib.rs
@@ -204,7 +204,7 @@ use proc_macro::TokenStream;
#[proc_macro_attribute]
#[cfg(not(test))] // Work around for rust-lang/rust#62127
pub fn main(args: TokenStream, item: TokenStream) -> TokenStream {
entry::main(args, item, true)
entry::main(args.into(), item.into(), true).into()
}

/// Marks async function to be executed by selected runtime. This macro helps set up a `Runtime`
@@ -269,7 +269,7 @@ pub fn main(args: TokenStream, item: TokenStream) -> TokenStream {
#[proc_macro_attribute]
#[cfg(not(test))] // Work around for rust-lang/rust#62127
pub fn main_rt(args: TokenStream, item: TokenStream) -> TokenStream {
entry::main(args, item, false)
entry::main(args.into(), item.into(), false).into()
}

/// Marks async function to be executed by runtime, suitable to test environment.
@@ -427,7 +427,7 @@ pub fn main_rt(args: TokenStream, item: TokenStream) -> TokenStream {
/// ```
#[proc_macro_attribute]
pub fn test(args: TokenStream, item: TokenStream) -> TokenStream {
entry::test(args, item, true)
entry::test(args.into(), item.into(), true).into()
}

/// Marks async function to be executed by runtime, suitable to test environment
@@ -442,7 +442,7 @@ pub fn test(args: TokenStream, item: TokenStream) -> TokenStream {
/// ```
#[proc_macro_attribute]
pub fn test_rt(args: TokenStream, item: TokenStream) -> TokenStream {
entry::test(args, item, false)
entry::test(args.into(), item.into(), false).into()
}

/// Always fails with the error message below.
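
The `lib.rs` side of the change moves the `proc_macro::TokenStream` ↔ `proc_macro2::TokenStream` conversions to the outermost `#[proc_macro_attribute]` wrappers, so everything inside `entry.rs` works on `proc_macro2` types only. Below is a minimal sketch of that boundary pattern, not tokio's code: the names `my_attr` and `expand` are hypothetical, and it assumes a crate with `proc-macro = true` plus a `proc-macro2` dependency.

```rust
// lib.rs of a proc-macro crate: convert at the boundary, keep internals on
// proc_macro2 so they can run outside an actual macro expansion.
use proc_macro::TokenStream;

#[proc_macro_attribute]
pub fn my_attr(args: TokenStream, item: TokenStream) -> TokenStream {
    // `.into()` both ways: proc_macro2 provides From impls in both directions.
    expand(args.into(), item.into()).into()
}

// Internal logic never names `proc_macro::TokenStream`, mirroring how
// `entry::main`/`entry::test` now take and return `proc_macro2::TokenStream`.
fn expand(
    _args: proc_macro2::TokenStream,
    item: proc_macro2::TokenStream,
) -> proc_macro2::TokenStream {
    // Real code would rewrite `item` here; pass it through for the sketch.
    item
}
```

One practical benefit of this split: `proc_macro::TokenStream` is only available while the compiler is expanding a macro, so keeping it out of the internals lets those functions be called from ordinary unit tests.
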