From 508959cfd5c33db0c19b3a7cd06a295ede35ee84 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Thu, 17 Feb 2022 16:08:34 +0100 Subject: [PATCH 01/24] Make processing of entrypoints more efficient --- .../tests/fail/macros_invalid_input.stderr | 58 +-- tokio-macros/Cargo.toml | 7 - tokio-macros/src/entry.rs | 428 ------------------ tokio-macros/src/entry/mod.rs | 51 +++ tokio-macros/src/entry/output.rs | 310 +++++++++++++ tokio-macros/src/entry/parser.rs | 254 +++++++++++ tokio-macros/src/error.rs | 50 ++ tokio-macros/src/lib.rs | 54 ++- tokio-macros/src/parsing.rs | 188 ++++++++ tokio-macros/src/select.rs | 179 ++++---- tokio-macros/src/to_tokens.rs | 226 +++++++++ tokio-macros/src/token_stream.rs | 58 +++ tokio/src/macros/select.rs | 2 +- 13 files changed, 1279 insertions(+), 586 deletions(-) delete mode 100644 tokio-macros/src/entry.rs create mode 100644 tokio-macros/src/entry/mod.rs create mode 100644 tokio-macros/src/entry/output.rs create mode 100644 tokio-macros/src/entry/parser.rs create mode 100644 tokio-macros/src/error.rs create mode 100644 tokio-macros/src/parsing.rs create mode 100644 tokio-macros/src/to_tokens.rs create mode 100644 tokio-macros/src/token_stream.rs diff --git a/tests-build/tests/fail/macros_invalid_input.stderr b/tests-build/tests/fail/macros_invalid_input.stderr index 11337a94fe5..d595c8cf29a 100644 --- a/tests-build/tests/fail/macros_invalid_input.stderr +++ b/tests-build/tests/fail/macros_invalid_input.stderr @@ -1,71 +1,53 @@ -error: the `async` keyword is missing from the function declaration - --> $DIR/macros_invalid_input.rs:4:1 +error: functions marked with `#[tokio::main]` must be `async` + --> tests/fail/macros_invalid_input.rs:4:1 | 4 | fn main_is_not_async() {} | ^^ -error: Unknown attribute foo is specified; expected one of: `flavor`, `worker_threads`, `start_paused` - --> $DIR/macros_invalid_input.rs:6:15 +error: unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused` + --> tests/fail/macros_invalid_input.rs:6:15 | 6 | #[tokio::main(foo)] | ^^^ -error: Must have specified ident - --> $DIR/macros_invalid_input.rs:9:15 +error: unknown option `threadpool`, expected one of: `flavor`, `worker_threads`, `start_paused` + --> tests/fail/macros_invalid_input.rs:9:15 | 9 | #[tokio::main(threadpool::bar)] - | ^^^^^^^^^^^^^^^ + | ^^^^^^^^^^ -error: the `async` keyword is missing from the function declaration - --> $DIR/macros_invalid_input.rs:13:1 +error: functions marked with `#[tokio::test]` must be `async` + --> tests/fail/macros_invalid_input.rs:13:1 | 13 | fn test_is_not_async() {} | ^^ -error: Unknown attribute foo is specified; expected one of: `flavor`, `worker_threads`, `start_paused` - --> $DIR/macros_invalid_input.rs:15:15 +error: unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused` + --> tests/fail/macros_invalid_input.rs:15:15 | 15 | #[tokio::test(foo)] | ^^^ -error: Unknown attribute foo is specified; expected one of: `flavor`, `worker_threads`, `start_paused` - --> $DIR/macros_invalid_input.rs:18:15 +error: unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused` + --> tests/fail/macros_invalid_input.rs:18:15 | 18 | #[tokio::test(foo = 123)] - | ^^^^^^^^^ + | ^^^ -error: Failed to parse value of `flavor` as string. 
- --> $DIR/macros_invalid_input.rs:21:24 +error: no such runtime flavor, the runtime flavors are: "current_thread", "multi_thread" + --> tests/fail/macros_invalid_input.rs:21:24 | 21 | #[tokio::test(flavor = 123)] | ^^^ -error: No such runtime flavor `foo`. The runtime flavors are `current_thread` and `multi_thread`. - --> $DIR/macros_invalid_input.rs:24:24 +error: no such runtime flavor, the runtime flavors are: "current_thread", "multi_thread" + --> tests/fail/macros_invalid_input.rs:24:24 | 24 | #[tokio::test(flavor = "foo")] | ^^^^^ -error: The `start_paused` option requires the `current_thread` runtime flavor. Use `#[tokio::test(flavor = "current_thread")]` - --> $DIR/macros_invalid_input.rs:27:55 - | -27 | #[tokio::test(flavor = "multi_thread", start_paused = false)] - | ^^^^^ - -error: Failed to parse value of `worker_threads` as integer. - --> $DIR/macros_invalid_input.rs:30:57 - | -30 | #[tokio::test(flavor = "multi_thread", worker_threads = "foo")] - | ^^^^^ - -error: The `worker_threads` option requires the `multi_thread` runtime flavor. Use `#[tokio::test(flavor = "multi_thread")]` - --> $DIR/macros_invalid_input.rs:33:59 +error: the `worker_threads` option requires the "multi_thread" runtime flavor. Use `#[tokio::test(flavor = "multi_thread")]` + --> tests/fail/macros_invalid_input.rs:33:59 | 33 | #[tokio::test(flavor = "current_thread", worker_threads = 4)] | ^ - -error: second test attribute is supplied - --> $DIR/macros_invalid_input.rs:37:1 - | -37 | #[test] - | ^^^^^^^ diff --git a/tokio-macros/Cargo.toml b/tokio-macros/Cargo.toml index b17844f3798..53234afccea 100644 --- a/tokio-macros/Cargo.toml +++ b/tokio-macros/Cargo.toml @@ -19,13 +19,6 @@ categories = ["asynchronous"] [lib] proc-macro = true -[features] - -[dependencies] -proc-macro2 = "1.0.7" -quote = "1" -syn = { version = "1.0.56", features = ["full"] } - [dev-dependencies] tokio = { version = "1.0.0", path = "../tokio", features = ["full"] } diff --git a/tokio-macros/src/entry.rs b/tokio-macros/src/entry.rs deleted file mode 100644 index 5cb4a49b430..00000000000 --- a/tokio-macros/src/entry.rs +++ /dev/null @@ -1,428 +0,0 @@ -use proc_macro::TokenStream; -use proc_macro2::Span; -use quote::{quote, quote_spanned, ToTokens}; -use syn::parse::Parser; - -// syn::AttributeArgs does not implement syn::Parse -type AttributeArgs = syn::punctuated::Punctuated; - -#[derive(Clone, Copy, PartialEq)] -enum RuntimeFlavor { - CurrentThread, - Threaded, -} - -impl RuntimeFlavor { - fn from_str(s: &str) -> Result { - match s { - "current_thread" => Ok(RuntimeFlavor::CurrentThread), - "multi_thread" => Ok(RuntimeFlavor::Threaded), - "single_thread" => Err("The single threaded runtime flavor is called `current_thread`.".to_string()), - "basic_scheduler" => Err("The `basic_scheduler` runtime flavor has been renamed to `current_thread`.".to_string()), - "threaded_scheduler" => Err("The `threaded_scheduler` runtime flavor has been renamed to `multi_thread`.".to_string()), - _ => Err(format!("No such runtime flavor `{}`. 
The runtime flavors are `current_thread` and `multi_thread`.", s)), - } - } -} - -struct FinalConfig { - flavor: RuntimeFlavor, - worker_threads: Option, - start_paused: Option, -} - -/// Config used in case of the attribute not being able to build a valid config -const DEFAULT_ERROR_CONFIG: FinalConfig = FinalConfig { - flavor: RuntimeFlavor::CurrentThread, - worker_threads: None, - start_paused: None, -}; - -struct Configuration { - rt_multi_thread_available: bool, - default_flavor: RuntimeFlavor, - flavor: Option, - worker_threads: Option<(usize, Span)>, - start_paused: Option<(bool, Span)>, - is_test: bool, -} - -impl Configuration { - fn new(is_test: bool, rt_multi_thread: bool) -> Self { - Configuration { - rt_multi_thread_available: rt_multi_thread, - default_flavor: match is_test { - true => RuntimeFlavor::CurrentThread, - false => RuntimeFlavor::Threaded, - }, - flavor: None, - worker_threads: None, - start_paused: None, - is_test, - } - } - - fn set_flavor(&mut self, runtime: syn::Lit, span: Span) -> Result<(), syn::Error> { - if self.flavor.is_some() { - return Err(syn::Error::new(span, "`flavor` set multiple times.")); - } - - let runtime_str = parse_string(runtime, span, "flavor")?; - let runtime = - RuntimeFlavor::from_str(&runtime_str).map_err(|err| syn::Error::new(span, err))?; - self.flavor = Some(runtime); - Ok(()) - } - - fn set_worker_threads( - &mut self, - worker_threads: syn::Lit, - span: Span, - ) -> Result<(), syn::Error> { - if self.worker_threads.is_some() { - return Err(syn::Error::new( - span, - "`worker_threads` set multiple times.", - )); - } - - let worker_threads = parse_int(worker_threads, span, "worker_threads")?; - if worker_threads == 0 { - return Err(syn::Error::new(span, "`worker_threads` may not be 0.")); - } - self.worker_threads = Some((worker_threads, span)); - Ok(()) - } - - fn set_start_paused(&mut self, start_paused: syn::Lit, span: Span) -> Result<(), syn::Error> { - if self.start_paused.is_some() { - return Err(syn::Error::new(span, "`start_paused` set multiple times.")); - } - - let start_paused = parse_bool(start_paused, span, "start_paused")?; - self.start_paused = Some((start_paused, span)); - Ok(()) - } - - fn macro_name(&self) -> &'static str { - if self.is_test { - "tokio::test" - } else { - "tokio::main" - } - } - - fn build(&self) -> Result { - let flavor = self.flavor.unwrap_or(self.default_flavor); - use RuntimeFlavor::*; - - let worker_threads = match (flavor, self.worker_threads) { - (CurrentThread, Some((_, worker_threads_span))) => { - let msg = format!( - "The `worker_threads` option requires the `multi_thread` runtime flavor. Use `#[{}(flavor = \"multi_thread\")]`", - self.macro_name(), - ); - return Err(syn::Error::new(worker_threads_span, msg)); - } - (CurrentThread, None) => None, - (Threaded, worker_threads) if self.rt_multi_thread_available => { - worker_threads.map(|(val, _span)| val) - } - (Threaded, _) => { - let msg = if self.flavor.is_none() { - "The default runtime flavor is `multi_thread`, but the `rt-multi-thread` feature is disabled." - } else { - "The runtime flavor `multi_thread` requires the `rt-multi-thread` feature." - }; - return Err(syn::Error::new(Span::call_site(), msg)); - } - }; - - let start_paused = match (flavor, self.start_paused) { - (Threaded, Some((_, start_paused_span))) => { - let msg = format!( - "The `start_paused` option requires the `current_thread` runtime flavor. 
Use `#[{}(flavor = \"current_thread\")]`", - self.macro_name(), - ); - return Err(syn::Error::new(start_paused_span, msg)); - } - (CurrentThread, Some((start_paused, _))) => Some(start_paused), - (_, None) => None, - }; - - Ok(FinalConfig { - flavor, - worker_threads, - start_paused, - }) - } -} - -fn parse_int(int: syn::Lit, span: Span, field: &str) -> Result { - match int { - syn::Lit::Int(lit) => match lit.base10_parse::() { - Ok(value) => Ok(value), - Err(e) => Err(syn::Error::new( - span, - format!("Failed to parse value of `{}` as integer: {}", field, e), - )), - }, - _ => Err(syn::Error::new( - span, - format!("Failed to parse value of `{}` as integer.", field), - )), - } -} - -fn parse_string(int: syn::Lit, span: Span, field: &str) -> Result { - match int { - syn::Lit::Str(s) => Ok(s.value()), - syn::Lit::Verbatim(s) => Ok(s.to_string()), - _ => Err(syn::Error::new( - span, - format!("Failed to parse value of `{}` as string.", field), - )), - } -} - -fn parse_bool(bool: syn::Lit, span: Span, field: &str) -> Result { - match bool { - syn::Lit::Bool(b) => Ok(b.value), - _ => Err(syn::Error::new( - span, - format!("Failed to parse value of `{}` as bool.", field), - )), - } -} - -fn build_config( - input: syn::ItemFn, - args: AttributeArgs, - is_test: bool, - rt_multi_thread: bool, -) -> Result { - if input.sig.asyncness.is_none() { - let msg = "the `async` keyword is missing from the function declaration"; - return Err(syn::Error::new_spanned(input.sig.fn_token, msg)); - } - - let mut config = Configuration::new(is_test, rt_multi_thread); - let macro_name = config.macro_name(); - - for arg in args { - match arg { - syn::NestedMeta::Meta(syn::Meta::NameValue(namevalue)) => { - let ident = namevalue - .path - .get_ident() - .ok_or_else(|| { - syn::Error::new_spanned(&namevalue, "Must have specified ident") - })? - .to_string() - .to_lowercase(); - match ident.as_str() { - "worker_threads" => { - config.set_worker_threads( - namevalue.lit.clone(), - syn::spanned::Spanned::span(&namevalue.lit), - )?; - } - "flavor" => { - config.set_flavor( - namevalue.lit.clone(), - syn::spanned::Spanned::span(&namevalue.lit), - )?; - } - "start_paused" => { - config.set_start_paused( - namevalue.lit.clone(), - syn::spanned::Spanned::span(&namevalue.lit), - )?; - } - "core_threads" => { - let msg = "Attribute `core_threads` is renamed to `worker_threads`"; - return Err(syn::Error::new_spanned(namevalue, msg)); - } - name => { - let msg = format!( - "Unknown attribute {} is specified; expected one of: `flavor`, `worker_threads`, `start_paused`", - name, - ); - return Err(syn::Error::new_spanned(namevalue, msg)); - } - } - } - syn::NestedMeta::Meta(syn::Meta::Path(path)) => { - let name = path - .get_ident() - .ok_or_else(|| syn::Error::new_spanned(&path, "Must have specified ident"))? 
- .to_string() - .to_lowercase(); - let msg = match name.as_str() { - "threaded_scheduler" | "multi_thread" => { - format!( - "Set the runtime flavor with #[{}(flavor = \"multi_thread\")].", - macro_name - ) - } - "basic_scheduler" | "current_thread" | "single_threaded" => { - format!( - "Set the runtime flavor with #[{}(flavor = \"current_thread\")].", - macro_name - ) - } - "flavor" | "worker_threads" | "start_paused" => { - format!("The `{}` attribute requires an argument.", name) - } - name => { - format!("Unknown attribute {} is specified; expected one of: `flavor`, `worker_threads`, `start_paused`", name) - } - }; - return Err(syn::Error::new_spanned(path, msg)); - } - other => { - return Err(syn::Error::new_spanned( - other, - "Unknown attribute inside the macro", - )); - } - } - } - - config.build() -} - -fn parse_knobs(mut input: syn::ItemFn, is_test: bool, config: FinalConfig) -> TokenStream { - input.sig.asyncness = None; - - // If type mismatch occurs, the current rustc points to the last statement. - let (last_stmt_start_span, last_stmt_end_span) = { - let mut last_stmt = input - .block - .stmts - .last() - .map(ToTokens::into_token_stream) - .unwrap_or_default() - .into_iter(); - // `Span` on stable Rust has a limitation that only points to the first - // token, not the whole tokens. We can work around this limitation by - // using the first/last span of the tokens like - // `syn::Error::new_spanned` does. - let start = last_stmt.next().map_or_else(Span::call_site, |t| t.span()); - let end = last_stmt.last().map_or(start, |t| t.span()); - (start, end) - }; - - let mut rt = match config.flavor { - RuntimeFlavor::CurrentThread => quote_spanned! {last_stmt_start_span=> - tokio::runtime::Builder::new_current_thread() - }, - RuntimeFlavor::Threaded => quote_spanned! {last_stmt_start_span=> - tokio::runtime::Builder::new_multi_thread() - }, - }; - if let Some(v) = config.worker_threads { - rt = quote! { #rt.worker_threads(#v) }; - } - if let Some(v) = config.start_paused { - rt = quote! { #rt.start_paused(#v) }; - } - - let header = if is_test { - quote! { - #[::core::prelude::v1::test] - } - } else { - quote! {} - }; - - let body = &input.block; - let brace_token = input.block.brace_token; - let (tail_return, tail_semicolon) = match body.stmts.last() { - Some(syn::Stmt::Semi(syn::Expr::Return(_), _)) => (quote! { return }, quote! { ; }), - Some(syn::Stmt::Semi(..)) | Some(syn::Stmt::Local(..)) | None => { - match &input.sig.output { - syn::ReturnType::Type(_, ty) if matches!(&**ty, syn::Type::Tuple(ty) if ty.elems.is_empty()) => - { - (quote! {}, quote! { ; }) // unit - } - syn::ReturnType::Default => (quote! {}, quote! { ; }), // unit - syn::ReturnType::Type(..) => (quote! {}, quote! {}), // ! or another - } - } - _ => (quote! {}, quote! {}), - }; - input.block = syn::parse2(quote_spanned! {last_stmt_end_span=> - { - let body = async #body; - #[allow(clippy::expect_used)] - #tail_return #rt - .enable_all() - .build() - .expect("Failed building the Runtime") - .block_on(body)#tail_semicolon - } - }) - .expect("Parsing failure"); - input.block.brace_token = brace_token; - - let result = quote! 
{ - #header - #input - }; - - result.into() -} - -fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream { - tokens.extend(TokenStream::from(error.into_compile_error())); - tokens -} - -#[cfg(not(test))] // Work around for rust-lang/rust#62127 -pub(crate) fn main(args: TokenStream, item: TokenStream, rt_multi_thread: bool) -> TokenStream { - // If any of the steps for this macro fail, we still want to expand to an item that is as close - // to the expected output as possible. This helps out IDEs such that completions and other - // related features keep working. - let input: syn::ItemFn = match syn::parse(item.clone()) { - Ok(it) => it, - Err(e) => return token_stream_with_error(item, e), - }; - - let config = if input.sig.ident == "main" && !input.sig.inputs.is_empty() { - let msg = "the main function cannot accept arguments"; - Err(syn::Error::new_spanned(&input.sig.ident, msg)) - } else { - AttributeArgs::parse_terminated - .parse(args) - .and_then(|args| build_config(input.clone(), args, false, rt_multi_thread)) - }; - - match config { - Ok(config) => parse_knobs(input, false, config), - Err(e) => token_stream_with_error(parse_knobs(input, false, DEFAULT_ERROR_CONFIG), e), - } -} - -pub(crate) fn test(args: TokenStream, item: TokenStream, rt_multi_thread: bool) -> TokenStream { - // If any of the steps for this macro fail, we still want to expand to an item that is as close - // to the expected output as possible. This helps out IDEs such that completions and other - // related features keep working. - let input: syn::ItemFn = match syn::parse(item.clone()) { - Ok(it) => it, - Err(e) => return token_stream_with_error(item, e), - }; - let config = if let Some(attr) = input.attrs.iter().find(|attr| attr.path.is_ident("test")) { - let msg = "second test attribute is supplied"; - Err(syn::Error::new_spanned(&attr, msg)) - } else { - AttributeArgs::parse_terminated - .parse(args) - .and_then(|args| build_config(input.clone(), args, true, rt_multi_thread)) - }; - - match config { - Ok(config) => parse_knobs(input, true, config), - Err(e) => token_stream_with_error(parse_knobs(input, true, DEFAULT_ERROR_CONFIG), e), - } -} diff --git a/tokio-macros/src/entry/mod.rs b/tokio-macros/src/entry/mod.rs new file mode 100644 index 00000000000..ff9c1a11c6e --- /dev/null +++ b/tokio-macros/src/entry/mod.rs @@ -0,0 +1,51 @@ +mod output; +pub(crate) use self::output::{EntryKind, SupportsThreading}; + +mod parser; + +use crate::error::Error; +use crate::parsing::Buf; +use crate::to_tokens::{from_fn, ToTokens}; +use crate::token_stream::TokenStream; + +/// Configurable macro code to build entry. 
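+///
+/// As a rough sketch (not the exact token-for-token expansion, and assuming
+/// the default "multi_thread" flavor), an entry point such as:
+///
+/// ```ignore
+/// #[tokio::main]
+/// async fn main() {
+///     println!("hello");
+/// }
+/// ```
+///
+/// is rewritten into something along the lines of:
+///
+/// ```ignore
+/// fn main() {
+///     tokio::runtime::Builder::new_multi_thread()
+///         .enable_all()
+///         .build()
+///         .expect("Failed building the Runtime")
+///         .block_on(async { println!("hello"); })
+/// }
+/// ```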
+pub(crate) fn build( + kind: EntryKind, + supports_threading: SupportsThreading, + args: proc_macro::TokenStream, + item_stream: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + let mut buf = Buf::new(); + let mut errors = Vec::new(); + + let config = parser::ConfigParser::new(args, &mut buf, &mut errors); + let config = config.parse(kind, supports_threading); + + config.validate(kind, &mut errors); + + let item = parser::ItemParser::new(item_stream.clone(), &mut buf); + let item = item.parse(); + + item.validate(kind, &mut errors); + + let mut stream = TokenStream::default(); + + let (start, end) = item.block_spans(); + + item.expand_item(kind, config, start) + .to_tokens(&mut stream, end); + format_item_errors(errors).to_tokens(&mut stream, end); + + stream.into_token_stream() +} + +fn format_item_errors(errors: I) -> impl ToTokens +where + I: IntoIterator, +{ + from_fn(move |s| { + for error in errors { + s.write(error); + } + }) +} diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs new file mode 100644 index 00000000000..da59a56b918 --- /dev/null +++ b/tokio-macros/src/entry/output.rs @@ -0,0 +1,310 @@ +use std::ops; + +use proc_macro::{Delimiter, Span, TokenTree}; + +use crate::error::Error; +use crate::to_tokens::{bracketed, from_fn, parens, string, ToTokens, S}; +use crate::token_stream::TokenStream; + +#[derive(Default)] +pub(crate) struct TailState { + pub(crate) block: Option, + pub(crate) start: Option, + pub(crate) end: Option, + /// Indicates if last expression is a return. + pub(crate) return_: bool, +} + +#[derive(Debug, Clone, Copy)] +pub(crate) enum EntryKind { + // Because of how all entries in this crate that use this are marked with + // `#[cfg(not(test))]` this yields a warning when performing a test build. + #[allow(unused)] + Main, + Test, +} + +#[derive(Debug, Clone, Copy)] +pub(crate) enum SupportsThreading { + Supported, + NotSupported, +} + +impl EntryKind { + /// The name of the attribute used as the entry kind. + pub(crate) fn name(&self) -> &str { + match self { + EntryKind::Main => "tokio::main", + EntryKind::Test => "tokio::test", + } + } +} + +#[derive(Debug, Clone, Copy)] +pub(crate) enum RuntimeFlavor { + CurrentThread, + Threaded, +} + +impl RuntimeFlavor { + /// Parse a literal (as it appears in Rust code) as a runtime flavor. This + /// means that it includes quotes. + pub(crate) fn from_literal(s: &str) -> Result { + match s { + "\"current_thread\"" => Ok(RuntimeFlavor::CurrentThread), + "\"multi_thread\"" => Ok(RuntimeFlavor::Threaded), + "\"single_thread\"" => Err("the single threaded runtime flavor is called \"current_thread\""), + "\"basic_scheduler\"" => Err("the \"basic_scheduler\" runtime flavor has been renamed to \"current_thread\""), + "\"threaded_scheduler\"" => Err("the \"threaded_scheduler\" runtime flavor has been renamed to \"multi_thread\""), + _ => Err("no such runtime flavor, the runtime flavors are: \"current_thread\", \"multi_thread\""), + } + } +} + +/// The parsed arguments output. +#[derive(Debug)] +pub(crate) struct Config { + pub(crate) supports_threading: SupportsThreading, + /// The default runtime flavor to use if left unspecified. + default_flavor: RuntimeFlavor, + /// The runtime flavor to use. + pub(crate) flavor: Option<(Span, RuntimeFlavor)>, + /// The number of worker threads to configure. + pub(crate) worker_threads: Option, + /// If the runtime should start paused. 
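+ ///
+ /// Setting this to `true` together with the "multi_thread" flavor is
+ /// rejected by [`Config::validate`].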
+ pub(crate) start_paused: Option, +} + +impl Config { + pub(crate) fn new(kind: EntryKind, supports_threading: SupportsThreading) -> Self { + Self { + supports_threading, + default_flavor: match (kind, supports_threading) { + (EntryKind::Main, SupportsThreading::Supported) => RuntimeFlavor::Threaded, + (EntryKind::Main, SupportsThreading::NotSupported) => RuntimeFlavor::CurrentThread, + (EntryKind::Test, _) => RuntimeFlavor::CurrentThread, + }, + flavor: None, + worker_threads: None, + start_paused: None, + } + } + + pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec) { + match (self.flavor(), &self.start_paused) { + (RuntimeFlavor::Threaded, Some(tt)) => { + if tt.to_string() == "true" { + errors.push(Error::new(tt.span(), format!("the `start_paused` option requires the \"current_thread\" runtime flavor. Use `#[{}(flavor = \"current_thread\")]`", kind.name()))); + } + } + _ => {} + } + + match (self.flavor(), &self.worker_threads) { + (RuntimeFlavor::CurrentThread, Some(tt)) => { + errors.push(Error::new(tt.span(), format!("the `worker_threads` option requires the \"multi_thread\" runtime flavor. Use `#[{}(flavor = \"multi_thread\")]`", kind.name()))); + } + _ => {} + } + } + + /// Get the runtime flavor to use. + fn flavor(&self) -> RuntimeFlavor { + match &self.flavor { + Some((_, flavor)) => *flavor, + None => self.default_flavor, + } + } +} + +/// The parsed item output. +pub(crate) struct ItemOutput { + tokens: Vec, + pub(crate) has_async: bool, + signature: Option>, + block: Option>, + tail_state: TailState, +} + +impl ItemOutput { + pub(crate) fn new( + tokens: Vec, + has_async: bool, + signature: Option>, + block: Option>, + tail_state: TailState, + ) -> Self { + Self { + tokens, + has_async, + signature, + block, + tail_state, + } + } + + /// Validate the parsed item. + pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec) { + if !self.has_async { + let span = self + .signature + .as_ref() + .and_then(|s| self.tokens.get(s.clone())) + .and_then(|t| t.first()) + .map(|tt| tt.span()) + .unwrap_or_else(Span::call_site); + + errors.push(Error::new( + span, + format!("functions marked with `#[{}]` must be `async`", kind.name()), + )); + } + } + + pub(crate) fn block_spans(&self) -> (Span, Span) { + let start = self + .tail_state + .start + .or(self.tail_state.block) + .unwrap_or_else(Span::call_site); + let end = self + .tail_state + .end + .or(self.tail_state.block) + .unwrap_or_else(Span::call_site); + (start, end) + } + + /// Expand into a function item. + pub(crate) fn expand_item( + &self, + kind: EntryKind, + config: Config, + start: Span, + ) -> impl ToTokens + '_ { + from_fn(move |s| { + if let (Some(signature), Some(block)) = (self.signature.clone(), self.block.clone()) { + let block_span = self.tail_state.block.unwrap_or_else(Span::call_site); + + s.write(( + self.entry_kind_attribute(kind), + &self.tokens[signature], + group_with_span( + Delimiter::Brace, + self.item_body(config, block, start), + block_span, + ), + )) + } else { + s.write(&self.tokens[..]); + } + }) + } + + /// Generate attribute associated with entry kind. + fn entry_kind_attribute(&self, kind: EntryKind) -> impl ToTokens { + from_fn(move |s| { + if let EntryKind::Test = kind { + s.write(( + '#', + bracketed((S, "core", S, "prelude", S, "v1", S, "test")), + )) + } + }) + } + + /// Expanded item body. + fn item_body( + &self, + config: Config, + block: ops::Range, + start: Span, + ) -> impl ToTokens + '_ { + // NB: override the first generated part with the detected start span. 
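+ //
+ // The tokens written below assemble a builder expression of roughly this
+ // shape (a sketch; which constructor and options appear depends on
+ // `config`):
+ //
+ //     tokio::runtime::Builder::new_current_thread()
+ //         .start_paused(..)
+ //         .worker_threads(..)
+ //         .enable_all()
+ //         .build()
+ //         .expect("Failed building the Runtime")
+ //         .block_on(async { /* original function body */ })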
+ let rt = ("tokio", S, "runtime", S, "Builder"); + + let rt = from_fn(move |s| { + s.write(rt); + + match config.flavor() { + RuntimeFlavor::CurrentThread => { + s.write((S, "new_current_thread", parens(()))); + } + RuntimeFlavor::Threaded => { + s.write((S, "new_multi_thread", parens(()))); + } + } + + if let Some(start_paused) = config.start_paused { + s.write(('.', "start_paused", parens(start_paused))); + } + + if let Some(worker_threads) = config.worker_threads { + s.write(('.', "worker_threads", parens(worker_threads))); + } + }); + + let build = ( + (rt, '.', "enable_all", parens(()), '.', "build", parens(())), + '.', + "expect", + parens(string("Failed building the Runtime")), + ); + + from_fn(move |s| { + if self.tail_state.return_ { + s.write(( + with_span(("return", build, '.', "block_on"), start), + parens(("async", &self.tokens[block])), + ';', + )); + } else { + s.write(( + with_span((build, '.', "block_on"), start), + parens(("async", &self.tokens[block])), + )); + } + }) + } +} + +/// Insert the given tokens with a custom span. +pub(crate) fn with_span(inner: T, span: Span) -> impl ToTokens +where + T: ToTokens, +{ + WithSpan(inner, span) +} + +struct WithSpan(T, Span); + +impl ToTokens for WithSpan +where + T: ToTokens, +{ + fn to_tokens(self, stream: &mut TokenStream, _: Span) { + self.0.to_tokens(stream, self.1); + } +} + +/// Construct a custom group with a custom span that is not inherited by its +/// children. +fn group_with_span(delimiter: Delimiter, inner: T, span: Span) -> impl ToTokens +where + T: ToTokens, +{ + GroupWithSpan(delimiter, inner, span) +} + +struct GroupWithSpan(Delimiter, T, Span); + +impl ToTokens for GroupWithSpan +where + T: ToTokens, +{ + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let checkpoint = stream.checkpoint(); + self.1.to_tokens(stream, span); + stream.group(self.2, self.0, checkpoint); + } +} diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs new file mode 100644 index 00000000000..e1019e45f4d --- /dev/null +++ b/tokio-macros/src/entry/parser.rs @@ -0,0 +1,254 @@ +use proc_macro::{Delimiter, Group, Literal, Span, TokenTree}; + +use crate::entry::output::{ + Config, EntryKind, ItemOutput, RuntimeFlavor, SupportsThreading, TailState, +}; +use crate::error::Error; +use crate::parsing::{BaseParser, Buf}; +use crate::parsing::{Punct, COMMA, EQ}; + +/// A parser for the arguments provided to an entry macro. +pub(crate) struct ConfigParser<'a> { + base: BaseParser<'a>, + errors: &'a mut Vec, +} + +impl<'a> ConfigParser<'a> { + /// Construct a new parser around the given token stream. + pub(crate) fn new( + stream: proc_macro::TokenStream, + buf: &'a mut Buf, + errors: &'a mut Vec, + ) -> Self { + Self { + base: BaseParser::new(stream, buf), + errors, + } + } + + /// Parse and produce the corresponding token stream. + pub(crate) fn parse( + mut self, + kind: EntryKind, + supports_threading: SupportsThreading, + ) -> Config { + let mut config = Config::new(kind, supports_threading); + + while self.base.nth(0).is_some() { + if self.parse_option(&mut config).is_none() { + self.recover(); + continue; + } + + if !self.base.skip_punct(COMMA) { + break; + } + } + + if let Some(tt) = self.base.nth(0) { + self.errors.push(Error::new(tt.span(), "trailing token")); + } + + config + } + + /// Recover by parsing either to the next comma `,`, or end of input. + fn recover(&mut self) { + loop { + if let Some(p @ Punct { chars: COMMA, .. 
}) = self.base.peek_punct() { + self.base.step(p.len()); + break; + } + + if self.base.bump().is_none() { + break; + } + } + } + + /// Parse a single option. + fn parse_option(&mut self, config: &mut Config) -> Option<()> { + match self.base.bump() { + Some(TokenTree::Ident(ident)) => match self.base.buf.display_as_str(&ident) { + "worker_threads" => { + self.parse_eq()?; + config.worker_threads = Some(self.parse_token()?); + Some(()) + } + "start_paused" => { + self.parse_eq()?; + config.start_paused = Some(self.parse_token()?); + Some(()) + } + "flavor" => { + self.parse_eq()?; + let literal = self.parse_literal()?; + + let flavor = + match RuntimeFlavor::from_literal(self.base.buf.display_as_str(&literal)) { + Ok(flavor) => flavor, + Err(error) => { + self.errors.push(Error::new(literal.span(), error)); + return None; + } + }; + + if matches!( + (flavor, config.supports_threading), + (RuntimeFlavor::Threaded, SupportsThreading::NotSupported) + ) { + self.errors.push(Error::new( + ident.span(), + "the runtime flavor \"multi_thread\" requires the `rt-multi-thread` feature", + )); + } + + if let Some((existing, _)) = &config.flavor { + self.errors.push(Error::new( + ident.span(), + "the `flavor` option must only be used once", + )); + self.errors.push(Error::new( + existing.clone(), + "first use of the `flavor` here", + )); + } + + config.flavor = Some((ident.span(), flavor)); + Some(()) + } + "core_threads" => { + self.errors.push(Error::new( + ident.span(), + "the `core_threads` option is renamed to `worker_threads`", + )); + self.parse_eq()?; + self.parse_literal()?; + None + } + name => { + self.errors.push(Error::new(ident.span(), format!("unknown option `{}`, expected one of: `flavor`, `worker_threads`, `start_paused`", name))); + None + } + }, + tt => { + let span = tt.map(|tt| tt.span()).unwrap_or_else(Span::call_site); + self.errors.push(Error::new(span, "expected identifier")); + None + } + } + } + + /// Parse the next element as a literal value. + fn parse_literal(&mut self) -> Option { + match self.base.bump() { + Some(TokenTree::Literal(literal)) => Some(literal), + tt => { + let span = tt.map(|tt| tt.span()).unwrap_or_else(Span::call_site); + self.errors.push(Error::new(span, "expected literal")); + None + } + } + } + + /// Parse a token. + fn parse_token(&mut self) -> Option { + match self.base.bump() { + Some(t) => Some(t), + tt => { + let span = tt.map(|tt| tt.span()).unwrap_or_else(Span::call_site); + self.errors.push(Error::new(span, "expected token")); + None + } + } + } + + /// Parse the next element as an `=` punctuation. + fn parse_eq(&mut self) -> Option<()> { + match self.base.peek_punct()? { + p @ Punct { chars: EQ, .. } => { + self.base.step(p.len()); + Some(()) + } + p => { + self.errors + .push(Error::new(p.span, "expected assignment `=`")); + None + } + } + } +} + +/// A parser for the item annotated with an entry macro. +pub(crate) struct ItemParser<'a> { + base: BaseParser<'a>, +} + +impl<'a> ItemParser<'a> { + /// Construct a new parser around the given token stream. + pub(crate) fn new(stream: proc_macro::TokenStream, buf: &'a mut Buf) -> Self { + Self { + base: BaseParser::new(stream, buf), + } + } + + /// Parse and produce the corresponding token stream. 
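+ ///
+ /// This performs a single pass over the item, recording whether the
+ /// `async` keyword was seen, the token ranges covering the signature and
+ /// the braced body, and the spans of the trailing statement (used to
+ /// improve diagnostics when return types do not match).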
+ pub(crate) fn parse(mut self) -> ItemOutput { + let start = self.base.len(); + let mut signature = None; + let mut block = None; + + let mut has_async = false; + + let mut tail_state = TailState::default(); + + while let Some(tt) = self.base.bump() { + match tt { + TokenTree::Ident(ident) if self.base.buf.display_as_str(&ident) == "async" => { + // NB: intentionally skip over this token. + has_async = true; + } + TokenTree::Group(g) if matches!(g.delimiter(), Delimiter::Brace) => { + signature = Some(start..self.base.len()); + let start = self.base.len(); + tail_state.block = Some(g.span()); + self.find_last_stmt_range(&g, &mut tail_state); + self.base.push(TokenTree::Group(g)); + block = Some(start..self.base.len()); + } + tt => { + self.base.push(tt); + } + } + } + + let tokens = self.base.into_tokens(); + + ItemOutput::new(tokens, has_async, signature, block, tail_state) + } + + /// Find the range of spans that is defined by the last statement in the + /// block so that they can be used for the generated expression. + /// + /// This in turn improves upon diagnostics when return types do not match. + fn find_last_stmt_range(&mut self, g: &Group, tail_state: &mut TailState) { + let mut update = true; + + for tt in g.stream() { + let span = tt.span(); + tail_state.end = Some(span); + + match tt { + TokenTree::Punct(p) if p.as_char() == ';' => { + update = true; + } + tt => { + if std::mem::take(&mut update) { + tail_state.return_ = matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return"); + tail_state.start = Some(span); + } + } + } + } + } +} diff --git a/tokio-macros/src/error.rs b/tokio-macros/src/error.rs new file mode 100644 index 00000000000..6861a079854 --- /dev/null +++ b/tokio-macros/src/error.rs @@ -0,0 +1,50 @@ +use core::fmt; +use core::iter::once; +use core::iter::FromIterator; + +use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenTree}; + +use crate::to_tokens::ToTokens; +use crate::token_stream::TokenStream; + +/// Expand a message as an error. +pub(crate) fn expand(message: &str) -> proc_macro::TokenStream { + let error = Error::new(Span::call_site(), message); + let mut stream = TokenStream::default(); + error.to_tokens(&mut stream, Span::call_site()); + stream.into_token_stream() +} + +/// An error that can be raised during parsing which is associated with a span. 
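+///
+/// When turned back into tokens it expands to a `compile_error!` invocation
+/// at the recorded span, for example (sketch):
+///
+/// ```ignore
+/// compile_error! { "unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused`" }
+/// ```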
+#[derive(Debug)] +pub(crate) struct Error { + span: Span, + message: Box, +} + +impl Error { + pub(crate) fn new(span: Span, message: impl fmt::Display) -> Self { + Self { + span, + message: message.to_string().into(), + } + } +} + +impl ToTokens for Error { + fn to_tokens(self, stream: &mut TokenStream, _: Span) { + stream.push(TokenTree::Ident(Ident::new("compile_error", self.span))); + let mut exclamation = Punct::new('!', Spacing::Alone); + exclamation.set_span(self.span); + stream.push(TokenTree::Punct(exclamation)); + + let mut message = Literal::string(self.message.as_ref()); + message.set_span(self.span); + + let message = proc_macro::TokenStream::from_iter(once(TokenTree::Literal(message))); + let mut group = Group::new(Delimiter::Brace, message); + group.set_span(self.span); + + stream.push(TokenTree::Group(group)); + } +} diff --git a/tokio-macros/src/lib.rs b/tokio-macros/src/lib.rs index 38638a1df8a..5b76b30249e 100644 --- a/tokio-macros/src/lib.rs +++ b/tokio-macros/src/lib.rs @@ -18,7 +18,11 @@ extern crate proc_macro; mod entry; +mod error; +mod parsing; mod select; +mod to_tokens; +mod token_stream; use proc_macro::TokenStream; @@ -176,8 +180,13 @@ use proc_macro::TokenStream; /// available as `tokio` in the module where this macro is expanded. #[proc_macro_attribute] #[cfg(not(test))] // Work around for rust-lang/rust#62127 -pub fn main(args: TokenStream, item: TokenStream) -> TokenStream { - entry::main(args, item, true) +pub fn main(args: TokenStream, item_stream: TokenStream) -> TokenStream { + crate::entry::build( + crate::entry::EntryKind::Main, + crate::entry::SupportsThreading::Supported, + args, + item_stream, + ) } /// Marks async function to be executed by selected runtime. This macro helps set up a `Runtime` @@ -221,8 +230,13 @@ pub fn main(args: TokenStream, item: TokenStream) -> TokenStream { /// available as `tokio` in the module where this macro is expanded. #[proc_macro_attribute] #[cfg(not(test))] // Work around for rust-lang/rust#62127 -pub fn main_rt(args: TokenStream, item: TokenStream) -> TokenStream { - entry::main(args, item, false) +pub fn main_rt(args: TokenStream, item_stream: TokenStream) -> TokenStream { + crate::entry::build( + crate::entry::EntryKind::Main, + crate::entry::SupportsThreading::NotSupported, + args, + item_stream, + ) } /// Marks async function to be executed by runtime, suitable to test environment @@ -267,8 +281,13 @@ pub fn main_rt(args: TokenStream, item: TokenStream) -> TokenStream { /// older version of Tokio, you _must_ make the current version of Tokio /// available as `tokio` in the module where this macro is expanded. #[proc_macro_attribute] -pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { - entry::test(args, item, true) +pub fn test(args: TokenStream, item_stream: TokenStream) -> TokenStream { + crate::entry::build( + crate::entry::EntryKind::Test, + crate::entry::SupportsThreading::Supported, + args, + item_stream, + ) } /// Marks async function to be executed by runtime, suitable to test environment @@ -289,8 +308,13 @@ pub fn test(args: TokenStream, item: TokenStream) -> TokenStream { /// older version of Tokio, you _must_ make the current version of Tokio /// available as `tokio` in the module where this macro is expanded. 
#[proc_macro_attribute] -pub fn test_rt(args: TokenStream, item: TokenStream) -> TokenStream { - entry::test(args, item, false) +pub fn test_rt(args: TokenStream, item_stream: TokenStream) -> TokenStream { + crate::entry::build( + crate::entry::EntryKind::Test, + crate::entry::SupportsThreading::NotSupported, + args, + item_stream, + ) } /// Always fails with the error message below. @@ -299,12 +323,7 @@ pub fn test_rt(args: TokenStream, item: TokenStream) -> TokenStream { /// ``` #[proc_macro_attribute] pub fn main_fail(_args: TokenStream, _item: TokenStream) -> TokenStream { - syn::Error::new( - proc_macro2::Span::call_site(), - "The #[tokio::main] macro requires rt or rt-multi-thread.", - ) - .to_compile_error() - .into() + error::expand("The #[tokio::main] macro requires rt or rt-multi-thread.") } /// Always fails with the error message below. @@ -313,12 +332,7 @@ pub fn main_fail(_args: TokenStream, _item: TokenStream) -> TokenStream { /// ``` #[proc_macro_attribute] pub fn test_fail(_args: TokenStream, _item: TokenStream) -> TokenStream { - syn::Error::new( - proc_macro2::Span::call_site(), - "The #[tokio::test] macro requires rt or rt-multi-thread.", - ) - .to_compile_error() - .into() + error::expand("The #[tokio::test] macro requires rt or rt-multi-thread.") } /// Implementation detail of the `select!` macro. This macro is **not** intended diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs new file mode 100644 index 00000000000..4f6a4ad8214 --- /dev/null +++ b/tokio-macros/src/parsing.rs @@ -0,0 +1,188 @@ +use core::fmt; + +use proc_macro::{Spacing, Span, TokenTree}; + +const BUF: usize = 2; + +// Punctuations that we look for. +pub(crate) const COMMA: [char; 2] = [',', '\0']; +pub(crate) const EQ: [char; 2] = ['=', '\0']; + +pub(crate) struct Buf { + // Static ring buffer used for processing tokens. + ring: [Option; BUF], + head: usize, + tail: usize, + // Re-usable string buffer. + string: String, +} + +impl Buf { + pub(crate) fn new() -> Self { + Self { + ring: [None, None], + string: String::new(), + head: 0, + tail: 0, + } + } + + /// Clear the buffer. + fn clear(&mut self) { + self.ring = [None, None]; + self.head = 0; + self.tail = 0; + self.string.clear(); + } + + /// Get the next element out of the ring buffer. + pub(crate) fn next(&mut self) -> Option { + if let Some(head) = self.ring.get_mut(self.tail % BUF).and_then(|s| s.take()) { + self.tail += 1; + Some(head) + } else { + None + } + } + + fn fill(&mut self, n: usize, mut it: I) -> Option<()> + where + I: Iterator, + { + assert!(n <= BUF); + + while (self.head - self.tail) <= n { + self.ring[self.head % BUF] = Some(it.next()?); + self.head += 1; + } + + Some(()) + } + + /// Try to get the `n`th token and fill from the provided iterator if neede. + pub(crate) fn nth(&mut self, n: usize, it: I) -> Option<&TokenTree> + where + I: Iterator, + { + self.fill(n, it)?; + self.ring.get((self.tail + n) % BUF)?.as_ref() + } + + /// Coerce the given value into a string by formatting into an existing + /// string buffer. + pub(crate) fn display_as_str(&mut self, value: impl fmt::Display) -> &str { + use std::fmt::Write; + + self.string.clear(); + let _ = write!(&mut self.string, "{}", value); + self.string.as_str() + } +} + +/// Parser base. 
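+///
+/// A small cursor over a `proc_macro::TokenStream` with a fixed two-token
+/// lookahead (backed by [`Buf`]) and a vector of tokens retained for later
+/// re-emission.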
+pub(crate) struct BaseParser<'a> { + it: proc_macro::token_stream::IntoIter, + tokens: Vec, + pub(crate) buf: &'a mut Buf, +} + +impl<'a> BaseParser<'a> { + pub(crate) fn new(stream: proc_macro::TokenStream, buf: &'a mut Buf) -> Self { + buf.clear(); + + Self { + it: stream.into_iter(), + tokens: Vec::new(), + buf, + } + } + + /// Push a single token onto the token buffer. + pub(crate) fn push(&mut self, tt: TokenTree) { + self.tokens.push(tt); + } + + /// The current length in number of tokens recorded. + pub(crate) fn len(&self) -> usize { + self.tokens.len() + } + + /// Access the token at the given offset. + pub(crate) fn nth(&mut self, n: usize) -> Option<&TokenTree> { + self.buf.nth(n, &mut self.it) + } + + /// Bump the last token. + pub(crate) fn bump(&mut self) -> Option { + if let Some(head) = self.buf.next() { + return Some(head); + } + + self.it.next() + } + + /// Step over the given number of tokens. + pub(crate) fn step(&mut self, n: usize) { + for _ in 0..n { + self.bump(); + } + } + + /// Process a punctuation. + pub(crate) fn peek_punct(&mut self) -> Option { + let mut out = [None; 2]; + + for (n, o) in out.iter_mut().enumerate() { + match (n, self.nth(n)) { + (_, Some(TokenTree::Punct(punct))) => { + *o = Some((punct.span(), punct.as_char())); + + if !matches!(punct.spacing(), Spacing::Joint) { + break; + } + } + _ => { + break; + } + } + } + + match out { + [Some((span, head)), tail] => Some(Punct { + span, + chars: [head, tail.map(|(_, c)| c).unwrap_or('\0')], + }), + _ => None, + } + } + + /// Skip the specified punctuations and return a boolean indicating if it was skipped. + pub(crate) fn skip_punct(&mut self, expected: [char; 2]) -> bool { + if let Some(p) = self.peek_punct() { + if p.chars == expected { + self.step(p.len()); + return true; + } + } + + false + } + + /// Convert the current parser into a collection of tokens it has retained. + pub(crate) fn into_tokens(self) -> Vec { + self.tokens + } +} + +/// A complete punctuation. 
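+///
+/// Holds up to two characters; single-character punctuation such as `=` or
+/// `,` is padded with `'\0'` (see the [`EQ`] and [`COMMA`] constants), and
+/// [`Punct::len`] counts the characters before that padding.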
+#[derive(Debug)] +pub(crate) struct Punct { + pub(crate) span: Span, + pub(crate) chars: [char; 2], +} + +impl Punct { + pub(crate) fn len(&self) -> usize { + self.chars.iter().take_while(|c| **c != '\0').count() + } +} diff --git a/tokio-macros/src/select.rs b/tokio-macros/src/select.rs index 23e280a1056..6f21f0565f7 100644 --- a/tokio-macros/src/select.rs +++ b/tokio-macros/src/select.rs @@ -1,110 +1,105 @@ -use proc_macro::{TokenStream, TokenTree}; -use proc_macro2::Span; -use quote::quote; -use syn::Ident; +use proc_macro::{Ident, Spacing, Span, TokenTree}; -pub(crate) fn declare_output_enum(input: TokenStream) -> TokenStream { - // passed in is: `(_ _ _)` with one `_` per branch - let branches = match input.into_iter().next() { - Some(TokenTree::Group(group)) => group.stream().into_iter().count(), - _ => panic!("unexpected macro input"), - }; +use crate::{ + parsing::Buf, + to_tokens::{braced, from_fn, group, parens, ToTokens}, + token_stream::TokenStream, +}; + +pub(crate) fn declare_output_enum(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + // passed in is: `_ _ _` with one `_` per branch + let branches = input.into_iter().count(); let variants = (0..branches) .map(|num| Ident::new(&format!("_{}", num), Span::call_site())) .collect::>(); // Use a bitfield to track which futures completed - let mask = Ident::new( - if branches <= 8 { - "u8" - } else if branches <= 16 { - "u16" - } else if branches <= 32 { - "u32" - } else if branches <= 64 { - "u64" - } else { - panic!("up to 64 branches supported"); - }, - Span::call_site(), - ); + let mask = if branches <= 8 { + "u8" + } else if branches <= 16 { + "u16" + } else if branches <= 32 { + "u32" + } else if branches <= 64 { + "u64" + } else { + panic!("up to 64 branches supported"); + }; - TokenStream::from(quote! { - pub(super) enum Out<#( #variants ),*> { - #( #variants(#variants), )* - // Include a `Disabled` variant signifying that all select branches - // failed to resolve. - Disabled, + let generics = from_fn(|s| { + for variant in &variants { + s.write((TokenTree::Ident(variant.clone()), ',')); } + }); - pub(super) type Mask = #mask; - }) + let variants = from_fn(|s| { + for variant in &variants { + s.write(( + TokenTree::Ident(variant.clone()), + parens(TokenTree::Ident(variant.clone())), + ',', + )); + } + + s.write(("Disabled", ',')); + }); + + let out_enum = ( + ("pub", parens("super"), "enum", "Out", '<', generics, '>'), + braced(variants), + ); + + let out = ( + out_enum, + ("pub", parens("super"), "type", "Mask", '=', mask, ';'), + ); + + let mut stream = TokenStream::default(); + out.to_tokens(&mut stream, Span::call_site()); + stream.into_token_stream() } -pub(crate) fn clean_pattern_macro(input: TokenStream) -> TokenStream { - // If this isn't a pattern, we return the token stream as-is. The select! - // macro is using it in a location requiring a pattern, so an error will be - // emitted there. - let mut input: syn::Pat = match syn::parse(input.clone()) { - Ok(it) => it, - Err(_) => return input, - }; +pub(crate) fn clean_pattern_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let mut buf = Buf::new(); - clean_pattern(&mut input); - quote::ToTokens::into_token_stream(input).into() + let mut stream = TokenStream::default(); + clean_pattern(input.into_iter(), &mut buf).to_tokens(&mut stream, Span::call_site()); + stream.into_token_stream() } -// Removes any occurrences of ref or mut in the provided pattern. 
-fn clean_pattern(pat: &mut syn::Pat) { - match pat { - syn::Pat::Box(_box) => {} - syn::Pat::Lit(_literal) => {} - syn::Pat::Macro(_macro) => {} - syn::Pat::Path(_path) => {} - syn::Pat::Range(_range) => {} - syn::Pat::Rest(_rest) => {} - syn::Pat::Verbatim(_tokens) => {} - syn::Pat::Wild(_underscore) => {} - syn::Pat::Ident(ident) => { - ident.by_ref = None; - ident.mutability = None; - if let Some((_at, pat)) = &mut ident.subpat { - clean_pattern(&mut *pat); - } - } - syn::Pat::Or(or) => { - for case in or.cases.iter_mut() { - clean_pattern(case); - } - } - syn::Pat::Slice(slice) => { - for elem in slice.elems.iter_mut() { - clean_pattern(elem); - } - } - syn::Pat::Struct(struct_pat) => { - for field in struct_pat.fields.iter_mut() { - clean_pattern(&mut field.pat); - } - } - syn::Pat::Tuple(tuple) => { - for elem in tuple.elems.iter_mut() { - clean_pattern(elem); - } - } - syn::Pat::TupleStruct(tuple) => { - for elem in tuple.pat.elems.iter_mut() { - clean_pattern(elem); +/// Clean up a pattern by skipping over any `mut` and `&` tokens. +fn clean_pattern<'a, I: 'a>(tree: I, buf: &'a mut Buf) -> impl ToTokens + 'a +where + I: Iterator, +{ + from_fn(move |s| { + for tt in tree { + match tt { + TokenTree::Group(g) => { + s.write(group( + g.delimiter(), + clean_pattern(g.stream().into_iter(), buf), + )); + } + TokenTree::Ident(i) => { + if buf.display_as_str(&i) == "mut" { + continue; + } + + s.push(TokenTree::Ident(i)); + } + TokenTree::Punct(p) => { + if matches!(p.spacing(), Spacing::Alone) && p.as_char() == '&' { + continue; + } + + s.push(TokenTree::Punct(p)); + } + tt => { + s.push(tt); + } } } - syn::Pat::Reference(reference) => { - reference.mutability = None; - clean_pattern(&mut *reference.pat); - } - syn::Pat::Type(type_pat) => { - clean_pattern(&mut *type_pat.pat); - } - _ => {} - } + }) } diff --git a/tokio-macros/src/to_tokens.rs b/tokio-macros/src/to_tokens.rs new file mode 100644 index 00000000000..c2ccb101610 --- /dev/null +++ b/tokio-macros/src/to_tokens.rs @@ -0,0 +1,226 @@ +use proc_macro::{Delimiter, Ident, Literal, Punct, Spacing, Span, TokenTree}; + +use crate::token_stream::TokenStream; + +/// `::` +pub(crate) const S: [char; 2] = [':', ':']; + +pub(crate) trait ToTokens { + /// Convert into tokens. 
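+ ///
+ /// Implementations write `self` to `stream`; `span` is the span applied to
+ /// any tokens that are freshly constructed in the process.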
+ fn to_tokens(self, stream: &mut TokenStream, span: Span); +} + +impl ToTokens for TokenStream { + fn to_tokens(self, stream: &mut TokenStream, _: Span) { + stream.extend(self); + } +} + +impl ToTokens for proc_macro::TokenStream { + fn to_tokens(self, stream: &mut TokenStream, _: Span) { + for tt in self { + stream.push(tt); + } + } +} + +impl ToTokens for TokenTree { + fn to_tokens(self, stream: &mut TokenStream, _: Span) { + stream.push(self); + } +} + +impl ToTokens for Option +where + T: ToTokens, +{ + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + if let Some(tt) = self { + tt.to_tokens(stream, span); + } + } +} + +impl ToTokens for &str { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut ident = Ident::new(self, span); + ident.set_span(span); + stream.push(TokenTree::Ident(ident)); + } +} + +impl ToTokens for [char; N] { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut it = self.iter(); + + if let Some(last) = it.next_back() { + for c in it { + let mut punct = Punct::new(*c, Spacing::Joint); + punct.set_span(span); + stream.push(TokenTree::Punct(punct)); + } + + let mut punct = Punct::new(*last, Spacing::Alone); + punct.set_span(span); + stream.push(TokenTree::Punct(punct)); + } + } +} + +impl ToTokens for char { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut punct = Punct::new(self, Spacing::Alone); + punct.set_span(span); + stream.push(TokenTree::Punct(punct)); + } +} + +impl ToTokens for usize { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut literal = Literal::usize_unsuffixed(self); + literal.set_span(span); + stream.push(TokenTree::Literal(literal)); + } +} + +impl ToTokens for () { + fn to_tokens(self, _: &mut TokenStream, _: Span) {} +} + +macro_rules! tuple { + ($($gen:ident $var:ident),*) => { + impl<$($gen,)*> ToTokens for ($($gen,)*) where $($gen: ToTokens),* { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let ($($var,)*) = self; + $($var.to_tokens(stream, span);)* + } + } + } +} + +tuple!(A a); +tuple!(A a, B b); +tuple!(A a, B b, C c); +tuple!(A a, B b, C c, D d); +tuple!(A a, B b, C c, D d, E e); +tuple!(A a, B b, C c, D d, E e, F f); +tuple!(A a, B b, C c, D d, E e, F f, G g); +tuple!(A a, B b, C c, D d, E e, F f, G g, H h); +tuple!(A a, B b, C c, D d, E e, F f, G g, H h, I i); + +struct Group(Delimiter, T); + +impl ToTokens for Group +where + T: ToTokens, +{ + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let checkpoint = stream.checkpoint(); + self.1.to_tokens(stream, span); + stream.group(span, self.0, checkpoint); + } +} + +/// Construct a parenthesized group `()`. +pub(crate) fn parens(inner: T) -> impl ToTokens +where + T: ToTokens, +{ + Group(Delimiter::Parenthesis, inner) +} + +/// Construct a braced group `{}`. +pub(crate) fn braced(inner: T) -> impl ToTokens +where + T: ToTokens, +{ + Group(Delimiter::Brace, inner) +} + +/// Construct a bracketed group `[]`. +pub(crate) fn bracketed(inner: T) -> impl ToTokens +where + T: ToTokens, +{ + Group(Delimiter::Bracket, inner) +} + +/// Construct a custom group. +pub(crate) fn group(delimiter: Delimiter, inner: T) -> impl ToTokens +where + T: ToTokens, +{ + Group(delimiter, inner) +} + +struct StringLiteral<'a>(&'a str); + +impl ToTokens for StringLiteral<'_> { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut literal = Literal::string(self.0); + literal.set_span(span); + stream.push(TokenTree::Literal(literal)); + } +} + +/// Construct a string literal. 
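+///
+/// For example, `parens(string("Failed building the Runtime"))` is used when
+/// generating the runtime builder and emits `("Failed building the Runtime")`.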
+pub(crate) fn string(s: &str) -> impl ToTokens + '_ { + StringLiteral(s) +} + +impl ToTokens for &[TokenTree] { + fn to_tokens(self, stream: &mut TokenStream, _: Span) { + for tt in self { + stream.push(tt.clone()); + } + } +} + +pub(crate) struct FromFn(T); + +impl ToTokens for FromFn +where + T: FnOnce(&mut SpannedStream<'_>), +{ + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut stream = SpannedStream { stream, span }; + (self.0)(&mut stream); + } +} + +/// Construct a [ToTokens] implementation from a callback function. +pub(crate) fn from_fn(f: T) -> FromFn +where + T: FnOnce(&mut SpannedStream<'_>), +{ + FromFn(f) +} + +impl Clone for FromFn +where + T: Clone, +{ + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl Copy for FromFn where T: Copy {} + +/// A stream that has an implicit span associated with it. +pub(crate) struct SpannedStream<'a> { + stream: &'a mut TokenStream, + span: Span, +} + +impl SpannedStream<'_> { + /// Push a raw token onto the stream. + pub(crate) fn push(&mut self, tt: TokenTree) { + self.stream.push(tt); + } + + /// Push the given sequence of tokens. + pub(crate) fn write(&mut self, tt: impl ToTokens) { + self.stream.write(self.span, tt); + } +} diff --git a/tokio-macros/src/token_stream.rs b/tokio-macros/src/token_stream.rs new file mode 100644 index 00000000000..d5e42b18f1e --- /dev/null +++ b/tokio-macros/src/token_stream.rs @@ -0,0 +1,58 @@ +use core::iter::FromIterator; + +use proc_macro::{Delimiter, Group, Span, TokenTree}; + +use crate::to_tokens::ToTokens; + +/// A checkpoint of the current location in the stream. +#[repr(transparent)] +pub(crate) struct Checkpoint(usize); + +/// A token stream that can be modified by this crate. +#[derive(Default)] +pub(crate) struct TokenStream { + inner: Vec, +} + +impl TokenStream { + /// Push a single token tree. + pub(crate) fn push(&mut self, tt: TokenTree) { + self.inner.push(tt); + } + + /// Push the given sequence of tokens. + pub(crate) fn write(&mut self, span: Span, tt: T) + where + T: ToTokens, + { + tt.to_tokens(self, span); + } + + /// Get a checkpoint of the current location in the tree. + pub(crate) fn checkpoint(&self) -> Checkpoint { + Checkpoint(self.inner.len()) + } + + /// Push the given stream as a group. + pub(crate) fn group( + &mut self, + span: Span, + delimiter: Delimiter, + Checkpoint(start): Checkpoint, + ) { + let it = self.inner.drain(start..); + let mut group = Group::new(delimiter, proc_macro::TokenStream::from_iter(it)); + group.set_span(span); + self.push(TokenTree::Group(group)); + } + + /// Coerce into a token stream. + pub(crate) fn into_token_stream(self) -> proc_macro::TokenStream { + proc_macro::TokenStream::from_iter(self.inner) + } + + /// Extend the current stream from another. + pub(crate) fn extend(&mut self, mut other: Self) { + self.inner.append(&mut other.inner); + } +} diff --git a/tokio/src/macros/select.rs b/tokio/src/macros/select.rs index 051f8cb72a8..9b5d1f9c419 100644 --- a/tokio/src/macros/select.rs +++ b/tokio/src/macros/select.rs @@ -431,7 +431,7 @@ macro_rules! select { // macro. 
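 // As a sketch, for three branches the declaration below expands to roughly:
 //
 //     pub(super) enum Out<_0, _1, _2> {
 //         _0(_0),
 //         _1(_1),
 //         _2(_2),
 //         Disabled,
 //     }
 //
 //     pub(super) type Mask = u8;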
mod util { // Generate an enum with one variant per select branch - $crate::select_priv_declare_output_enum!( ( $($count)* ) ); + $crate::select_priv_declare_output_enum!($($count)*); } // `tokio::macros::support` is a public, but doc(hidden) module From b5209073e849a965d81611c8a65284884dc2d739 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Thu, 17 Feb 2022 17:33:53 +0100 Subject: [PATCH 02/24] Abide by minimum Rust requirement --- tokio-macros/src/to_tokens.rs | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/tokio-macros/src/to_tokens.rs b/tokio-macros/src/to_tokens.rs index c2ccb101610..9fb16e52071 100644 --- a/tokio-macros/src/to_tokens.rs +++ b/tokio-macros/src/to_tokens.rs @@ -49,24 +49,30 @@ impl ToTokens for &str { } } -impl ToTokens for [char; N] { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { - let mut it = self.iter(); - - if let Some(last) = it.next_back() { - for c in it { - let mut punct = Punct::new(*c, Spacing::Joint); - punct.set_span(span); - stream.push(TokenTree::Punct(punct)); +macro_rules! joint_punct { + ($n:tt) => { + impl ToTokens for [char; $n] { + fn to_tokens(self, stream: &mut TokenStream, span: Span) { + let mut it = self.iter(); + + if let Some(last) = it.next_back() { + for c in it { + let mut punct = Punct::new(*c, Spacing::Joint); + punct.set_span(span); + stream.push(TokenTree::Punct(punct)); + } + + let mut punct = Punct::new(*last, Spacing::Alone); + punct.set_span(span); + stream.push(TokenTree::Punct(punct)); + } } - - let mut punct = Punct::new(*last, Spacing::Alone); - punct.set_span(span); - stream.push(TokenTree::Punct(punct)); } - } + }; } +joint_punct!(2); + impl ToTokens for char { fn to_tokens(self, stream: &mut TokenStream, span: Span) { let mut punct = Punct::new(self, Spacing::Alone); From b593c9045b27453cdd7b0be1f42cbf6ee08f8593 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Thu, 17 Feb 2022 17:57:33 +0100 Subject: [PATCH 03/24] Fix clippy lints --- tokio-macros/src/entry/mod.rs | 12 +-- tokio-macros/src/entry/output.rs | 52 +++++------ tokio-macros/src/entry/parser.rs | 8 +- tokio-macros/src/error.rs | 8 +- .../src/{to_tokens.rs => into_tokens.rs} | 92 +++++++++---------- tokio-macros/src/lib.rs | 2 +- tokio-macros/src/parsing.rs | 2 +- tokio-macros/src/select.rs | 8 +- tokio-macros/src/token_stream.rs | 6 +- 9 files changed, 92 insertions(+), 98 deletions(-) rename tokio-macros/src/{to_tokens.rs => into_tokens.rs} (61%) diff --git a/tokio-macros/src/entry/mod.rs b/tokio-macros/src/entry/mod.rs index ff9c1a11c6e..d25f5a0b891 100644 --- a/tokio-macros/src/entry/mod.rs +++ b/tokio-macros/src/entry/mod.rs @@ -4,8 +4,8 @@ pub(crate) use self::output::{EntryKind, SupportsThreading}; mod parser; use crate::error::Error; +use crate::into_tokens::{from_fn, IntoTokens}; use crate::parsing::Buf; -use crate::to_tokens::{from_fn, ToTokens}; use crate::token_stream::TokenStream; /// Configurable macro code to build entry. 
@@ -21,9 +21,9 @@ pub(crate) fn build( let config = parser::ConfigParser::new(args, &mut buf, &mut errors); let config = config.parse(kind, supports_threading); - config.validate(kind, &mut errors); + config.validate(kind, &mut errors, &mut buf); - let item = parser::ItemParser::new(item_stream.clone(), &mut buf); + let item = parser::ItemParser::new(item_stream, &mut buf); let item = item.parse(); item.validate(kind, &mut errors); @@ -33,13 +33,13 @@ pub(crate) fn build( let (start, end) = item.block_spans(); item.expand_item(kind, config, start) - .to_tokens(&mut stream, end); - format_item_errors(errors).to_tokens(&mut stream, end); + .into_tokens(&mut stream, end); + format_item_errors(errors).into_tokens(&mut stream, end); stream.into_token_stream() } -fn format_item_errors(errors: I) -> impl ToTokens +fn format_item_errors(errors: I) -> impl IntoTokens where I: IntoIterator, { diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index da59a56b918..0286049cd21 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -3,7 +3,8 @@ use std::ops; use proc_macro::{Delimiter, Span, TokenTree}; use crate::error::Error; -use crate::to_tokens::{bracketed, from_fn, parens, string, ToTokens, S}; +use crate::into_tokens::{bracketed, from_fn, parens, string, IntoTokens, S}; +use crate::parsing::Buf; use crate::token_stream::TokenStream; #[derive(Default)] @@ -90,21 +91,16 @@ impl Config { } } - pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec) { - match (self.flavor(), &self.start_paused) { - (RuntimeFlavor::Threaded, Some(tt)) => { - if tt.to_string() == "true" { - errors.push(Error::new(tt.span(), format!("the `start_paused` option requires the \"current_thread\" runtime flavor. Use `#[{}(flavor = \"current_thread\")]`", kind.name()))); - } + /// Validate the current configuration. + pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec, buf: &mut Buf) { + if let (RuntimeFlavor::Threaded, Some(tt)) = (self.flavor(), &self.start_paused) { + if buf.display_as_str(tt) == "true" { + errors.push(Error::new(tt.span(), format!("the `start_paused` option requires the \"current_thread\" runtime flavor. Use `#[{}(flavor = \"current_thread\")]`", kind.name()))); } - _ => {} } - match (self.flavor(), &self.worker_threads) { - (RuntimeFlavor::CurrentThread, Some(tt)) => { - errors.push(Error::new(tt.span(), format!("the `worker_threads` option requires the \"multi_thread\" runtime flavor. Use `#[{}(flavor = \"multi_thread\")]`", kind.name()))); - } - _ => {} + if let (RuntimeFlavor::CurrentThread, Some(tt)) = (self.flavor(), &self.worker_threads) { + errors.push(Error::new(tt.span(), format!("the `worker_threads` option requires the \"multi_thread\" runtime flavor. Use `#[{}(flavor = \"multi_thread\")]`", kind.name()))); } } @@ -181,7 +177,7 @@ impl ItemOutput { kind: EntryKind, config: Config, start: Span, - ) -> impl ToTokens + '_ { + ) -> impl IntoTokens + '_ { from_fn(move |s| { if let (Some(signature), Some(block)) = (self.signature.clone(), self.block.clone()) { let block_span = self.tail_state.block.unwrap_or_else(Span::call_site); @@ -202,7 +198,7 @@ impl ItemOutput { } /// Generate attribute associated with entry kind. 
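// The sort of configuration that `validate` above rejects (illustrative, written in
// the style of the existing compile-fail tests): `worker_threads` only makes sense
// together with the multi-thread flavor, so this is expected to fail with the
// "requires the \"multi_thread\" runtime flavor" error constructed above rather than
// being silently ignored.
#[tokio::main(flavor = "current_thread", worker_threads = 2)]
async fn rejected_by_validate() {}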
- fn entry_kind_attribute(&self, kind: EntryKind) -> impl ToTokens { + fn entry_kind_attribute(&self, kind: EntryKind) -> impl IntoTokens { from_fn(move |s| { if let EntryKind::Test = kind { s.write(( @@ -219,7 +215,7 @@ impl ItemOutput { config: Config, block: ops::Range, start: Span, - ) -> impl ToTokens + '_ { + ) -> impl IntoTokens + '_ { // NB: override the first generated part with the detected start span. let rt = ("tokio", S, "runtime", S, "Builder"); @@ -269,42 +265,42 @@ impl ItemOutput { } /// Insert the given tokens with a custom span. -pub(crate) fn with_span(inner: T, span: Span) -> impl ToTokens +pub(crate) fn with_span(inner: T, span: Span) -> impl IntoTokens where - T: ToTokens, + T: IntoTokens, { WithSpan(inner, span) } struct WithSpan(T, Span); -impl ToTokens for WithSpan +impl IntoTokens for WithSpan where - T: ToTokens, + T: IntoTokens, { - fn to_tokens(self, stream: &mut TokenStream, _: Span) { - self.0.to_tokens(stream, self.1); + fn into_tokens(self, stream: &mut TokenStream, _: Span) { + self.0.into_tokens(stream, self.1); } } /// Construct a custom group with a custom span that is not inherited by its /// children. -fn group_with_span(delimiter: Delimiter, inner: T, span: Span) -> impl ToTokens +fn group_with_span(delimiter: Delimiter, inner: T, span: Span) -> impl IntoTokens where - T: ToTokens, + T: IntoTokens, { GroupWithSpan(delimiter, inner, span) } struct GroupWithSpan(Delimiter, T, Span); -impl ToTokens for GroupWithSpan +impl IntoTokens for GroupWithSpan where - T: ToTokens, + T: IntoTokens, { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let checkpoint = stream.checkpoint(); - self.1.to_tokens(stream, span); + self.1.into_tokens(stream, span); stream.group(self.2, self.0, checkpoint); } } diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index e1019e45f4d..e3451f534b7 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -108,10 +108,8 @@ impl<'a> ConfigParser<'a> { ident.span(), "the `flavor` option must only be used once", )); - self.errors.push(Error::new( - existing.clone(), - "first use of the `flavor` here", - )); + self.errors + .push(Error::new(*existing, "first use of the `flavor` here")); } config.flavor = Some((ident.span(), flavor)); @@ -222,7 +220,7 @@ impl<'a> ItemParser<'a> { } } - let tokens = self.base.into_tokens(); + let tokens = self.base.ininto_tokens(); ItemOutput::new(tokens, has_async, signature, block, tail_state) } diff --git a/tokio-macros/src/error.rs b/tokio-macros/src/error.rs index 6861a079854..880ceb7d646 100644 --- a/tokio-macros/src/error.rs +++ b/tokio-macros/src/error.rs @@ -4,14 +4,14 @@ use core::iter::FromIterator; use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenTree}; -use crate::to_tokens::ToTokens; +use crate::into_tokens::IntoTokens; use crate::token_stream::TokenStream; /// Expand a message as an error. 
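// Sketch of how parse errors are surfaced (illustrative helper, assumed to live next
// to `Error` in this module): every collected `Error` is rendered as a
// `compile_error!("...")` invocation at the span it was created with, so several
// independent problems can be reported from a single macro expansion.
fn push_example_error(tt: &TokenTree, errors: &mut Vec<Error>) {
    errors.push(Error::new(tt.span(), "example diagnostic message"));
}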
pub(crate) fn expand(message: &str) -> proc_macro::TokenStream { let error = Error::new(Span::call_site(), message); let mut stream = TokenStream::default(); - error.to_tokens(&mut stream, Span::call_site()); + error.into_tokens(&mut stream, Span::call_site()); stream.into_token_stream() } @@ -31,8 +31,8 @@ impl Error { } } -impl ToTokens for Error { - fn to_tokens(self, stream: &mut TokenStream, _: Span) { +impl IntoTokens for Error { + fn into_tokens(self, stream: &mut TokenStream, _: Span) { stream.push(TokenTree::Ident(Ident::new("compile_error", self.span))); let mut exclamation = Punct::new('!', Spacing::Alone); exclamation.set_span(self.span); diff --git a/tokio-macros/src/to_tokens.rs b/tokio-macros/src/into_tokens.rs similarity index 61% rename from tokio-macros/src/to_tokens.rs rename to tokio-macros/src/into_tokens.rs index 9fb16e52071..4db2f88e368 100644 --- a/tokio-macros/src/to_tokens.rs +++ b/tokio-macros/src/into_tokens.rs @@ -5,44 +5,44 @@ use crate::token_stream::TokenStream; /// `::` pub(crate) const S: [char; 2] = [':', ':']; -pub(crate) trait ToTokens { +pub(crate) trait IntoTokens { /// Convert into tokens. - fn to_tokens(self, stream: &mut TokenStream, span: Span); + fn into_tokens(self, stream: &mut TokenStream, span: Span); } -impl ToTokens for TokenStream { - fn to_tokens(self, stream: &mut TokenStream, _: Span) { +impl IntoTokens for TokenStream { + fn into_tokens(self, stream: &mut TokenStream, _: Span) { stream.extend(self); } } -impl ToTokens for proc_macro::TokenStream { - fn to_tokens(self, stream: &mut TokenStream, _: Span) { +impl IntoTokens for proc_macro::TokenStream { + fn into_tokens(self, stream: &mut TokenStream, _: Span) { for tt in self { stream.push(tt); } } } -impl ToTokens for TokenTree { - fn to_tokens(self, stream: &mut TokenStream, _: Span) { +impl IntoTokens for TokenTree { + fn into_tokens(self, stream: &mut TokenStream, _: Span) { stream.push(self); } } -impl ToTokens for Option +impl IntoTokens for Option where - T: ToTokens, + T: IntoTokens, { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { if let Some(tt) = self { - tt.to_tokens(stream, span); + tt.into_tokens(stream, span); } } } -impl ToTokens for &str { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { +impl IntoTokens for &str { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let mut ident = Ident::new(self, span); ident.set_span(span); stream.push(TokenTree::Ident(ident)); @@ -51,8 +51,8 @@ impl ToTokens for &str { macro_rules! joint_punct { ($n:tt) => { - impl ToTokens for [char; $n] { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { + impl IntoTokens for [char; $n] { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let mut it = self.iter(); if let Some(last) = it.next_back() { @@ -73,32 +73,32 @@ macro_rules! 
joint_punct { joint_punct!(2); -impl ToTokens for char { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { +impl IntoTokens for char { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let mut punct = Punct::new(self, Spacing::Alone); punct.set_span(span); stream.push(TokenTree::Punct(punct)); } } -impl ToTokens for usize { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { +impl IntoTokens for usize { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let mut literal = Literal::usize_unsuffixed(self); literal.set_span(span); stream.push(TokenTree::Literal(literal)); } } -impl ToTokens for () { - fn to_tokens(self, _: &mut TokenStream, _: Span) {} +impl IntoTokens for () { + fn into_tokens(self, _: &mut TokenStream, _: Span) {} } macro_rules! tuple { ($($gen:ident $var:ident),*) => { - impl<$($gen,)*> ToTokens for ($($gen,)*) where $($gen: ToTokens),* { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { + impl<$($gen,)*> IntoTokens for ($($gen,)*) where $($gen: IntoTokens),* { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let ($($var,)*) = self; - $($var.to_tokens(stream, span);)* + $($var.into_tokens(stream, span);)* } } } @@ -116,53 +116,53 @@ tuple!(A a, B b, C c, D d, E e, F f, G g, H h, I i); struct Group(Delimiter, T); -impl ToTokens for Group +impl IntoTokens for Group where - T: ToTokens, + T: IntoTokens, { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let checkpoint = stream.checkpoint(); - self.1.to_tokens(stream, span); + self.1.into_tokens(stream, span); stream.group(span, self.0, checkpoint); } } /// Construct a parenthesized group `()`. -pub(crate) fn parens(inner: T) -> impl ToTokens +pub(crate) fn parens(inner: T) -> impl IntoTokens where - T: ToTokens, + T: IntoTokens, { Group(Delimiter::Parenthesis, inner) } /// Construct a braced group `{}`. -pub(crate) fn braced(inner: T) -> impl ToTokens +pub(crate) fn braced(inner: T) -> impl IntoTokens where - T: ToTokens, + T: IntoTokens, { Group(Delimiter::Brace, inner) } /// Construct a bracketed group `[]`. -pub(crate) fn bracketed(inner: T) -> impl ToTokens +pub(crate) fn bracketed(inner: T) -> impl IntoTokens where - T: ToTokens, + T: IntoTokens, { Group(Delimiter::Bracket, inner) } /// Construct a custom group. -pub(crate) fn group(delimiter: Delimiter, inner: T) -> impl ToTokens +pub(crate) fn group(delimiter: Delimiter, inner: T) -> impl IntoTokens where - T: ToTokens, + T: IntoTokens, { Group(delimiter, inner) } struct StringLiteral<'a>(&'a str); -impl ToTokens for StringLiteral<'_> { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { +impl IntoTokens for StringLiteral<'_> { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let mut literal = Literal::string(self.0); literal.set_span(span); stream.push(TokenTree::Literal(literal)); @@ -170,12 +170,12 @@ impl ToTokens for StringLiteral<'_> { } /// Construct a string literal. 
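// A sketch of how these small builders compose (illustrative helper; mirrors the way
// the entry expansion assembles its runtime-builder call): `string` yields a string
// literal token and `parens` wraps its argument in `( ... )`.

use proc_macro::Span;

use crate::into_tokens::{parens, string};
use crate::token_stream::TokenStream;

fn write_expect_call(stream: &mut TokenStream, span: Span) {
    // Produces the token sequence `. expect ( "Failed building the Runtime" )`.
    stream.write(
        span,
        ('.', "expect", parens(string("Failed building the Runtime"))),
    );
}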
-pub(crate) fn string(s: &str) -> impl ToTokens + '_ { +pub(crate) fn string(s: &str) -> impl IntoTokens + '_ { StringLiteral(s) } -impl ToTokens for &[TokenTree] { - fn to_tokens(self, stream: &mut TokenStream, _: Span) { +impl IntoTokens for &[TokenTree] { + fn into_tokens(self, stream: &mut TokenStream, _: Span) { for tt in self { stream.push(tt.clone()); } @@ -184,17 +184,17 @@ impl ToTokens for &[TokenTree] { pub(crate) struct FromFn(T); -impl ToTokens for FromFn +impl IntoTokens for FromFn where T: FnOnce(&mut SpannedStream<'_>), { - fn to_tokens(self, stream: &mut TokenStream, span: Span) { + fn into_tokens(self, stream: &mut TokenStream, span: Span) { let mut stream = SpannedStream { stream, span }; (self.0)(&mut stream); } } -/// Construct a [ToTokens] implementation from a callback function. +/// Construct a [IntoTokens] implementation from a callback function. pub(crate) fn from_fn(f: T) -> FromFn where T: FnOnce(&mut SpannedStream<'_>), @@ -226,7 +226,7 @@ impl SpannedStream<'_> { } /// Push the given sequence of tokens. - pub(crate) fn write(&mut self, tt: impl ToTokens) { + pub(crate) fn write(&mut self, tt: impl IntoTokens) { self.stream.write(self.span, tt); } } diff --git a/tokio-macros/src/lib.rs b/tokio-macros/src/lib.rs index 5b76b30249e..785fec52c03 100644 --- a/tokio-macros/src/lib.rs +++ b/tokio-macros/src/lib.rs @@ -19,9 +19,9 @@ extern crate proc_macro; mod entry; mod error; +mod into_tokens; mod parsing; mod select; -mod to_tokens; mod token_stream; use proc_macro::TokenStream; diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs index 4f6a4ad8214..4392f180642 100644 --- a/tokio-macros/src/parsing.rs +++ b/tokio-macros/src/parsing.rs @@ -169,7 +169,7 @@ impl<'a> BaseParser<'a> { } /// Convert the current parser into a collection of tokens it has retained. - pub(crate) fn into_tokens(self) -> Vec { + pub(crate) fn ininto_tokens(self) -> Vec { self.tokens } } diff --git a/tokio-macros/src/select.rs b/tokio-macros/src/select.rs index 6f21f0565f7..0dcba41f388 100644 --- a/tokio-macros/src/select.rs +++ b/tokio-macros/src/select.rs @@ -1,8 +1,8 @@ use proc_macro::{Ident, Spacing, Span, TokenTree}; use crate::{ + into_tokens::{braced, from_fn, group, parens, IntoTokens}, parsing::Buf, - to_tokens::{braced, from_fn, group, parens, ToTokens}, token_stream::TokenStream, }; @@ -56,7 +56,7 @@ pub(crate) fn declare_output_enum(input: proc_macro::TokenStream) -> proc_macro: ); let mut stream = TokenStream::default(); - out.to_tokens(&mut stream, Span::call_site()); + out.into_tokens(&mut stream, Span::call_site()); stream.into_token_stream() } @@ -64,12 +64,12 @@ pub(crate) fn clean_pattern_macro(input: proc_macro::TokenStream) -> proc_macro: let mut buf = Buf::new(); let mut stream = TokenStream::default(); - clean_pattern(input.into_iter(), &mut buf).to_tokens(&mut stream, Span::call_site()); + clean_pattern(input.into_iter(), &mut buf).into_tokens(&mut stream, Span::call_site()); stream.into_token_stream() } /// Clean up a pattern by skipping over any `mut` and `&` tokens. 
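// Why patterns need cleaning at all, seen from the user side (illustrative; needs the
// tokio `macros` and `sync` features): a branch pattern such as `Some(mut v)` binds
// by value with a mutable binding, but the `select!` machinery also re-uses the
// pattern internally where only its shape matters, and for that it needs a copy with
// `mut` and `&` stripped, which is what `clean_pattern` builds.
async fn select_with_mut_binding(mut rx: tokio::sync::mpsc::Receiver<i32>) {
    tokio::select! {
        Some(mut v) = rx.recv() => {
            v += 1;
            let _ = v;
        }
        else => {}
    }
}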
-fn clean_pattern<'a, I: 'a>(tree: I, buf: &'a mut Buf) -> impl ToTokens + 'a +fn clean_pattern<'a, I: 'a>(tree: I, buf: &'a mut Buf) -> impl IntoTokens + 'a where I: Iterator, { diff --git a/tokio-macros/src/token_stream.rs b/tokio-macros/src/token_stream.rs index d5e42b18f1e..f8c0958f076 100644 --- a/tokio-macros/src/token_stream.rs +++ b/tokio-macros/src/token_stream.rs @@ -2,7 +2,7 @@ use core::iter::FromIterator; use proc_macro::{Delimiter, Group, Span, TokenTree}; -use crate::to_tokens::ToTokens; +use crate::into_tokens::IntoTokens; /// A checkpoint of the current location in the stream. #[repr(transparent)] @@ -23,9 +23,9 @@ impl TokenStream { /// Push the given sequence of tokens. pub(crate) fn write(&mut self, span: Span, tt: T) where - T: ToTokens, + T: IntoTokens, { - tt.to_tokens(self, span); + tt.into_tokens(self, span); } /// Get a checkpoint of the current location in the tree. From 61ea5f96d660ba77618a0c79dc896d7337010334 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Thu, 17 Feb 2022 18:22:36 +0100 Subject: [PATCH 04/24] Make sure to test for missing runtime --- .../tests/fail/macros_core_no_default.stderr | 4 +-- tokio-macros/src/entry/output.rs | 33 ++++++++++++------- tokio-macros/src/lib.rs | 4 +-- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/tests-build/tests/fail/macros_core_no_default.stderr b/tests-build/tests/fail/macros_core_no_default.stderr index 676acc8dbe3..2c4407e5bc7 100644 --- a/tests-build/tests/fail/macros_core_no_default.stderr +++ b/tests-build/tests/fail/macros_core_no_default.stderr @@ -1,5 +1,5 @@ -error: The default runtime flavor is `multi_thread`, but the `rt-multi-thread` feature is disabled. - --> $DIR/macros_core_no_default.rs:3:1 +error: the default runtime flavor is `multi_thread`, but the `rt-multi-thread` feature is disabled + --> tests/fail/macros_core_no_default.rs:3:1 | 3 | #[tokio::main] | ^^^^^^^^^^^^^^ diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index 0286049cd21..33132ce2ab4 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -67,7 +67,7 @@ impl RuntimeFlavor { pub(crate) struct Config { pub(crate) supports_threading: SupportsThreading, /// The default runtime flavor to use if left unspecified. - default_flavor: RuntimeFlavor, + default_flavor: Option, /// The runtime flavor to use. pub(crate) flavor: Option<(Span, RuntimeFlavor)>, /// The number of worker threads to configure. @@ -81,9 +81,9 @@ impl Config { Self { supports_threading, default_flavor: match (kind, supports_threading) { - (EntryKind::Main, SupportsThreading::Supported) => RuntimeFlavor::Threaded, - (EntryKind::Main, SupportsThreading::NotSupported) => RuntimeFlavor::CurrentThread, - (EntryKind::Test, _) => RuntimeFlavor::CurrentThread, + (EntryKind::Main, SupportsThreading::Supported) => Some(RuntimeFlavor::Threaded), + (EntryKind::Main, SupportsThreading::NotSupported) => None, + (EntryKind::Test, _) => Some(RuntimeFlavor::CurrentThread), }, flavor: None, worker_threads: None, @@ -93,21 +93,29 @@ impl Config { /// Validate the current configuration. 
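// What the `None` default amounts to for users (illustrative): with only the `rt`
// feature enabled there is no usable default flavor, and `#[tokio::main]` now reports
// the "default runtime flavor is `multi_thread`" error from `validate` below instead
// of silently falling back to a current-thread runtime; asking for the current-thread
// flavor explicitly keeps the program building.
#[tokio::main(flavor = "current_thread")]
async fn main() {
    println!("running on the current-thread runtime");
}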
pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec, buf: &mut Buf) { - if let (RuntimeFlavor::Threaded, Some(tt)) = (self.flavor(), &self.start_paused) { + if let (None, SupportsThreading::NotSupported) = + (self.default_flavor, self.supports_threading) + { + errors.push(Error::new(Span::call_site(), "the default runtime flavor is `multi_thread`, but the `rt-multi-thread` feature is disabled")) + } + + if let (Some(RuntimeFlavor::Threaded), Some(tt)) = (self.flavor(), &self.start_paused) { if buf.display_as_str(tt) == "true" { errors.push(Error::new(tt.span(), format!("the `start_paused` option requires the \"current_thread\" runtime flavor. Use `#[{}(flavor = \"current_thread\")]`", kind.name()))); } } - if let (RuntimeFlavor::CurrentThread, Some(tt)) = (self.flavor(), &self.worker_threads) { + if let (Some(RuntimeFlavor::CurrentThread), Some(tt)) = + (self.flavor(), &self.worker_threads) + { errors.push(Error::new(tt.span(), format!("the `worker_threads` option requires the \"multi_thread\" runtime flavor. Use `#[{}(flavor = \"multi_thread\")]`", kind.name()))); } } /// Get the runtime flavor to use. - fn flavor(&self) -> RuntimeFlavor { + fn flavor(&self) -> Option { match &self.flavor { - Some((_, flavor)) => *flavor, + Some((_, flavor)) => Some(*flavor), None => self.default_flavor, } } @@ -179,7 +187,9 @@ impl ItemOutput { start: Span, ) -> impl IntoTokens + '_ { from_fn(move |s| { - if let (Some(signature), Some(block)) = (self.signature.clone(), self.block.clone()) { + if let (Some(signature), Some(block), Some(flavor)) = + (self.signature.clone(), self.block.clone(), config.flavor()) + { let block_span = self.tail_state.block.unwrap_or_else(Span::call_site); s.write(( @@ -187,7 +197,7 @@ impl ItemOutput { &self.tokens[signature], group_with_span( Delimiter::Brace, - self.item_body(config, block, start), + self.item_body(config, block, flavor, start), block_span, ), )) @@ -214,6 +224,7 @@ impl ItemOutput { &self, config: Config, block: ops::Range, + flavor: RuntimeFlavor, start: Span, ) -> impl IntoTokens + '_ { // NB: override the first generated part with the detected start span. @@ -222,7 +233,7 @@ impl ItemOutput { let rt = from_fn(move |s| { s.write(rt); - match config.flavor() { + match flavor { RuntimeFlavor::CurrentThread => { s.write((S, "new_current_thread", parens(()))); } diff --git a/tokio-macros/src/lib.rs b/tokio-macros/src/lib.rs index 785fec52c03..0b1587c0756 100644 --- a/tokio-macros/src/lib.rs +++ b/tokio-macros/src/lib.rs @@ -323,7 +323,7 @@ pub fn test_rt(args: TokenStream, item_stream: TokenStream) -> TokenStream { /// ``` #[proc_macro_attribute] pub fn main_fail(_args: TokenStream, _item: TokenStream) -> TokenStream { - error::expand("The #[tokio::main] macro requires rt or rt-multi-thread.") + error::expand("the #[tokio::main] macro requires rt or rt-multi-thread") } /// Always fails with the error message below. @@ -332,7 +332,7 @@ pub fn main_fail(_args: TokenStream, _item: TokenStream) -> TokenStream { /// ``` #[proc_macro_attribute] pub fn test_fail(_args: TokenStream, _item: TokenStream) -> TokenStream { - error::expand("The #[tokio::test] macro requires rt or rt-multi-thread.") + error::expand("the #[tokio::test] macro requires rt or rt-multi-thread") } /// Implementation detail of the `select!` macro. 
This macro is **not** intended From 59e648f98f291bdbd3757b6762928debbc63466b Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Fri, 18 Feb 2022 15:39:53 +0100 Subject: [PATCH 05/24] Skip over angle brackets and treat last braced block seen as body --- .../tests/fail/macros_main_generics.rs | 10 +++ .../tests/fail/macros_main_generics.stderr | 13 +++ tests-build/tests/macros.rs | 6 ++ .../tests/pass/macros_main_generics.rs | 16 ++++ tokio-macros/src/entry/output.rs | 85 ++++++++++++------- tokio-macros/src/entry/parser.rs | 85 ++++++++++++++----- tokio-macros/src/parsing.rs | 44 +++++++++- 7 files changed, 207 insertions(+), 52 deletions(-) create mode 100644 tests-build/tests/fail/macros_main_generics.rs create mode 100644 tests-build/tests/fail/macros_main_generics.stderr create mode 100644 tests-build/tests/pass/macros_main_generics.rs diff --git a/tests-build/tests/fail/macros_main_generics.rs b/tests-build/tests/fail/macros_main_generics.rs new file mode 100644 index 00000000000..3bf9c28a5e0 --- /dev/null +++ b/tests-build/tests/fail/macros_main_generics.rs @@ -0,0 +1,10 @@ +use tests_build::tokio; + +// This should parse but fail since default values for const generic parameters +// are experimental. +#[tokio::main] +async fn where_clause_const_generics() { +} + +fn main() { +} diff --git a/tests-build/tests/fail/macros_main_generics.stderr b/tests-build/tests/fail/macros_main_generics.stderr new file mode 100644 index 00000000000..9e6aa666158 --- /dev/null +++ b/tests-build/tests/fail/macros_main_generics.stderr @@ -0,0 +1,13 @@ +error[E0658]: default values for const generic parameters are experimental + --> tests/fail/macros_main_generics.rs:6:53 + | +6 | async fn where_clause_const_generics() { + | ^^^^^^^^^ + | + = note: see issue #44580 for more information + +error: defaults for const parameters are only allowed in `struct`, `enum`, `type`, or `trait` definitions + --> tests/fail/macros_main_generics.rs:6:44 + | +6 | async fn where_clause_const_generics() { + | ^ diff --git a/tests-build/tests/macros.rs b/tests-build/tests/macros.rs index 0a180dfb74f..d0bc3ba0485 100644 --- a/tests-build/tests/macros.rs +++ b/tests-build/tests/macros.rs @@ -11,6 +11,9 @@ fn compile_fail_full() { #[cfg(feature = "full")] t.pass("tests/pass/macros_main_loop.rs"); + #[cfg(feature = "full")] + t.pass("tests/pass/macros_main_generics.rs"); + #[cfg(feature = "full")] t.compile_fail("tests/fail/macros_invalid_input.rs"); @@ -20,6 +23,9 @@ fn compile_fail_full() { #[cfg(feature = "full")] t.compile_fail("tests/fail/macros_type_mismatch.rs"); + #[cfg(feature = "full")] + t.compile_fail("tests/fail/macros_main_generics.rs"); + #[cfg(all(feature = "rt", not(feature = "full")))] t.compile_fail("tests/fail/macros_core_no_default.rs"); diff --git a/tests-build/tests/pass/macros_main_generics.rs b/tests-build/tests/pass/macros_main_generics.rs new file mode 100644 index 00000000000..086018d2be7 --- /dev/null +++ b/tests-build/tests/pass/macros_main_generics.rs @@ -0,0 +1,16 @@ +use tests_build::tokio; + +use std::fmt::Debug; + +#[tokio::main] +async fn single_parameter() { +} + +// This should parse since we treat angle brackets as groups during parsing and +// simply skip over them. 
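// Nested angle brackets also have to balance while skipping (illustrative case, in
// the same spirit as the functions below); this relies purely on the `<`/`>` depth
// counting in `skip_angle_brackets`.
#[tokio::main]
async fn nested_generics<T: IntoIterator<Item = Vec<u8>>>(_: T) {}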
+#[tokio::main] +async fn where_clause() where T: Iterator, ::Item: Debug { +} + +fn main() { +} diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index 33132ce2ab4..b6aec686623 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -9,7 +9,6 @@ use crate::token_stream::TokenStream; #[derive(Default)] pub(crate) struct TailState { - pub(crate) block: Option, pub(crate) start: Option, pub(crate) end: Option, /// Indicates if last expression is a return. @@ -124,23 +123,23 @@ impl Config { /// The parsed item output. pub(crate) struct ItemOutput { tokens: Vec, - pub(crate) has_async: bool, + async_keyword: Option, signature: Option>, - block: Option>, + block: Option, tail_state: TailState, } impl ItemOutput { pub(crate) fn new( tokens: Vec, - has_async: bool, + async_keyword: Option, signature: Option>, - block: Option>, + block: Option, tail_state: TailState, ) -> Self { Self { tokens, - has_async, + async_keyword, signature, block, tail_state, @@ -149,7 +148,7 @@ impl ItemOutput { /// Validate the parsed item. pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec) { - if !self.has_async { + if self.async_keyword.is_none() { let span = self .signature .as_ref() @@ -165,16 +164,21 @@ impl ItemOutput { } } + /// Calculate the block span to use for diagnostics. This will correspond to + /// the last tail statement in the block of the function body. pub(crate) fn block_spans(&self) -> (Span, Span) { + let fallback_span = self + .block + .and_then(|index| Some(self.tokens.get(index)?.span())); let start = self .tail_state .start - .or(self.tail_state.block) + .or(fallback_span) .unwrap_or_else(Span::call_site); let end = self .tail_state .end - .or(self.tail_state.block) + .or(fallback_span) .unwrap_or_else(Span::call_site); (start, end) } @@ -187,26 +191,49 @@ impl ItemOutput { start: Span, ) -> impl IntoTokens + '_ { from_fn(move |s| { - if let (Some(signature), Some(block), Some(flavor)) = - (self.signature.clone(), self.block.clone(), config.flavor()) - { - let block_span = self.tail_state.block.unwrap_or_else(Span::call_site); - - s.write(( - self.entry_kind_attribute(kind), - &self.tokens[signature], - group_with_span( - Delimiter::Brace, - self.item_body(config, block, flavor, start), - block_span, - ), - )) + if let Some(item) = self.maybe_expand_item(kind, config, start) { + s.write(item); } else { s.write(&self.tokens[..]); } }) } + /// Expand item if all prerequsites are available. + fn maybe_expand_item( + &self, + kind: EntryKind, + config: Config, + start: Span, + ) -> Option { + let signature = self.tokens.get(self.signature.as_ref()?.clone())?; + let block = self.tokens.get(self.block?)?; + let flavor = config.flavor()?; + + Some(( + self.entry_kind_attribute(kind), + from_fn(move |s| { + // Optionally filter the async keyword (if it's present). We + // still want to be able to produce a signature cause we get + // better diagnostics. + if let Some(index) = self.async_keyword { + for (n, tt) in signature.iter().enumerate() { + if n != index { + s.write(tt.clone()); + } + } + } else { + s.write(signature); + } + }), + group_with_span( + Delimiter::Brace, + self.item_body(config, block, flavor, start), + block.span(), + ), + )) + } + /// Generate attribute associated with entry kind. fn entry_kind_attribute(&self, kind: EntryKind) -> impl IntoTokens { from_fn(move |s| { @@ -220,13 +247,13 @@ impl ItemOutput { } /// Expanded item body. 
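// Roughly what the body assembled below expands to for a current-thread entry point
// (sketch; the attribute-driven builder options, the generated `#[test]`-style
// attribute and the exact span handling are elided):
fn expansion_sketch() {
    tokio::runtime::Builder::new_current_thread()
        .build()
        .expect("Failed building the Runtime")
        .block_on(async {
            // the original `async fn` body goes here
        })
}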
- fn item_body( - &self, + fn item_body<'a>( + &'a self, config: Config, - block: ops::Range, + block: &'a TokenTree, flavor: RuntimeFlavor, start: Span, - ) -> impl IntoTokens + '_ { + ) -> impl IntoTokens + 'a { // NB: override the first generated part with the detected start span. let rt = ("tokio", S, "runtime", S, "Builder"); @@ -262,13 +289,13 @@ impl ItemOutput { if self.tail_state.return_ { s.write(( with_span(("return", build, '.', "block_on"), start), - parens(("async", &self.tokens[block])), + parens(("async", block.clone())), ';', )); } else { s.write(( with_span((build, '.', "block_on"), start), - parens(("async", &self.tokens[block])), + parens(("async", block.clone())), )); } }) diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index e3451f534b7..3fd456d4946 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -1,4 +1,4 @@ -use proc_macro::{Delimiter, Group, Literal, Span, TokenTree}; +use proc_macro::{Delimiter, Group, Literal, Spacing, Span, TokenTree}; use crate::entry::output::{ Config, EntryKind, ItemOutput, RuntimeFlavor, SupportsThreading, TailState, @@ -190,39 +190,86 @@ impl<'a> ItemParser<'a> { } } - /// Parse and produce the corresponding token stream. + /// Parse and produce the corresponding item output. + /// + /// Note that this mode of parsing is intentionally promiscious and tries + /// its best not to produce any errors, because the more tokens we can feed + /// to straight to `rustc` the better diagnostics we can expect it to + /// produce. If we were to perform strict parsing here instead, we'd have to + /// rely on the kinds of errors we can produce ourselves directly here. pub(crate) fn parse(mut self) -> ItemOutput { let start = self.base.len(); + let mut signature = None; let mut block = None; - - let mut has_async = false; - + let mut async_keyword = None; + let mut generics = None; let mut tail_state = TailState::default(); while let Some(tt) = self.base.bump() { - match tt { + match &tt { TokenTree::Ident(ident) if self.base.buf.display_as_str(&ident) == "async" => { - // NB: intentionally skip over this token. - has_async = true; - } - TokenTree::Group(g) if matches!(g.delimiter(), Delimiter::Brace) => { - signature = Some(start..self.base.len()); - let start = self.base.len(); - tail_state.block = Some(g.span()); - self.find_last_stmt_range(&g, &mut tail_state); - self.base.push(TokenTree::Group(g)); - block = Some(start..self.base.len()); + if async_keyword.is_none() { + async_keyword = Some(self.base.len()); + } } - tt => { + // Skip over generics which might contain a block (due to + // constant generics). Angle brackets are not treated like a + // group, so we have to balance them ourselves in + // `skip_angle_brackets`. + TokenTree::Punct(p) + if generics.is_none() + && p.as_char() == '<' + && p.spacing() == Spacing::Alone => + { + generics = Some(p.span()); self.base.push(tt); + self.skip_angle_brackets(); + continue; + } + // We treat the last encountered braced group as the block of + // the function. The preceding span is considered its signature. 
+ TokenTree::Group(g) if g.delimiter() == Delimiter::Brace => { + signature = Some(start..self.base.len()); + block = Some(self.base.len()); + self.find_last_stmt_range(g, &mut tail_state); } + _ => {} } + + self.base.push(tt); } - let tokens = self.base.ininto_tokens(); + let tokens = self.base.into_tokens(); + + ItemOutput::new(tokens, async_keyword, signature, block, tail_state) + } + + /// Since generics are implemented using angle brackets. + fn skip_angle_brackets(&mut self) { + // NB: one bracket encountered already. + let mut level = 1u32; + + while level > 0 { + let tt = match self.base.bump() { + Some(tt) => tt, + None => break, + }; - ItemOutput::new(tokens, has_async, signature, block, tail_state) + if let TokenTree::Punct(p) = &tt { + match p.as_char() { + '<' => { + level += 1; + } + '>' => { + level -= 1; + } + _ => {} + } + } + + self.base.push(tt); + } } /// Find the range of spans that is defined by the last statement in the diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs index 4392f180642..16f8dd9ba6a 100644 --- a/tokio-macros/src/parsing.rs +++ b/tokio-macros/src/parsing.rs @@ -1,6 +1,6 @@ use core::fmt; -use proc_macro::{Spacing, Span, TokenTree}; +use proc_macro::{Delimiter, Spacing, Span, TokenTree}; const BUF: usize = 2; @@ -81,7 +81,7 @@ impl Buf { /// Parser base. pub(crate) struct BaseParser<'a> { - it: proc_macro::token_stream::IntoIter, + it: StreamIter, tokens: Vec, pub(crate) buf: &'a mut Buf, } @@ -91,7 +91,7 @@ impl<'a> BaseParser<'a> { buf.clear(); Self { - it: stream.into_iter(), + it: StreamIter::new(stream), tokens: Vec::new(), buf, } @@ -169,11 +169,47 @@ impl<'a> BaseParser<'a> { } /// Convert the current parser into a collection of tokens it has retained. - pub(crate) fn ininto_tokens(self) -> Vec { + pub(crate) fn into_tokens(self) -> Vec { self.tokens } } +struct StreamIter { + stack: Vec, +} + +impl StreamIter { + fn new(stream: proc_macro::TokenStream) -> Self { + Self { + stack: vec![stream.into_iter()], + } + } +} + +impl Iterator for StreamIter { + type Item = TokenTree; + + fn next(&mut self) -> Option { + while let Some(it) = self.stack.last_mut() { + if let Some(tt) = it.next() { + match &tt { + TokenTree::Group(g) if g.delimiter() == Delimiter::None => { + self.stack.push(g.stream().into_iter()); + continue; + } + _ => {} + } + + return Some(tt); + } + + let _ = self.stack.pop(); + } + + None + } +} + /// A complete punctuation. #[derive(Debug)] pub(crate) struct Punct { From 53690436836e4552027691b774b89defdc1c2bcd Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Fri, 18 Feb 2022 17:50:00 +0100 Subject: [PATCH 06/24] rustfmt --- tests-build/tests/fail/macros_main_generics.rs | 6 ++---- tests-build/tests/pass/macros_main_generics.rs | 12 +++++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests-build/tests/fail/macros_main_generics.rs b/tests-build/tests/fail/macros_main_generics.rs index 3bf9c28a5e0..afb975b4d44 100644 --- a/tests-build/tests/fail/macros_main_generics.rs +++ b/tests-build/tests/fail/macros_main_generics.rs @@ -3,8 +3,6 @@ use tests_build::tokio; // This should parse but fail since default values for const generic parameters // are experimental. 
#[tokio::main] -async fn where_clause_const_generics() { -} +async fn where_clause_const_generics() {} -fn main() { -} +fn main() {} diff --git a/tests-build/tests/pass/macros_main_generics.rs b/tests-build/tests/pass/macros_main_generics.rs index 086018d2be7..f4dc978f9f7 100644 --- a/tests-build/tests/pass/macros_main_generics.rs +++ b/tests-build/tests/pass/macros_main_generics.rs @@ -3,14 +3,16 @@ use tests_build::tokio; use std::fmt::Debug; #[tokio::main] -async fn single_parameter() { -} +async fn single_parameter() {} // This should parse since we treat angle brackets as groups during parsing and // simply skip over them. #[tokio::main] -async fn where_clause() where T: Iterator, ::Item: Debug { +async fn where_clause() +where + T: Iterator, + ::Item: Debug, +{ } -fn main() { -} +fn main() {} From 6122d6d55ef1362de6091de0415470ae2936380b Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Fri, 18 Feb 2022 19:07:19 +0100 Subject: [PATCH 07/24] fix trybuild output --- tests-build/tests/fail/macros_main_generics.rs | 3 +++ tests-build/tests/fail/macros_main_generics.stderr | 10 +++++----- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/tests-build/tests/fail/macros_main_generics.rs b/tests-build/tests/fail/macros_main_generics.rs index afb975b4d44..a40e8b722d8 100644 --- a/tests-build/tests/fail/macros_main_generics.rs +++ b/tests-build/tests/fail/macros_main_generics.rs @@ -2,6 +2,9 @@ use tests_build::tokio; // This should parse but fail since default values for const generic parameters // are experimental. +// +// TODO(udoprog): might want to version constrain this check with `rustversion` +// since this won't always be experimental moving forward (!). #[tokio::main] async fn where_clause_const_generics() {} diff --git a/tests-build/tests/fail/macros_main_generics.stderr b/tests-build/tests/fail/macros_main_generics.stderr index 9e6aa666158..1be6beb1953 100644 --- a/tests-build/tests/fail/macros_main_generics.stderr +++ b/tests-build/tests/fail/macros_main_generics.stderr @@ -1,13 +1,13 @@ error[E0658]: default values for const generic parameters are experimental - --> tests/fail/macros_main_generics.rs:6:53 + --> tests/fail/macros_main_generics.rs:9:53 | -6 | async fn where_clause_const_generics() { - | ^^^^^^^^^ +9 | async fn where_clause_const_generics() {} + | ^^^^^^^^^^^ | = note: see issue #44580 for more information error: defaults for const parameters are only allowed in `struct`, `enum`, `type`, or `trait` definitions - --> tests/fail/macros_main_generics.rs:6:44 + --> tests/fail/macros_main_generics.rs:9:44 | -6 | async fn where_clause_const_generics() { +9 | async fn where_clause_const_generics() {} | ^ From ce27bec7450764dd814df9ed71dded5ae7eb58c2 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Fri, 18 Feb 2022 21:36:28 +0100 Subject: [PATCH 08/24] Trim more stuff from patterns --- tokio-macros/src/parsing.rs | 14 +++++--------- tokio-macros/src/select.rs | 2 +- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs index 16f8dd9ba6a..4ecf3d83297 100644 --- a/tokio-macros/src/parsing.rs +++ b/tokio-macros/src/parsing.rs @@ -190,23 +190,19 @@ impl Iterator for StreamIter { type Item = TokenTree; fn next(&mut self) -> Option { - while let Some(it) = self.stack.last_mut() { - if let Some(tt) = it.next() { - match &tt { + loop { + if let Some(tt) = self.stack.last_mut()?.next() { + return match tt { TokenTree::Group(g) if g.delimiter() == Delimiter::None => { 
self.stack.push(g.stream().into_iter()); continue; } - _ => {} - } - - return Some(tt); + tt => Some(tt), + }; } let _ = self.stack.pop(); } - - None } } diff --git a/tokio-macros/src/select.rs b/tokio-macros/src/select.rs index 0dcba41f388..e939ef24b41 100644 --- a/tokio-macros/src/select.rs +++ b/tokio-macros/src/select.rs @@ -83,7 +83,7 @@ where )); } TokenTree::Ident(i) => { - if buf.display_as_str(&i) == "mut" { + if matches!(buf.display_as_str(&i), "mut" | "ref") { continue; } From 439841d1027f17edc71306785d66ab008300748b Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Fri, 18 Feb 2022 22:00:59 +0100 Subject: [PATCH 09/24] Bump async-stream dependency to 0.3.2 to avoid minimal-versions issue --- tokio-stream/Cargo.toml | 2 +- tokio-test/Cargo.toml | 2 +- tokio-util/Cargo.toml | 2 +- tokio/Cargo.toml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tokio-stream/Cargo.toml b/tokio-stream/Cargo.toml index 39041448fff..8477f96c937 100644 --- a/tokio-stream/Cargo.toml +++ b/tokio-stream/Cargo.toml @@ -33,7 +33,7 @@ tokio-util = { version = "0.7.0", path = "../tokio-util", optional = true } [dev-dependencies] tokio = { version = "1.2.0", path = "../tokio", features = ["full", "test-util"] } -async-stream = "0.3" +async-stream = "0.3.2" tokio-test = { path = "../tokio-test" } futures = { version = "0.3", default-features = false } diff --git a/tokio-test/Cargo.toml b/tokio-test/Cargo.toml index e889dcec85a..3c1c88ebf4c 100644 --- a/tokio-test/Cargo.toml +++ b/tokio-test/Cargo.toml @@ -19,7 +19,7 @@ categories = ["asynchronous", "testing"] [dependencies] tokio = { version = "1.2.0", path = "../tokio", features = ["rt", "sync", "time", "test-util"] } tokio-stream = { version = "0.1.1", path = "../tokio-stream" } -async-stream = "0.3" +async-stream = "0.3.2" bytes = "1.0.0" futures-core = "0.3.0" diff --git a/tokio-util/Cargo.toml b/tokio-util/Cargo.toml index b4782d5d9a3..48d075e7b02 100644 --- a/tokio-util/Cargo.toml +++ b/tokio-util/Cargo.toml @@ -50,7 +50,7 @@ tokio = { version = "1.0.0", path = "../tokio", features = ["full"] } tokio-test = { version = "0.4.0", path = "../tokio-test" } tokio-stream = { version = "0.1", path = "../tokio-stream" } -async-stream = "0.3.0" +async-stream = "0.3.2" futures = "0.3.0" futures-test = "0.3.5" diff --git a/tokio/Cargo.toml b/tokio/Cargo.toml index ba165d2556c..8ad8abb7559 100644 --- a/tokio/Cargo.toml +++ b/tokio/Cargo.toml @@ -129,7 +129,7 @@ tokio-stream = { version = "0.1", path = "../tokio-stream" } futures = { version = "0.3.0", features = ["async-await"] } mockall = "0.10.2" tempfile = "3.1.0" -async-stream = "0.3" +async-stream = "0.3.2" [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] proptest = "1" From dcc8a7ae1081e407f8375d157b34450059b63bae Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 00:00:25 +0100 Subject: [PATCH 10/24] Perform return type heuristics to determine when to return and not --- tokio-macros/src/entry/output.rs | 38 +++++++++++++++++------- tokio-macros/src/entry/parser.rs | 51 +++++++++++++++++++++++++++++--- tokio-macros/src/parsing.rs | 27 ++++++++++++----- 3 files changed, 94 insertions(+), 22 deletions(-) diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index b6aec686623..26210c6a943 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -7,12 +7,22 @@ use crate::into_tokens::{bracketed, from_fn, parens, string, IntoTokens, S}; use crate::parsing::Buf; use crate::token_stream::TokenStream; 
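// The user-visible shapes this machinery has to tell apart (illustrative functions;
// they compile with the real `#[tokio::main]`): an explicit `return expr;` tail, a
// unit-returning function whose tail already ends in `;`, and a tail expression whose
// value is the function's result. The extra `return`/`;` handling below is only
// applied in the first two situations; anything it cannot classify is passed through
// untouched so rustc can point at the original code.
#[tokio::main]
async fn returns_early() -> i32 {
    return 42;
}

#[tokio::main]
async fn unit_tail() {
    println!("done");
}

#[tokio::main]
async fn value_tail() -> i32 {
    42
}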
+#[derive(Debug, Clone, Copy)] +pub(crate) enum ReturnHeuristics { + /// Unknown how to treat the return type. + Unknown, + /// Generated function explicitly returns the special `()` unit type. + Unit, + /// Generated function explicitly returns the special `!` never type. + Never, +} + #[derive(Default)] pub(crate) struct TailState { pub(crate) start: Option, pub(crate) end: Option, /// Indicates if last expression is a return. - pub(crate) return_: bool, + pub(crate) has_return: bool, } #[derive(Debug, Clone, Copy)] @@ -126,7 +136,10 @@ pub(crate) struct ItemOutput { async_keyword: Option, signature: Option>, block: Option, + /// What's known about the tail statement. tail_state: TailState, + /// Best effort heuristics to determine the return value of the function being procssed. + return_heuristics: ReturnHeuristics, } impl ItemOutput { @@ -136,6 +149,7 @@ impl ItemOutput { signature: Option>, block: Option, tail_state: TailState, + return_heuristics: ReturnHeuristics, ) -> Self { Self { tokens, @@ -143,6 +157,7 @@ impl ItemOutput { signature, block, tail_state, + return_heuristics, } } @@ -285,18 +300,19 @@ impl ItemOutput { parens(string("Failed building the Runtime")), ); + let statement = ( + with_span((build, '.', "block_on"), start), + parens(("async", block.clone())), + ); + + let should_return = + self.tail_state.has_return || matches!(self.return_heuristics, ReturnHeuristics::Unit); + from_fn(move |s| { - if self.tail_state.return_ { - s.write(( - with_span(("return", build, '.', "block_on"), start), - parens(("async", block.clone())), - ';', - )); + if should_return { + s.write(((with_span("return", start), statement), ';')); } else { - s.write(( - with_span((build, '.', "block_on"), start), - parens(("async", block.clone())), - )); + s.write(statement); } }) } diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index 3fd456d4946..4bbe94f1fd2 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -4,9 +4,11 @@ use crate::entry::output::{ Config, EntryKind, ItemOutput, RuntimeFlavor, SupportsThreading, TailState, }; use crate::error::Error; -use crate::parsing::{BaseParser, Buf}; +use crate::parsing::{BaseParser, Buf, ROCKET}; use crate::parsing::{Punct, COMMA, EQ}; +use super::output::ReturnHeuristics; + /// A parser for the arguments provided to an entry macro. pub(crate) struct ConfigParser<'a> { base: BaseParser<'a>, @@ -206,7 +208,22 @@ impl<'a> ItemParser<'a> { let mut generics = None; let mut tail_state = TailState::default(); - while let Some(tt) = self.base.bump() { + // We default to assuming that the return is a unit, until we've spot + // a `->` token at which point we try and process it. + let mut return_heuristics = ReturnHeuristics::Unit; + + while self.base.nth(0).is_some() { + if let Some(p @ Punct { chars: ROCKET, .. }) = self.base.peek_punct() { + self.base.consume(p.len()); + self.parse_return_heuristics(&mut return_heuristics); + continue; + } + + let tt = match self.base.bump() { + Some(tt) => tt, + None => break, + }; + match &tt { TokenTree::Ident(ident) if self.base.buf.display_as_str(&ident) == "async" => { if async_keyword.is_none() { @@ -242,7 +259,33 @@ impl<'a> ItemParser<'a> { let tokens = self.base.into_tokens(); - ItemOutput::new(tokens, async_keyword, signature, block, tail_state) + ItemOutput::new( + tokens, + async_keyword, + signature, + block, + tail_state, + return_heuristics, + ) + } + + /// Parse out return type heuristics. 
There is a *very* limited number of + /// things we understand here. + fn parse_return_heuristics(&mut self, return_heuristics: &mut ReturnHeuristics) { + match self.base.nth(0) { + Some(TokenTree::Punct(p)) if p.as_char() == '!' => { + *return_heuristics = ReturnHeuristics::Never; + } + Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis => { + if g.stream().is_empty() { + *return_heuristics = ReturnHeuristics::Unit; + } + } + _ => { + // Return type is something we don't understand :( + *return_heuristics = ReturnHeuristics::Unknown; + } + } } /// Since generics are implemented using angle brackets. @@ -289,7 +332,7 @@ impl<'a> ItemParser<'a> { } tt => { if std::mem::take(&mut update) { - tail_state.return_ = matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return"); + tail_state.has_return = matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return"); tail_state.start = Some(span); } } diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs index 4ecf3d83297..a4db3889843 100644 --- a/tokio-macros/src/parsing.rs +++ b/tokio-macros/src/parsing.rs @@ -2,11 +2,12 @@ use core::fmt; use proc_macro::{Delimiter, Spacing, Span, TokenTree}; -const BUF: usize = 2; +const BUF: usize = 4; // Punctuations that we look for. pub(crate) const COMMA: [char; 2] = [',', '\0']; pub(crate) const EQ: [char; 2] = ['=', '\0']; +pub(crate) const ROCKET: [char; 2] = ['-', '>']; pub(crate) struct Buf { // Static ring buffer used for processing tokens. @@ -20,7 +21,7 @@ pub(crate) struct Buf { impl Buf { pub(crate) fn new() -> Self { Self { - ring: [None, None], + ring: [None, None, None, None], string: String::new(), head: 0, tail: 0, @@ -29,7 +30,7 @@ impl Buf { /// Clear the buffer. fn clear(&mut self) { - self.ring = [None, None]; + self.ring = [None, None, None, None]; self.head = 0; self.tail = 0; self.string.clear(); @@ -128,9 +129,21 @@ impl<'a> BaseParser<'a> { } } - /// Process a punctuation. + /// Step over the given number of tokens. + pub(crate) fn consume(&mut self, n: usize) { + for _ in 0..n { + if let Some(tt) = self.bump() { + self.push(tt); + } + } + } + + /// Peek a punctuation with joint characters. + /// + /// This processes the next 3 punctuations (if present) to ensure that when + /// we encounter a particular punctuation it occurs in isolation. pub(crate) fn peek_punct(&mut self) -> Option { - let mut out = [None; 2]; + let mut out = [None; 3]; for (n, o) in out.iter_mut().enumerate() { match (n, self.nth(n)) { @@ -148,7 +161,7 @@ impl<'a> BaseParser<'a> { } match out { - [Some((span, head)), tail] => Some(Punct { + [Some((span, head)), tail, None] => Some(Punct { span, chars: [head, tail.map(|(_, c)| c).unwrap_or('\0')], }), @@ -206,7 +219,7 @@ impl Iterator for StreamIter { } } -/// A complete punctuation. +/// A complete punctuation with a maximum up to two characters. 
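// Sketch of how a two-character operator is recognised with the `Punct` type below
// (illustrative helper; mirrors the way the entry parser consumes `->`): `peek_punct`
// packs up to two joined characters, padding with '\0', and the caller consumes the
// punctuation's length once the characters match one of the constants above.
fn eat_arrow(p: &mut BaseParser<'_>) -> bool {
    if let Some(punct @ Punct { chars: ROCKET, .. }) = p.peek_punct() {
        p.consume(punct.len());
        return true;
    }

    false
}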
#[derive(Debug)] pub(crate) struct Punct { pub(crate) span: Span, From 04e95aaa30dc3754d0ebdd6c1c86eb58db698270 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 01:03:21 +0100 Subject: [PATCH 11/24] Actually implement heuristics --- tokio-macros/src/entry/output.rs | 60 ++++++++++++++++++++++++-------- tokio-macros/src/entry/parser.rs | 38 +++++++++++--------- 2 files changed, 66 insertions(+), 32 deletions(-) diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index 26210c6a943..65d15855aef 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -13,16 +13,35 @@ pub(crate) enum ReturnHeuristics { Unknown, /// Generated function explicitly returns the special `()` unit type. Unit, - /// Generated function explicitly returns the special `!` never type. - Never, +} + +/// The kind of the tail expression. +#[derive(Debug, Clone, Copy)] +pub(crate) enum TailKind { + /// Body is empty. + Empty, + /// Tail is a return statement (prefix `return`). + Return, + /// Body is non-empty but the tail is not specifically recognized. + Unknown, +} + +impl Default for TailKind { + fn default() -> Self { + TailKind::Empty + } } #[derive(Default)] -pub(crate) struct TailState { +pub(crate) struct Tail { + /// The start span of the tail. pub(crate) start: Option, + /// The end span of the tail including a trailing semi. pub(crate) end: Option, /// Indicates if last expression is a return. - pub(crate) has_return: bool, + pub(crate) kind: TailKind, + /// If the tail statement has a semi-colon. + pub(crate) has_semi: bool, } #[derive(Debug, Clone, Copy)] @@ -137,8 +156,9 @@ pub(crate) struct ItemOutput { signature: Option>, block: Option, /// What's known about the tail statement. - tail_state: TailState, + tail: Tail, /// Best effort heuristics to determine the return value of the function being procssed. 
+ #[allow(unused)] return_heuristics: ReturnHeuristics, } @@ -148,7 +168,7 @@ impl ItemOutput { async_keyword: Option, signature: Option>, block: Option, - tail_state: TailState, + tail: Tail, return_heuristics: ReturnHeuristics, ) -> Self { Self { @@ -156,7 +176,7 @@ impl ItemOutput { async_keyword, signature, block, - tail_state, + tail, return_heuristics, } } @@ -186,12 +206,12 @@ impl ItemOutput { .block .and_then(|index| Some(self.tokens.get(index)?.span())); let start = self - .tail_state + .tail .start .or(fallback_span) .unwrap_or_else(Span::call_site); let end = self - .tail_state + .tail .end .or(fallback_span) .unwrap_or_else(Span::call_site); @@ -305,14 +325,24 @@ impl ItemOutput { parens(("async", block.clone())), ); - let should_return = - self.tail_state.has_return || matches!(self.return_heuristics, ReturnHeuristics::Unit); + let has_return = self.tail.has_semi && matches!(self.tail.kind, TailKind::Return); - from_fn(move |s| { - if should_return { - s.write(((with_span("return", start), statement), ';')); + let has_semi = + if !has_return && (self.tail.has_semi || matches!(self.tail.kind, TailKind::Empty)) { + matches!(self.return_heuristics, ReturnHeuristics::Unit) } else { - s.write(statement); + false + }; + + from_fn(move |s| { + if has_return { + s.write(with_span("return", start)); + } + + s.write(statement); + + if has_semi { + s.write(';'); } }) } diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index 4bbe94f1fd2..9e3234f8472 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -1,13 +1,11 @@ use proc_macro::{Delimiter, Group, Literal, Spacing, Span, TokenTree}; -use crate::entry::output::{ - Config, EntryKind, ItemOutput, RuntimeFlavor, SupportsThreading, TailState, -}; +use crate::entry::output::{Config, EntryKind, ItemOutput, RuntimeFlavor, SupportsThreading, Tail}; use crate::error::Error; use crate::parsing::{BaseParser, Buf, ROCKET}; use crate::parsing::{Punct, COMMA, EQ}; -use super::output::ReturnHeuristics; +use super::output::{ReturnHeuristics, TailKind}; /// A parser for the arguments provided to an entry macro. pub(crate) struct ConfigParser<'a> { @@ -206,7 +204,7 @@ impl<'a> ItemParser<'a> { let mut block = None; let mut async_keyword = None; let mut generics = None; - let mut tail_state = TailState::default(); + let mut tail = Tail::default(); // We default to assuming that the return is a unit, until we've spot // a `->` token at which point we try and process it. @@ -249,7 +247,7 @@ impl<'a> ItemParser<'a> { TokenTree::Group(g) if g.delimiter() == Delimiter::Brace => { signature = Some(start..self.base.len()); block = Some(self.base.len()); - self.find_last_stmt_range(g, &mut tail_state); + self.find_last_stmt_range(g, &mut tail); } _ => {} } @@ -264,7 +262,7 @@ impl<'a> ItemParser<'a> { async_keyword, signature, block, - tail_state, + tail, return_heuristics, ) } @@ -273,9 +271,6 @@ impl<'a> ItemParser<'a> { /// things we understand here. fn parse_return_heuristics(&mut self, return_heuristics: &mut ReturnHeuristics) { match self.base.nth(0) { - Some(TokenTree::Punct(p)) if p.as_char() == '!' => { - *return_heuristics = ReturnHeuristics::Never; - } Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis => { if g.stream().is_empty() { *return_heuristics = ReturnHeuristics::Unit; @@ -319,21 +314,30 @@ impl<'a> ItemParser<'a> { /// block so that they can be used for the generated expression. /// /// This in turn improves upon diagnostics when return types do not match. 
- fn find_last_stmt_range(&mut self, g: &Group, tail_state: &mut TailState) { - let mut update = true; + fn find_last_stmt_range(&mut self, g: &Group, tail: &mut Tail) { + let mut new_stmt = true; for tt in g.stream() { let span = tt.span(); - tail_state.end = Some(span); + tail.end = Some(span); match tt { TokenTree::Punct(p) if p.as_char() == ';' => { - update = true; + new_stmt = true; + tail.has_semi = true; } tt => { - if std::mem::take(&mut update) { - tail_state.has_return = matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return"); - tail_state.start = Some(span); + tail.has_semi = false; + + if std::mem::take(&mut new_stmt) { + tail.kind = if matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return") + { + TailKind::Return + } else { + TailKind::Unknown + }; + + tail.start = Some(span); } } } From 5824c57e2d7567946263cccb8dae35ab1c770e09 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 17:29:13 +0100 Subject: [PATCH 12/24] Update tokio-macros/src/select.rs Co-authored-by: Taiki Endo --- tokio-macros/src/select.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tokio-macros/src/select.rs b/tokio-macros/src/select.rs index e939ef24b41..c869c4a3aaf 100644 --- a/tokio-macros/src/select.rs +++ b/tokio-macros/src/select.rs @@ -68,7 +68,7 @@ pub(crate) fn clean_pattern_macro(input: proc_macro::TokenStream) -> proc_macro: stream.into_token_stream() } -/// Clean up a pattern by skipping over any `mut` and `&` tokens. +/// Clean up a pattern by skipping over any `mut`, `ref`, and `&` tokens. fn clean_pattern<'a, I: 'a>(tree: I, buf: &'a mut Buf) -> impl IntoTokens + 'a where I: Iterator, From b78862f7b61dcb8330ff9d4ce5f707402241162e Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 16:26:47 +0100 Subject: [PATCH 13/24] Improve return heuristics by checking for `where` --- .../tests/fail/macros_type_mismatch.rs | 6 +++ .../tests/fail/macros_type_mismatch.stderr | 16 ++++++++ tokio-macros/src/entry/parser.rs | 39 ++++++++++++++----- tokio-macros/src/parsing.rs | 2 +- 4 files changed, 53 insertions(+), 10 deletions(-) diff --git a/tests-build/tests/fail/macros_type_mismatch.rs b/tests-build/tests/fail/macros_type_mismatch.rs index 0a5b9c4c727..2b4b880e8d5 100644 --- a/tests-build/tests/fail/macros_type_mismatch.rs +++ b/tests-build/tests/fail/macros_type_mismatch.rs @@ -23,4 +23,10 @@ async fn extra_semicolon() -> Result<(), ()> { Ok(()); } +/// Intentionally try and make return heuristics misfire by introducing a generic return. 
+#[tokio::main] +async fn extra_semicolon_generic() -> Result<(), ()> where T: Fn() -> () { + Ok(()); +} + fn main() {} diff --git a/tests-build/tests/fail/macros_type_mismatch.stderr b/tests-build/tests/fail/macros_type_mismatch.stderr index f98031514ff..cc80797a87b 100644 --- a/tests-build/tests/fail/macros_type_mismatch.stderr +++ b/tests-build/tests/fail/macros_type_mismatch.stderr @@ -42,3 +42,19 @@ help: try adding an expression at the end of the block 23 ~ Ok(());; 24 + Ok(()) | + +error[E0308]: mismatched types + --> tests/fail/macros_type_mismatch.rs:29:5 + | +28 | async fn extra_semicolon_generic() -> Result<(), ()> where T: Fn() -> () { + | -------------- expected `Result<(), ()>` because of return type +29 | Ok(()); + | ^^^^^^^ expected enum `Result`, found `()` + | + = note: expected enum `Result<(), ()>` + found unit type `()` +help: try adding an expression at the end of the block + | +29 ~ Ok(());; +30 + Ok(()) + | diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index 9e3234f8472..87e1aa77180 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -2,7 +2,7 @@ use proc_macro::{Delimiter, Group, Literal, Spacing, Span, TokenTree}; use crate::entry::output::{Config, EntryKind, ItemOutput, RuntimeFlavor, SupportsThreading, Tail}; use crate::error::Error; -use crate::parsing::{BaseParser, Buf, ROCKET}; +use crate::parsing::{BaseParser, Buf, ARROW}; use crate::parsing::{Punct, COMMA, EQ}; use super::output::{ReturnHeuristics, TailKind}; @@ -206,15 +206,30 @@ impl<'a> ItemParser<'a> { let mut generics = None; let mut tail = Tail::default(); + #[derive(Clone, Copy)] + enum State { + /// Initial state. + Initial, + /// After `where`. + Where, + } + + let mut state = State::Initial; + // We default to assuming that the return is a unit, until we've spot // a `->` token at which point we try and process it. let mut return_heuristics = ReturnHeuristics::Unit; while self.base.nth(0).is_some() { - if let Some(p @ Punct { chars: ROCKET, .. }) = self.base.peek_punct() { - self.base.consume(p.len()); - self.parse_return_heuristics(&mut return_heuristics); - continue; + match state { + State::Initial => { + if let Some(p @ Punct { chars: ARROW, .. }) = self.base.peek_punct() { + self.base.consume(p.len()); + self.parse_return_heuristics(&mut return_heuristics); + continue; + } + } + _ => {} } let tt = match self.base.bump() { @@ -223,11 +238,17 @@ impl<'a> ItemParser<'a> { }; match &tt { - TokenTree::Ident(ident) if self.base.buf.display_as_str(&ident) == "async" => { - if async_keyword.is_none() { - async_keyword = Some(self.base.len()); + TokenTree::Ident(ident) => match self.base.buf.display_as_str(&ident) { + "async" => { + if async_keyword.is_none() { + async_keyword = Some(self.base.len()); + } } - } + "where" => { + state = State::Where; + } + _ => {} + }, // Skip over generics which might contain a block (due to // constant generics). Angle brackets are not treated like a // group, so we have to balance them ourselves in diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs index a4db3889843..4a5ba1efb43 100644 --- a/tokio-macros/src/parsing.rs +++ b/tokio-macros/src/parsing.rs @@ -7,7 +7,7 @@ const BUF: usize = 4; // Punctuations that we look for. 
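// The `where` state above exists for signatures like this one (illustrative): the
// `->` inside the bound must not be mistaken for the function's return arrow when the
// return-type heuristics run.
#[tokio::main]
async fn bound_with_arrow<T>(_: T) -> Result<(), ()>
where
    T: Fn() -> (),
{
    Ok(())
}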
pub(crate) const COMMA: [char; 2] = [',', '\0']; pub(crate) const EQ: [char; 2] = ['=', '\0']; -pub(crate) const ROCKET: [char; 2] = ['-', '>']; +pub(crate) const ARROW: [char; 2] = ['-', '>']; pub(crate) struct Buf { // Static ring buffer used for processing tokens. From 5c97ae40551f242dc0e1faace6a78d0e4fd43529 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 16:32:54 +0100 Subject: [PATCH 14/24] Only do find_last_stmt_range for last block found --- tokio-macros/src/entry/parser.rs | 67 +++++++++++++++++--------------- tokio-macros/src/parsing.rs | 4 +- 2 files changed, 38 insertions(+), 33 deletions(-) diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index 87e1aa77180..661109fa617 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -204,7 +204,6 @@ impl<'a> ItemParser<'a> { let mut block = None; let mut async_keyword = None; let mut generics = None; - let mut tail = Tail::default(); #[derive(Clone, Copy)] enum State { @@ -268,7 +267,6 @@ impl<'a> ItemParser<'a> { TokenTree::Group(g) if g.delimiter() == Delimiter::Brace => { signature = Some(start..self.base.len()); block = Some(self.base.len()); - self.find_last_stmt_range(g, &mut tail); } _ => {} } @@ -276,7 +274,14 @@ impl<'a> ItemParser<'a> { self.base.push(tt); } - let tokens = self.base.into_tokens(); + let (tokens, buf) = self.base.into_tokens_and_buf(); + + let mut tail = Tail::default(); + + // Only process statement ranges after we've found the block. + if let Some(TokenTree::Group(g)) = block.and_then(|index| tokens.get(index)) { + find_last_stmt_range(g, &mut tail, buf); + } ItemOutput::new( tokens, @@ -330,36 +335,36 @@ impl<'a> ItemParser<'a> { self.base.push(tt); } } +} - /// Find the range of spans that is defined by the last statement in the - /// block so that they can be used for the generated expression. - /// - /// This in turn improves upon diagnostics when return types do not match. - fn find_last_stmt_range(&mut self, g: &Group, tail: &mut Tail) { - let mut new_stmt = true; - - for tt in g.stream() { - let span = tt.span(); - tail.end = Some(span); - - match tt { - TokenTree::Punct(p) if p.as_char() == ';' => { - new_stmt = true; - tail.has_semi = true; - } - tt => { - tail.has_semi = false; - - if std::mem::take(&mut new_stmt) { - tail.kind = if matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return") - { - TailKind::Return - } else { - TailKind::Unknown - }; +/// Find the range of spans that is defined by the last statement in the +/// block so that they can be used for the generated expression. +/// +/// This in turn improves upon diagnostics when return types do not match. 
+fn find_last_stmt_range(g: &Group, tail: &mut Tail, buf: &mut Buf) {
+    let mut new_stmt = true;
+
+    for tt in g.stream() {
+        let span = tt.span();
+        tail.end = Some(span);
+
+        match tt {
+            TokenTree::Punct(p) if p.as_char() == ';' => {
+                new_stmt = true;
+                tail.has_semi = true;
+            }
+            tt => {
+                tail.has_semi = false;
 
-                    if std::mem::take(&mut new_stmt) {
-                        tail.kind = if matches!(&tt, TokenTree::Ident(ident) if self.base.buf.display_as_str(ident) == "return")
-                        {
-                            TailKind::Return
-                        } else {
-                            TailKind::Unknown
-                        };
+                if std::mem::take(&mut new_stmt) {
+                    tail.kind = if matches!(&tt, TokenTree::Ident(ident) if buf.display_as_str(ident) == "return")
+                    {
+                        TailKind::Return
+                    } else {
+                        TailKind::Unknown
+                    };
+
+                    tail.start = Some(span);
                 }
             }
         }
diff --git a/tokio-macros/src/parsing.rs b/tokio-macros/src/parsing.rs
index 4a5ba1efb43..d9cc8bd9923 100644
--- a/tokio-macros/src/parsing.rs
+++ b/tokio-macros/src/parsing.rs
@@ -182,8 +182,8 @@ impl<'a> BaseParser<'a> {
     }
 
     /// Convert the current parser into a collection of tokens it has retained.
-    pub(crate) fn into_tokens(self) -> Vec {
-        self.tokens
+    pub(crate) fn into_tokens_and_buf(self) -> (Vec, &'a mut Buf) {
+        (self.tokens, self.buf)
     }
 }

From 9d83d8379979e1a105c9e6bcd3ce89e0eb01c73f Mon Sep 17 00:00:00 2001
From: John-John Tedro 
Date: Sat, 19 Feb 2022 17:25:02 +0100
Subject: [PATCH 15/24] Ensure that skip_angle_brackets skips over composite
 tokens that contain angle brackets

---
 .../tests/pass/macros_main_generics.rs        |  9 ++++
 tokio-macros/src/entry/parser.rs              | 48 +++++++++++++------
 2 files changed, 43 insertions(+), 14 deletions(-)

diff --git a/tests-build/tests/pass/macros_main_generics.rs b/tests-build/tests/pass/macros_main_generics.rs
index f4dc978f9f7..12fb3f99292 100644
--- a/tests-build/tests/pass/macros_main_generics.rs
+++ b/tests-build/tests/pass/macros_main_generics.rs
@@ -15,4 +15,13 @@ where
 {
 }
 
+#[tokio::main]
+async fn join_bracket_in_return() -> Option ()> {
+    todo!()
+}
+
+#[tokio::main]
+async fn joint_bracket_in_generic ()>>>(_: T) {
+}
+
 fn main() {}
diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs
index 661109fa617..42fc908ea15 100644
--- a/tokio-macros/src/entry/parser.rs
+++ b/tokio-macros/src/entry/parser.rs
@@ -192,11 +192,11 @@ impl<'a> ItemParser<'a> {
 
     /// Parse and produce the corresponding item output.
     ///
-    /// Note that this mode of parsing is intentionally promiscious and tries
-    /// its best not to produce any errors, because the more tokens we can feed
-    /// to straight to `rustc` the better diagnostics we can expect it to
-    /// produce. If we were to perform strict parsing here instead, we'd have to
-    /// rely on the kinds of errors we can produce ourselves directly here.
+    /// Note that this mode of parsing is intentionally loose and tries its best
+    /// not to produce any errors, because the more tokens we can feed straight
+    /// to `rustc`, the better diagnostics we can expect it to produce. If we
+    /// were to perform strict parsing here instead, we'd have to rely on the
+    /// kinds of errors we can produce ourselves directly here.
     pub(crate) fn parse(mut self) -> ItemOutput {
         let start = self.base.len();
 
@@ -205,6 +205,10 @@ impl<'a> ItemParser<'a> {
         let mut async_keyword = None;
         let mut generics = None;
 
+        // Note: we might want to add more states if parsing ever needs to be
+        // more constrained, but the principle stated in the documentation of
+        // `parse` still applies: we will never be as good as `rustc` at
+        // providing diagnostics, so the less parsing we do here the better.
         #[derive(Clone, Copy)]
         enum State {
             /// Initial state.
@@ -220,15 +224,12 @@ impl<'a> ItemParser<'a> { let mut return_heuristics = ReturnHeuristics::Unit; while self.base.nth(0).is_some() { - match state { - State::Initial => { - if let Some(p @ Punct { chars: ARROW, .. }) = self.base.peek_punct() { - self.base.consume(p.len()); - self.parse_return_heuristics(&mut return_heuristics); - continue; - } + if let State::Initial = state { + if let Some(p @ Punct { chars: ARROW, .. }) = self.base.peek_punct() { + self.base.consume(p.len()); + self.parse_return_heuristics(&mut return_heuristics); + continue; } - _ => {} } let tt = match self.base.bump() { @@ -309,7 +310,8 @@ impl<'a> ItemParser<'a> { } } - /// Since generics are implemented using angle brackets. + /// Skip over angle bracket "groups". These are not marked as groups in the + /// tt, so we have to do some light processing ourselves here instead. fn skip_angle_brackets(&mut self) { // NB: one bracket encountered already. let mut level = 1u32; @@ -328,6 +330,24 @@ impl<'a> ItemParser<'a> { '>' => { level -= 1; } + // Consume any sequence of other joint punctuations tokens + // seen (unless they are prefixed by one of the above). This + // will catch composite punctuations such as `->` despite + // them containing an angle bracket. + _ if matches!(p.spacing(), Spacing::Joint) => { + self.base.push(tt); + + loop { + self.base.consume(1); + + if !matches!(self.base.nth(0), Some(TokenTree::Punct(p)) if p.spacing() == Spacing::Joint) + { + break; + } + } + + continue; + } _ => {} } } From 27ea4b8b89a1e70595f05f02c330c5d21e03a775 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 17:40:23 +0100 Subject: [PATCH 16/24] rustfmt --- tests-build/tests/fail/macros_type_mismatch.rs | 5 ++++- tests-build/tests/fail/macros_type_mismatch.stderr | 11 ++++++----- tests-build/tests/pass/macros_main_generics.rs | 3 +-- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/tests-build/tests/fail/macros_type_mismatch.rs b/tests-build/tests/fail/macros_type_mismatch.rs index 2b4b880e8d5..6fbc05945c6 100644 --- a/tests-build/tests/fail/macros_type_mismatch.rs +++ b/tests-build/tests/fail/macros_type_mismatch.rs @@ -25,7 +25,10 @@ async fn extra_semicolon() -> Result<(), ()> { /// Intentionally try and make return heuristics misfire by introducing a generic return. #[tokio::main] -async fn extra_semicolon_generic() -> Result<(), ()> where T: Fn() -> () { +async fn extra_semicolon_generic() -> Result<(), ()> +where + T: Fn() -> (), +{ Ok(()); } diff --git a/tests-build/tests/fail/macros_type_mismatch.stderr b/tests-build/tests/fail/macros_type_mismatch.stderr index cc80797a87b..6aeb4539beb 100644 --- a/tests-build/tests/fail/macros_type_mismatch.stderr +++ b/tests-build/tests/fail/macros_type_mismatch.stderr @@ -44,17 +44,18 @@ help: try adding an expression at the end of the block | error[E0308]: mismatched types - --> tests/fail/macros_type_mismatch.rs:29:5 + --> tests/fail/macros_type_mismatch.rs:32:5 | -28 | async fn extra_semicolon_generic() -> Result<(), ()> where T: Fn() -> () { +28 | async fn extra_semicolon_generic() -> Result<(), ()> | -------------- expected `Result<(), ()>` because of return type -29 | Ok(()); +... 
+32 |     Ok(());
    |     ^^^^^^^ expected enum `Result`, found `()`
    |
    = note:   expected enum `Result<(), ()>`
            found unit type `()`
 help: try adding an expression at the end of the block
    |
-29 ~     Ok(());;
-30 +     Ok(())
+32 ~     Ok(());;
+33 +     Ok(())
    |
diff --git a/tests-build/tests/pass/macros_main_generics.rs b/tests-build/tests/pass/macros_main_generics.rs
index 12fb3f99292..63ce951d7a3 100644
--- a/tests-build/tests/pass/macros_main_generics.rs
+++ b/tests-build/tests/pass/macros_main_generics.rs
@@ -21,7 +21,6 @@ async fn join_bracket_in_return() -> Option ()> {
 }
 
 #[tokio::main]
-async fn joint_bracket_in_generic ()>>>(_: T) {
-}
+async fn joint_bracket_in_generic ()>>>(_: T) {}
 
 fn main() {}

From 7d74defc3c5057df09e385b4277e892828dcff87 Mon Sep 17 00:00:00 2001
From: John-John Tedro 
Date: Sat, 19 Feb 2022 18:10:19 +0100
Subject: [PATCH 17/24] Fix logic for determining continuations in
 skip_angle_brackets

---
 tests-build/tests/macros.rs           |  3 +++
 tests-build/tests/pass/macros_main.rs | 10 ++++++++
 tokio-macros/src/entry/output.rs      | 36 +++++++++++++++++++--------
 tokio-macros/src/entry/parser.rs      | 13 +++++++---
 4 files changed, 48 insertions(+), 14 deletions(-)
 create mode 100644 tests-build/tests/pass/macros_main.rs

diff --git a/tests-build/tests/macros.rs b/tests-build/tests/macros.rs
index d0bc3ba0485..6c3d597023d 100644
--- a/tests-build/tests/macros.rs
+++ b/tests-build/tests/macros.rs
@@ -5,6 +5,9 @@ fn compile_fail_full() {
     #[cfg(feature = "full")]
     t.pass("tests/pass/forward_args_and_output.rs");
 
+    #[cfg(feature = "full")]
+    t.pass("tests/pass/macros_main.rs");
+
     #[cfg(feature = "full")]
     t.pass("tests/pass/macros_main_return.rs");
 
diff --git a/tests-build/tests/pass/macros_main.rs b/tests-build/tests/pass/macros_main.rs
new file mode 100644
index 00000000000..5f80d0c1a56
--- /dev/null
+++ b/tests-build/tests/pass/macros_main.rs
@@ -0,0 +1,10 @@
+use tests_build::tokio;
+
+// This ensures that `'static>>` is not being incorrectly consumed as one
+// sequence of joint tokens while parsing the angle bracket group.
+#[tokio::main]
+async fn ensure_proper_continuation() -> Result<(), Box> {
+    todo!()
+}
+
+fn main() {}
diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs
index 65d15855aef..121af566828 100644
--- a/tokio-macros/src/entry/output.rs
+++ b/tokio-macros/src/entry/output.rs
@@ -1,3 +1,4 @@
+use std::fmt;
 use std::ops;
 
 use proc_macro::{Delimiter, Span, TokenTree};
@@ -32,7 +33,7 @@ impl Default for TailKind {
     }
 }
 
-#[derive(Default)]
+#[derive(Debug, Default)]
 pub(crate) struct Tail {
     /// The start span of the tail.
     pub(crate) start: Option,
@@ -158,7 +159,6 @@ pub(crate) struct ItemOutput {
     /// What's known about the tail statement.
     tail: Tail,
     /// Best effort heuristics to determine the return value of the function being procssed.
-    #[allow(unused)]
     return_heuristics: ReturnHeuristics,
 }
 
@@ -183,20 +183,24 @@ impl ItemOutput {
 
     /// Validate the parsed item.
pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec) { - if self.async_keyword.is_none() { - let span = self - .signature - .as_ref() - .and_then(|s| self.tokens.get(s.clone())) - .and_then(|t| t.first()) - .map(|tt| tt.span()) - .unwrap_or_else(Span::call_site); + let span = self + .signature + .as_ref() + .and_then(|s| self.tokens.get(s.clone())) + .and_then(|t| t.first()) + .map(|tt| tt.span()) + .unwrap_or_else(Span::call_site); + if self.async_keyword.is_none() { errors.push(Error::new( span, format!("functions marked with `#[{}]` must be `async`", kind.name()), )); } + + if self.signature.is_none() || self.block.is_none() { + errors.push(Error::new(span, format!("failed to parse function"))); + } } /// Calculate the block span to use for diagnostics. This will correspond to @@ -348,6 +352,18 @@ impl ItemOutput { } } +impl fmt::Debug for ItemOutput { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ItemOutput") + .field("async_keyword", &self.async_keyword) + .field("signature", &self.signature) + .field("block", &self.block) + .field("tail", &self.tail) + .field("return_heuristics", &self.return_heuristics) + .finish() + } +} + /// Insert the given tokens with a custom span. pub(crate) fn with_span(inner: T, span: Span) -> impl IntoTokens where diff --git a/tokio-macros/src/entry/parser.rs b/tokio-macros/src/entry/parser.rs index 42fc908ea15..843eef0c6fe 100644 --- a/tokio-macros/src/entry/parser.rs +++ b/tokio-macros/src/entry/parser.rs @@ -337,11 +337,16 @@ impl<'a> ItemParser<'a> { _ if matches!(p.spacing(), Spacing::Joint) => { self.base.push(tt); - loop { - self.base.consume(1); + while let Some(tt) = self.base.bump() { + // This ensures that we only continue if we + // encounter a sequence of joined continuations, and + // not something like `'static`. + let do_next = + matches!(&tt, TokenTree::Punct(p) if p.spacing() == Spacing::Joint); - if !matches!(self.base.nth(0), Some(TokenTree::Punct(p)) if p.spacing() == Spacing::Joint) - { + self.base.push(tt); + + if !do_next { break; } } From 3de31aa98f41d96f3abf3e32e56c801090918c41 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 18:16:59 +0100 Subject: [PATCH 18/24] Ensure we're always reporting some error about failing to process --- tokio-macros/src/entry/mod.rs | 2 +- tokio-macros/src/entry/output.rs | 31 +++++++++++++++++++++++++------ 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/tokio-macros/src/entry/mod.rs b/tokio-macros/src/entry/mod.rs index d25f5a0b891..7a496bbfb1e 100644 --- a/tokio-macros/src/entry/mod.rs +++ b/tokio-macros/src/entry/mod.rs @@ -32,7 +32,7 @@ pub(crate) fn build( let (start, end) = item.block_spans(); - item.expand_item(kind, config, start) + item.expand_item(kind, config, start, &mut errors) .into_tokens(&mut stream, end); format_item_errors(errors).into_tokens(&mut stream, end); diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index 121af566828..5c1b46e29c0 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -181,6 +181,19 @@ impl ItemOutput { } } + /// Determine the best span to report general errors about processing. + /// + /// This picks the first token in the signature or the call span (if + /// available). + fn error_span(&self) -> Span { + self.signature + .as_ref() + .and_then(|s| self.tokens.get(s.clone())) + .and_then(|t| t.first()) + .map(|tt| tt.span()) + .unwrap_or_else(Span::call_site) + } + /// Validate the parsed item. 
pub(crate) fn validate(&self, kind: EntryKind, errors: &mut Vec) { let span = self @@ -197,10 +210,6 @@ impl ItemOutput { format!("functions marked with `#[{}]` must be `async`", kind.name()), )); } - - if self.signature.is_none() || self.block.is_none() { - errors.push(Error::new(span, format!("failed to parse function"))); - } } /// Calculate the block span to use for diagnostics. This will correspond to @@ -223,16 +232,26 @@ impl ItemOutput { } /// Expand into a function item. - pub(crate) fn expand_item( - &self, + pub(crate) fn expand_item<'a>( + &'a self, kind: EntryKind, config: Config, start: Span, + errors: &'a mut Vec, ) -> impl IntoTokens + '_ { from_fn(move |s| { if let Some(item) = self.maybe_expand_item(kind, config, start) { s.write(item); } else { + // Report a general "failed to do the thing" error to ensure + // we're never completely silent. + // + // If this is encountered, it need to be troubleshot regardless. + errors.push(Error::new( + self.error_span(), + format!("`#[{}]` failed to process function", kind.name()), + )); + s.write(&self.tokens[..]); } }) From d924545422509fed2cede6c1e262a006dd2e023e Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 19 Feb 2022 18:25:51 +0100 Subject: [PATCH 19/24] Ensure we always print some error when failing to expand --- .../tests/fail/macros_main_missing_async.rs | 6 ++++ .../fail/macros_main_missing_async.stderr | 5 +++ tests-build/tests/macros.rs | 3 ++ tokio-macros/src/entry/mod.rs | 28 +++++++++++++-- tokio-macros/src/entry/output.rs | 35 ++++--------------- 5 files changed, 47 insertions(+), 30 deletions(-) create mode 100644 tests-build/tests/fail/macros_main_missing_async.rs create mode 100644 tests-build/tests/fail/macros_main_missing_async.stderr diff --git a/tests-build/tests/fail/macros_main_missing_async.rs b/tests-build/tests/fail/macros_main_missing_async.rs new file mode 100644 index 00000000000..846a14887e6 --- /dev/null +++ b/tests-build/tests/fail/macros_main_missing_async.rs @@ -0,0 +1,6 @@ +use tests_build::tokio; + +#[tokio::main] +fn test_missing_async() {} + +fn main() {} diff --git a/tests-build/tests/fail/macros_main_missing_async.stderr b/tests-build/tests/fail/macros_main_missing_async.stderr new file mode 100644 index 00000000000..1f6944c5840 --- /dev/null +++ b/tests-build/tests/fail/macros_main_missing_async.stderr @@ -0,0 +1,5 @@ +error: functions marked with `#[tokio::main]` must be `async` + --> tests/fail/macros_main_missing_async.rs:4:1 + | +4 | fn test_missing_async() {} + | ^^ diff --git a/tests-build/tests/macros.rs b/tests-build/tests/macros.rs index 6c3d597023d..f6a630d5293 100644 --- a/tests-build/tests/macros.rs +++ b/tests-build/tests/macros.rs @@ -29,6 +29,9 @@ fn compile_fail_full() { #[cfg(feature = "full")] t.compile_fail("tests/fail/macros_main_generics.rs"); + #[cfg(feature = "full")] + t.compile_fail("tests/fail/macros_main_missing_async.rs"); + #[cfg(all(feature = "rt", not(feature = "full")))] t.compile_fail("tests/fail/macros_core_no_default.rs"); diff --git a/tokio-macros/src/entry/mod.rs b/tokio-macros/src/entry/mod.rs index 7a496bbfb1e..44f1aa3ac33 100644 --- a/tokio-macros/src/entry/mod.rs +++ b/tokio-macros/src/entry/mod.rs @@ -8,6 +8,10 @@ use crate::into_tokens::{from_fn, IntoTokens}; use crate::parsing::Buf; use crate::token_stream::TokenStream; +/// Url printed when we failed to expand, but we haven't provided end-user +/// workable diagnostics for why. 
+const REPORT_URL: &str = "https://github.com/tokio-rs/tokio/issues/new/choose"; + /// Configurable macro code to build entry. pub(crate) fn build( kind: EntryKind, @@ -32,8 +36,28 @@ pub(crate) fn build( let (start, end) = item.block_spans(); - item.expand_item(kind, config, start, &mut errors) - .into_tokens(&mut stream, end); + if let Some(item) = item.expand_item(kind, config, start) { + item.into_tokens(&mut stream, end); + } else { + // Report a general "failed to do the thing" error to ensure we're never + // completely silent. + // + // If this is encountered in the wild, it should be troubleshot to + // ensure we're providing good diagnostics for whatever failed. + if errors.is_empty() { + errors.push(Error::new( + item.error_span(), + format!( + "#[{}] failed to process function, please report this as a bug to {}", + kind.name(), + REPORT_URL + ), + )); + } + + item.expand_fallback().into_tokens(&mut stream, end); + } + format_item_errors(errors).into_tokens(&mut stream, end); stream.into_token_stream() diff --git a/tokio-macros/src/entry/output.rs b/tokio-macros/src/entry/output.rs index 5c1b46e29c0..6e61f9ab390 100644 --- a/tokio-macros/src/entry/output.rs +++ b/tokio-macros/src/entry/output.rs @@ -185,7 +185,7 @@ impl ItemOutput { /// /// This picks the first token in the signature or the call span (if /// available). - fn error_span(&self) -> Span { + pub(crate) fn error_span(&self) -> Span { self.signature .as_ref() .and_then(|s| self.tokens.get(s.clone())) @@ -232,33 +232,7 @@ impl ItemOutput { } /// Expand into a function item. - pub(crate) fn expand_item<'a>( - &'a self, - kind: EntryKind, - config: Config, - start: Span, - errors: &'a mut Vec, - ) -> impl IntoTokens + '_ { - from_fn(move |s| { - if let Some(item) = self.maybe_expand_item(kind, config, start) { - s.write(item); - } else { - // Report a general "failed to do the thing" error to ensure - // we're never completely silent. - // - // If this is encountered, it need to be troubleshot regardless. - errors.push(Error::new( - self.error_span(), - format!("`#[{}]` failed to process function", kind.name()), - )); - - s.write(&self.tokens[..]); - } - }) - } - - /// Expand item if all prerequsites are available. - fn maybe_expand_item( + pub(crate) fn expand_item( &self, kind: EntryKind, config: Config, @@ -292,6 +266,11 @@ impl ItemOutput { )) } + /// Expand the original tokens as fallback. + pub(crate) fn expand_fallback(&self) -> impl IntoTokens + '_ { + &self.tokens[..] + } + /// Generate attribute associated with entry kind. fn entry_kind_attribute(&self, kind: EntryKind) -> impl IntoTokens { from_fn(move |s| { From fd293b75b887d1bea80b253315d90245841b88eb Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sun, 20 Feb 2022 16:40:10 +0100 Subject: [PATCH 20/24] Add groups_in_return_position test --- tests-build/tests/pass/macros_main_generics.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests-build/tests/pass/macros_main_generics.rs b/tests-build/tests/pass/macros_main_generics.rs index 63ce951d7a3..5ed4d03d36d 100644 --- a/tests-build/tests/pass/macros_main_generics.rs +++ b/tests-build/tests/pass/macros_main_generics.rs @@ -23,4 +23,14 @@ async fn join_bracket_in_return() -> Option ()> { #[tokio::main] async fn joint_bracket_in_generic ()>>>(_: T) {} +struct GroupsInReturnPosition; + +// Tests both bracket groups `<{inner}>` and braces `{}` in the return +// position. The latter which should already be skipped over as part of the +// angle bracket processing. 
+#[tokio::main] +async fn groups_in_return_position() -> GroupsInReturnPosition<1, { 2 + 1 }> { + todo!() +} + fn main() {} From c71a88899259e28793c7d0c36dedde3c2908046d Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Fri, 25 Feb 2022 17:57:42 +0100 Subject: [PATCH 21/24] Regenerate trybuild with -Dwarnings --- tests-build/tests/fail/macros_invalid_input.stderr | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests-build/tests/fail/macros_invalid_input.stderr b/tests-build/tests/fail/macros_invalid_input.stderr index da362d0e7ff..25f90233f7d 100644 --- a/tests-build/tests/fail/macros_invalid_input.stderr +++ b/tests-build/tests/fail/macros_invalid_input.stderr @@ -52,10 +52,10 @@ error: the `worker_threads` option requires the "multi_thread" runtime flavor. U 33 | #[tokio::test(flavor = "current_thread", worker_threads = 4)] | ^ -warning: duplicated attribute +error: duplicated attribute --> tests/fail/macros_invalid_input.rs:37:1 | 37 | #[test] | ^^^^^^^ | - = note: `#[warn(duplicate_macro_attributes)]` on by default + = note: `-D duplicate-macro-attributes` implied by `-D warnings` From df3cbb6a8a32c9acd9a55873b174ca85c46bff54 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Sat, 26 Feb 2022 14:22:29 +0100 Subject: [PATCH 22/24] deny duplicate_macro_attributes --- .../tests/fail/macros_invalid_input.rs | 2 + .../tests/fail/macros_invalid_input.stderr | 50 ++++++++++--------- 2 files changed, 29 insertions(+), 23 deletions(-) diff --git a/tests-build/tests/fail/macros_invalid_input.rs b/tests-build/tests/fail/macros_invalid_input.rs index eb04eca76b6..6d5d8cedce6 100644 --- a/tests-build/tests/fail/macros_invalid_input.rs +++ b/tests-build/tests/fail/macros_invalid_input.rs @@ -1,3 +1,5 @@ +#![deny(duplicate_macro_attributes)] + use tests_build::tokio; #[tokio::main] diff --git a/tests-build/tests/fail/macros_invalid_input.stderr b/tests-build/tests/fail/macros_invalid_input.stderr index 25f90233f7d..2b6162868ed 100644 --- a/tests-build/tests/fail/macros_invalid_input.stderr +++ b/tests-build/tests/fail/macros_invalid_input.stderr @@ -1,61 +1,65 @@ error: functions marked with `#[tokio::main]` must be `async` - --> tests/fail/macros_invalid_input.rs:4:1 + --> tests/fail/macros_invalid_input.rs:6:1 | -4 | fn main_is_not_async() {} +6 | fn main_is_not_async() {} | ^^ error: unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused` - --> tests/fail/macros_invalid_input.rs:6:15 + --> tests/fail/macros_invalid_input.rs:8:15 | -6 | #[tokio::main(foo)] +8 | #[tokio::main(foo)] | ^^^ error: unknown option `threadpool`, expected one of: `flavor`, `worker_threads`, `start_paused` - --> tests/fail/macros_invalid_input.rs:9:15 - | -9 | #[tokio::main(threadpool::bar)] - | ^^^^^^^^^^ + --> tests/fail/macros_invalid_input.rs:11:15 + | +11 | #[tokio::main(threadpool::bar)] + | ^^^^^^^^^^ error: functions marked with `#[tokio::test]` must be `async` - --> tests/fail/macros_invalid_input.rs:13:1 + --> tests/fail/macros_invalid_input.rs:15:1 | -13 | fn test_is_not_async() {} +15 | fn test_is_not_async() {} | ^^ error: unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused` - --> tests/fail/macros_invalid_input.rs:15:15 + --> tests/fail/macros_invalid_input.rs:17:15 | -15 | #[tokio::test(foo)] +17 | #[tokio::test(foo)] | ^^^ error: unknown option `foo`, expected one of: `flavor`, `worker_threads`, `start_paused` - --> tests/fail/macros_invalid_input.rs:18:15 + --> tests/fail/macros_invalid_input.rs:20:15 | -18 | #[tokio::test(foo 
= 123)] +20 | #[tokio::test(foo = 123)] | ^^^ error: no such runtime flavor, the runtime flavors are: "current_thread", "multi_thread" - --> tests/fail/macros_invalid_input.rs:21:24 + --> tests/fail/macros_invalid_input.rs:23:24 | -21 | #[tokio::test(flavor = 123)] +23 | #[tokio::test(flavor = 123)] | ^^^ error: no such runtime flavor, the runtime flavors are: "current_thread", "multi_thread" - --> tests/fail/macros_invalid_input.rs:24:24 + --> tests/fail/macros_invalid_input.rs:26:24 | -24 | #[tokio::test(flavor = "foo")] +26 | #[tokio::test(flavor = "foo")] | ^^^^^ error: the `worker_threads` option requires the "multi_thread" runtime flavor. Use `#[tokio::test(flavor = "multi_thread")]` - --> tests/fail/macros_invalid_input.rs:33:59 + --> tests/fail/macros_invalid_input.rs:35:59 | -33 | #[tokio::test(flavor = "current_thread", worker_threads = 4)] +35 | #[tokio::test(flavor = "current_thread", worker_threads = 4)] | ^ error: duplicated attribute - --> tests/fail/macros_invalid_input.rs:37:1 + --> tests/fail/macros_invalid_input.rs:39:1 | -37 | #[test] +39 | #[test] | ^^^^^^^ | - = note: `-D duplicate-macro-attributes` implied by `-D warnings` +note: the lint level is defined here + --> tests/fail/macros_invalid_input.rs:1:9 + | +1 | #![deny(duplicate_macro_attributes)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ From e314165698881b66007c83975a943765df067b96 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Thu, 24 Mar 2022 05:18:38 +0100 Subject: [PATCH 23/24] bump tracing dependency to abide by minimal-versions --- tokio/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tokio/Cargo.toml b/tokio/Cargo.toml index 8ad8abb7559..21e5ad6844c 100644 --- a/tokio/Cargo.toml +++ b/tokio/Cargo.toml @@ -105,7 +105,7 @@ parking_lot = { version = "0.12.0", optional = true } # Currently unstable. The API exposed by these features may be broken at any time. # Requires `--cfg tokio_unstable` to enable. [target.'cfg(tokio_unstable)'.dependencies] -tracing = { version = "0.1.25", default-features = false, features = ["std"], optional = true } # Not in full +tracing = { version = "0.1.32", default-features = false, features = ["std"], optional = true } # Not in full [target.'cfg(unix)'.dependencies] libc = { version = "0.2.42", optional = true } From 59d49ffe032db8ac5fe1e14db2774325e57c3f96 Mon Sep 17 00:00:00 2001 From: John-John Tedro Date: Thu, 24 Mar 2022 05:21:26 +0100 Subject: [PATCH 24/24] bump tracing in tokio-util --- tokio-util/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tokio-util/Cargo.toml b/tokio-util/Cargo.toml index 09015cfe232..4264b5673dd 100644 --- a/tokio-util/Cargo.toml +++ b/tokio-util/Cargo.toml @@ -43,7 +43,7 @@ futures-io = { version = "0.3.0", optional = true } futures-util = { version = "0.3.0", optional = true } pin-project-lite = "0.2.0" slab = { version = "0.4.4", optional = true } # Backs `DelayQueue` -tracing = { version = "0.1.25", optional = true } +tracing = { version = "0.1.32", optional = true } [dev-dependencies] tokio = { version = "1.0.0", path = "../tokio", features = ["full"] }