aboutsummaryrefslogtreecommitdiff
path: root/syn/codegen
diff options
context:
space:
mode:
authorRobin Krahl <robin.krahl@ireas.org>2020-01-07 11:18:04 +0000
committerDaniel Mueller <deso@posteo.net>2020-01-08 09:20:25 -0800
commit5e20a29b4fdc8a2d442d1093681b396dcb4b816b (patch)
tree55ab083fa8999d2ccbb5e921c1ffe52560dca152 /syn/codegen
parent203e691f46d591a2cc8acdfd850fa9f5b0fb8a98 (diff)
downloadnitrocli-5e20a29b4fdc8a2d442d1093681b396dcb4b816b.tar.gz
nitrocli-5e20a29b4fdc8a2d442d1093681b396dcb4b816b.tar.bz2
Add structopt dependency in version 0.3.7
This patch series replaces argparse with structopt in the argument handling code. As a first step, we need structopt as a dependency. Import subrepo structopt/:structopt at efbdda4753592e27bc430fb01f7b9650b2f3174d Import subrepo bitflags/:bitflags at 30668016aca6bd3b02c766e8347e0b4080d4c296 Import subrepo clap/:clap at 784524f7eb193e35f81082cc69454c8c21b948f7 Import subrepo heck/:heck at 093d56fbf001e1506e56dbfa38631d99b1066df1 Import subrepo proc-macro-error/:proc-macro-error at 6c4cfe79a622c5de8ae68557993542be46eacae2 Import subrepo proc-macro2/:proc-macro2 at d5d48eddca4566e5438e8a2cbed4a74e049544de Import subrepo quote/:quote at 727436c6c137b20f0f34dde5d8fda2679b9747ad Import subrepo rustversion/:rustversion at 0c5663313516263059ce9059ef81fc7a1cf655ca Import subrepo syn-mid/:syn-mid at 5d3d85414a9e6674e1857ec22a87b96e04a6851a Import subrepo syn/:syn at e87c27e87f6f4ef8919d0372bdb056d53ef0d8f3 Import subrepo textwrap/:textwrap at abcd618beae3f74841032aa5b53c1086b0a57ca2 Import subrepo unicode-segmentation/:unicode-segmentation at 637c9874c4fe0c205ff27787faf150a40295c6c3 Import subrepo unicode-width/:unicode-width at 3033826f8bf05e82724140a981d5941e48fce393 Import subrepo unicode-xid/:unicode-xid at 4baae9fffb156ba229665b972a9cd5991787ceb7
Diffstat (limited to 'syn/codegen')
-rw-r--r--syn/codegen/Cargo.toml31
-rw-r--r--syn/codegen/README.md12
-rw-r--r--syn/codegen/src/debug.rs308
-rw-r--r--syn/codegen/src/file.rs32
-rw-r--r--syn/codegen/src/fold.rs284
-rw-r--r--syn/codegen/src/full.rs20
-rw-r--r--syn/codegen/src/gen.rs45
-rw-r--r--syn/codegen/src/json.rs18
-rw-r--r--syn/codegen/src/main.rs36
-rw-r--r--syn/codegen/src/operand.rs38
-rw-r--r--syn/codegen/src/parse.rs657
-rw-r--r--syn/codegen/src/version.rs24
-rw-r--r--syn/codegen/src/visit.rs265
-rw-r--r--syn/codegen/src/visit_mut.rs262
14 files changed, 2032 insertions, 0 deletions
diff --git a/syn/codegen/Cargo.toml b/syn/codegen/Cargo.toml
new file mode 100644
index 0000000..44d890b
--- /dev/null
+++ b/syn/codegen/Cargo.toml
@@ -0,0 +1,31 @@
+[package]
+name = "syn-internal-codegen"
+version = "0.0.0"
+authors = ["David Tolnay <dtolnay@gmail.com>", "Nika Layzell <nika@thelayzells.com>"]
+edition = "2018"
+
+publish = false # this is an internal crate which should never be published
+
+[dependencies]
+anyhow = "1.0"
+color-backtrace = "0.2"
+indexmap = { version = "1.0", features = ["serde-1"] }
+inflections = "1.1"
+proc-macro2 = { version = "1.0", features = ["span-locations"] }
+quote = "1.0"
+rustfmt = { package = "rustfmt-nightly", git = "https://github.com/rust-lang-nursery/rustfmt" }
+semver = { version = "0.9", features = ["serde"] }
+serde = { version = "1.0.88", features = ["derive"] }
+serde_json = "1.0.38"
+syn-codegen = { path = "../json" }
+syn = { path = "..", features = ["full", "extra-traits"] }
+thiserror = "1.0"
+toml = "0.4.10"
+
+# work around https://github.com/crossbeam-rs/crossbeam/issues/435
+# until https://github.com/BurntSushi/ripgrep/pull/1427 is released
+crossbeam-utils = "=0.6.5"
+
+[workspace]
+# Prefer that `cargo clean` in syn's directory does not require a rebuild of
+# rustfmt in the codegen directory.
diff --git a/syn/codegen/README.md b/syn/codegen/README.md
new file mode 100644
index 0000000..df46bd2
--- /dev/null
+++ b/syn/codegen/README.md
@@ -0,0 +1,12 @@
+# syn_codegen
+
+This is an internal (not published on crates.io) crate which is used to generate
+the files in the `gen/` directory of `syn`. It is used to ensure that the
+implementations for `Fold`, `Visit`, and `VisitMut` remain in sync with the
+actual AST.
+
+To run this program, run `cargo run` in this directory, and the `gen/` folder
+will be re-generated.
+
+This program is slow, and is therefore not run when building `syn` as part of
+the build script to save on compile time.
diff --git a/syn/codegen/src/debug.rs b/syn/codegen/src/debug.rs
new file mode 100644
index 0000000..9193881
--- /dev/null
+++ b/syn/codegen/src/debug.rs
@@ -0,0 +1,308 @@
+use crate::file;
+use anyhow::Result;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::quote;
+use syn::Index;
+use syn_codegen::{Data, Definitions, Node, Type};
+
+const DEBUG_SRC: &str = "../tests/debug/gen.rs";
+
+fn rust_type(ty: &Type) -> TokenStream {
+ match ty {
+ Type::Syn(ty) => {
+ let ident = Ident::new(ty, Span::call_site());
+ quote!(syn::#ident)
+ }
+ Type::Std(ty) => {
+ let ident = Ident::new(ty, Span::call_site());
+ quote!(#ident)
+ }
+ Type::Ext(ty) => {
+ let ident = Ident::new(ty, Span::call_site());
+ quote!(proc_macro2::#ident)
+ }
+ Type::Token(ty) | Type::Group(ty) => {
+ let ident = Ident::new(ty, Span::call_site());
+ quote!(syn::token::#ident)
+ }
+ Type::Punctuated(ty) => {
+ let element = rust_type(&ty.element);
+ let punct = Ident::new(&ty.punct, Span::call_site());
+ quote!(syn::punctuated::Punctuated<#element, #punct>)
+ }
+ Type::Option(ty) => {
+ let inner = rust_type(ty);
+ quote!(Option<#inner>)
+ }
+ Type::Box(ty) => {
+ let inner = rust_type(ty);
+ quote!(Box<#inner>)
+ }
+ Type::Vec(ty) => {
+ let inner = rust_type(ty);
+ quote!(Vec<#inner>)
+ }
+ Type::Tuple(ty) => {
+ let inner = ty.iter().map(rust_type);
+ quote!((#(#inner,)*))
+ }
+ }
+}
+
+fn is_printable(ty: &Type) -> bool {
+ match ty {
+ Type::Ext(name) => name != "Span",
+ Type::Box(ty) => is_printable(ty),
+ Type::Tuple(ty) => ty.iter().any(is_printable),
+ Type::Token(_) | Type::Group(_) => false,
+ Type::Syn(name) => name != "Reserved",
+ Type::Std(_) | Type::Punctuated(_) | Type::Option(_) | Type::Vec(_) => true,
+ }
+}
+
+fn format_field(val: &TokenStream, ty: &Type) -> Option<TokenStream> {
+ if !is_printable(ty) {
+ return None;
+ }
+ let format = match ty {
+ Type::Option(ty) => {
+ let inner = quote!(_val);
+ let format = format_field(&inner, ty).map(|format| {
+ quote! {
+ formatter.write_str("(")?;
+ Debug::fmt(#format, formatter)?;
+ formatter.write_str(")")?;
+ }
+ });
+ let ty = rust_type(ty);
+ quote!({
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(Option<#ty>);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &self.0 {
+ Some(#inner) => {
+ formatter.write_str("Some")?;
+ #format
+ Ok(())
+ }
+ None => formatter.write_str("None"),
+ }
+ }
+ }
+ Print::ref_cast(#val)
+ })
+ }
+ Type::Tuple(ty) => {
+ let printable: Vec<TokenStream> = ty
+ .iter()
+ .enumerate()
+ .filter_map(|(i, ty)| {
+ let index = Index::from(i);
+ let val = quote!(&#val.#index);
+ format_field(&val, ty)
+ })
+ .collect();
+ if printable.len() == 1 {
+ printable.into_iter().next().unwrap()
+ } else {
+ quote! {
+ &(#(#printable),*)
+ }
+ }
+ }
+ _ => quote! { Lite(#val) },
+ };
+ Some(format)
+}
+
+fn syntax_tree_enum<'a>(outer: &str, inner: &str, fields: &'a [Type]) -> Option<&'a str> {
+ if fields.len() != 1 {
+ return None;
+ }
+ const WHITELIST: &[&str] = &["PathArguments", "Visibility"];
+ match &fields[0] {
+ Type::Syn(ty) if WHITELIST.contains(&outer) || outer.to_owned() + inner == *ty => Some(ty),
+ _ => None,
+ }
+}
+
+fn lookup<'a>(defs: &'a Definitions, name: &str) -> &'a Node {
+ for node in &defs.types {
+ if node.ident == name {
+ return node;
+ }
+ }
+ panic!("not found: {}", name)
+}
+
+fn expand_impl_body(defs: &Definitions, node: &Node, name: &str) -> TokenStream {
+ let ident = Ident::new(&node.ident, Span::call_site());
+
+ match &node.data {
+ Data::Enum(variants) => {
+ let arms = variants.iter().map(|(v, fields)| {
+ let variant = Ident::new(v, Span::call_site());
+ if fields.is_empty() {
+ quote! {
+ syn::#ident::#variant => formatter.write_str(#v),
+ }
+ } else if let Some(inner) = syntax_tree_enum(name, v, fields) {
+ let path = format!("{}::{}", name, v);
+ let format = expand_impl_body(defs, lookup(defs, inner), &path);
+ quote! {
+ syn::#ident::#variant(_val) => {
+ #format
+ }
+ }
+ } else if fields.len() == 1 {
+ let ty = &fields[0];
+ let val = quote!(_val);
+ let format = format_field(&val, ty).map(|format| {
+ quote! {
+ formatter.write_str("(")?;
+ Debug::fmt(#format, formatter)?;
+ formatter.write_str(")")?;
+ }
+ });
+ quote! {
+ syn::#ident::#variant(_val) => {
+ formatter.write_str(#v)?;
+ #format
+ Ok(())
+ }
+ }
+ } else {
+ let pats = (0..fields.len())
+ .map(|i| Ident::new(&format!("_v{}", i), Span::call_site()));
+ let fields = fields.iter().enumerate().filter_map(|(i, ty)| {
+ let index = Ident::new(&format!("_v{}", i), Span::call_site());
+ let val = quote!(#index);
+ let format = format_field(&val, ty)?;
+ Some(quote! {
+ formatter.field(#format);
+ })
+ });
+ quote! {
+ syn::#ident::#variant(#(#pats),*) => {
+ let mut formatter = formatter.debug_tuple(#v);
+ #(#fields)*
+ formatter.finish()
+ }
+ }
+ }
+ });
+ let nonexhaustive = if node.exhaustive {
+ None
+ } else {
+ Some(quote!(_ => unreachable!()))
+ };
+ quote! {
+ match _val {
+ #(#arms)*
+ #nonexhaustive
+ }
+ }
+ }
+ Data::Struct(fields) => {
+ let fields = fields.iter().filter_map(|(f, ty)| {
+ let ident = Ident::new(f, Span::call_site());
+ if let Type::Option(ty) = ty {
+ let inner = quote!(_val);
+ let format = format_field(&inner, ty).map(|format| {
+ quote! {
+ let #inner = &self.0;
+ formatter.write_str("(")?;
+ Debug::fmt(#format, formatter)?;
+ formatter.write_str(")")?;
+ }
+ });
+ let ty = rust_type(ty);
+ Some(quote! {
+ if let Some(val) = &_val.#ident {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(#ty);
+ impl Debug for Print {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Some")?;
+ #format
+ Ok(())
+ }
+ }
+ formatter.field(#f, Print::ref_cast(val));
+ }
+ })
+ } else {
+ let val = quote!(&_val.#ident);
+ let format = format_field(&val, ty)?;
+ let mut call = quote! {
+ formatter.field(#f, #format);
+ };
+ if let Type::Vec(_) | Type::Punctuated(_) = ty {
+ call = quote! {
+ if !_val.#ident.is_empty() {
+ #call
+ }
+ };
+ }
+ Some(call)
+ }
+ });
+ quote! {
+ let mut formatter = formatter.debug_struct(#name);
+ #(#fields)*
+ formatter.finish()
+ }
+ }
+ Data::Private => {
+ if node.ident == "LitInt" || node.ident == "LitFloat" {
+ quote! {
+ write!(formatter, "{}", _val)
+ }
+ } else {
+ quote! {
+ write!(formatter, "{:?}", _val.value())
+ }
+ }
+ }
+ }
+}
+
+fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
+ if node.ident == "Reserved" {
+ return TokenStream::new();
+ }
+
+ let ident = Ident::new(&node.ident, Span::call_site());
+ let body = expand_impl_body(defs, node, &node.ident);
+
+ quote! {
+ impl Debug for Lite<syn::#ident> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+ #body
+ }
+ }
+ }
+}
+
+pub fn generate(defs: &Definitions) -> Result<()> {
+ let mut impls = TokenStream::new();
+ for node in &defs.types {
+ impls.extend(expand_impl(&defs, node));
+ }
+
+ file::write(
+ DEBUG_SRC,
+ quote! {
+ use super::{Lite, RefCast};
+ use std::fmt::{self, Debug};
+
+ #impls
+ },
+ )?;
+
+ Ok(())
+}
diff --git a/syn/codegen/src/file.rs b/syn/codegen/src/file.rs
new file mode 100644
index 0000000..5521d75
--- /dev/null
+++ b/syn/codegen/src/file.rs
@@ -0,0 +1,32 @@
+use anyhow::Result;
+use proc_macro2::TokenStream;
+use std::fs;
+use std::io::Write;
+use std::path::Path;
+
+pub fn write<P: AsRef<Path>>(path: P, content: TokenStream) -> Result<()> {
+ let mut formatted = Vec::new();
+ writeln!(
+ formatted,
+ "// This file is @generated by syn-internal-codegen."
+ )?;
+ writeln!(formatted, "// It is not intended for manual editing.")?;
+ writeln!(formatted)?;
+
+ let mut config = rustfmt::Config::default();
+ config.set().emit_mode(rustfmt::EmitMode::Stdout);
+ config.set().verbose(rustfmt::Verbosity::Quiet);
+ config.set().format_macro_matchers(true);
+ config.set().normalize_doc_attributes(true);
+
+ let mut session = rustfmt::Session::new(config, Some(&mut formatted));
+ session.format(rustfmt::Input::Text(content.to_string()))?;
+ drop(session);
+
+ if path.as_ref().is_file() && fs::read(&path)? == formatted {
+ return Ok(());
+ }
+
+ fs::write(path, formatted)?;
+ Ok(())
+}
diff --git a/syn/codegen/src/fold.rs b/syn/codegen/src/fold.rs
new file mode 100644
index 0000000..6914d76
--- /dev/null
+++ b/syn/codegen/src/fold.rs
@@ -0,0 +1,284 @@
+use crate::{file, full, gen};
+use anyhow::Result;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::quote;
+use syn::Index;
+use syn_codegen::{Data, Definitions, Features, Node, Type};
+
+const FOLD_SRC: &str = "../src/gen/fold.rs";
+
+fn simple_visit(item: &str, name: &TokenStream) -> TokenStream {
+ let ident = gen::under_name(item);
+ let method = Ident::new(&format!("fold_{}", ident), Span::call_site());
+ quote! {
+ f.#method(#name)
+ }
+}
+
+fn visit(
+ ty: &Type,
+ features: &Features,
+ defs: &Definitions,
+ name: &TokenStream,
+) -> Option<TokenStream> {
+ match ty {
+ Type::Box(t) => {
+ let res = visit(t, features, defs, &quote!(*#name))?;
+ Some(quote! {
+ Box::new(#res)
+ })
+ }
+ Type::Vec(t) => {
+ let operand = quote!(it);
+ let val = visit(t, features, defs, &operand)?;
+ Some(quote! {
+ FoldHelper::lift(#name, |it| { #val })
+ })
+ }
+ Type::Punctuated(p) => {
+ let operand = quote!(it);
+ let val = visit(&p.element, features, defs, &operand)?;
+ Some(quote! {
+ FoldHelper::lift(#name, |it| { #val })
+ })
+ }
+ Type::Option(t) => {
+ let it = quote!(it);
+ let val = visit(t, features, defs, &it)?;
+ Some(quote! {
+ (#name).map(|it| { #val })
+ })
+ }
+ Type::Tuple(t) => {
+ let mut code = TokenStream::new();
+ for (i, elem) in t.iter().enumerate() {
+ let i = Index::from(i);
+ let it = quote!((#name).#i);
+ let val = visit(elem, features, defs, &it).unwrap_or(it);
+ code.extend(val);
+ code.extend(quote!(,));
+ }
+ Some(quote! {
+ (#code)
+ })
+ }
+ Type::Token(t) => {
+ let repr = &defs.tokens[t];
+ let is_keyword = repr.chars().next().unwrap().is_alphabetic();
+ let spans = if is_keyword {
+ quote!(span)
+ } else {
+ quote!(spans)
+ };
+ let ty = if repr == "await" {
+ quote!(crate::token::Await)
+ } else {
+ syn::parse_str(&format!("Token![{}]", repr)).unwrap()
+ };
+ Some(quote! {
+ #ty(tokens_helper(f, &#name.#spans))
+ })
+ }
+ Type::Group(t) => {
+ let ty = Ident::new(t, Span::call_site());
+ Some(quote! {
+ #ty(tokens_helper(f, &#name.span))
+ })
+ }
+ Type::Syn(t) => {
+ fn requires_full(features: &Features) -> bool {
+ features.any.contains("full") && features.any.len() == 1
+ }
+ let mut res = simple_visit(t, name);
+ let target = defs.types.iter().find(|ty| ty.ident == *t).unwrap();
+ if requires_full(&target.features) && !requires_full(features) {
+ res = quote!(full!(#res));
+ }
+ Some(res)
+ }
+ Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
+ Type::Ext(_) | Type::Std(_) => None,
+ }
+}
+
+fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Definitions) {
+ let under_name = gen::under_name(&s.ident);
+ let ty = Ident::new(&s.ident, Span::call_site());
+ let fold_fn = Ident::new(&format!("fold_{}", under_name), Span::call_site());
+
+ let mut fold_impl = TokenStream::new();
+
+ match &s.data {
+ Data::Enum(variants) => {
+ let mut fold_variants = TokenStream::new();
+
+ for (variant, fields) in variants {
+ let variant_ident = Ident::new(variant, Span::call_site());
+
+ if fields.is_empty() {
+ fold_variants.extend(quote! {
+ #ty::#variant_ident => {
+ #ty::#variant_ident
+ }
+ });
+ } else {
+ let mut bind_fold_fields = TokenStream::new();
+ let mut fold_fields = TokenStream::new();
+
+ for (idx, ty) in fields.iter().enumerate() {
+ let name = format!("_binding_{}", idx);
+ let binding = Ident::new(&name, Span::call_site());
+
+ bind_fold_fields.extend(quote! {
+ #binding,
+ });
+
+ let owned_binding = quote!(#binding);
+
+ fold_fields.extend(
+ visit(ty, &s.features, defs, &owned_binding).unwrap_or(owned_binding),
+ );
+
+ fold_fields.extend(quote!(,));
+ }
+
+ fold_variants.extend(quote! {
+ #ty::#variant_ident(#bind_fold_fields) => {
+ #ty::#variant_ident(
+ #fold_fields
+ )
+ }
+ });
+ }
+ }
+
+ let nonexhaustive = if s.exhaustive {
+ None
+ } else {
+ Some(quote!(_ => unreachable!()))
+ };
+
+ fold_impl.extend(quote! {
+ match node {
+ #fold_variants
+ #nonexhaustive
+ }
+ });
+ }
+ Data::Struct(fields) => {
+ let mut fold_fields = TokenStream::new();
+
+ for (field, ty) in fields {
+ let id = Ident::new(&field, Span::call_site());
+ let ref_toks = quote!(node.#id);
+
+ if let Type::Syn(ty) = ty {
+ if ty == "Reserved" {
+ fold_fields.extend(quote! {
+ #id: #ref_toks,
+ });
+ continue;
+ }
+ }
+
+ let fold = visit(&ty, &s.features, defs, &ref_toks).unwrap_or(ref_toks);
+
+ fold_fields.extend(quote! {
+ #id: #fold,
+ });
+ }
+
+ if !fields.is_empty() {
+ fold_impl.extend(quote! {
+ #ty {
+ #fold_fields
+ }
+ })
+ } else {
+ if ty == "Ident" {
+ fold_impl.extend(quote! {
+ let mut node = node;
+ let span = f.fold_span(node.span());
+ node.set_span(span);
+ });
+ }
+ fold_impl.extend(quote! {
+ node
+ });
+ }
+ }
+ Data::Private => {
+ if ty == "Ident" {
+ fold_impl.extend(quote! {
+ let mut node = node;
+ let span = f.fold_span(node.span());
+ node.set_span(span);
+ });
+ }
+ fold_impl.extend(quote! {
+ node
+ });
+ }
+ }
+
+ let fold_span_only =
+ s.data == Data::Private && !gen::TERMINAL_TYPES.contains(&s.ident.as_str());
+ if fold_span_only {
+ fold_impl = quote! {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+ };
+ }
+
+ traits.extend(quote! {
+ fn #fold_fn(&mut self, i: #ty) -> #ty {
+ #fold_fn(self, i)
+ }
+ });
+
+ impls.extend(quote! {
+ pub fn #fold_fn<F>(f: &mut F, node: #ty) -> #ty
+ where
+ F: Fold + ?Sized,
+ {
+ #fold_impl
+ }
+ });
+}
+
+pub fn generate(defs: &Definitions) -> Result<()> {
+ let (traits, impls) = gen::traverse(defs, node);
+ let full_macro = full::get_macro();
+ file::write(
+ FOLD_SRC,
+ quote! {
+ // Unreachable code is generated sometimes without the full feature.
+ #![allow(unreachable_code, unused_variables)]
+
+ use crate::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::token::{Brace, Bracket, Paren, Group};
+ use proc_macro2::Span;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+
+ #full_macro
+
+ /// Syntax tree traversal to transform the nodes of an owned syntax tree.
+ ///
+ /// See the [module documentation] for details.
+ ///
+ /// [module documentation]: self
+ ///
+ /// *This trait is available if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #traits
+ }
+
+ #impls
+ },
+ )?;
+ Ok(())
+}
diff --git a/syn/codegen/src/full.rs b/syn/codegen/src/full.rs
new file mode 100644
index 0000000..a410031
--- /dev/null
+++ b/syn/codegen/src/full.rs
@@ -0,0 +1,20 @@
+use proc_macro2::TokenStream;
+use quote::quote;
+
+pub fn get_macro() -> TokenStream {
+ quote! {
+ #[cfg(feature = "full")]
+ macro_rules! full {
+ ($e:expr) => {
+ $e
+ };
+ }
+
+ #[cfg(all(feature = "derive", not(feature = "full")))]
+ macro_rules! full {
+ ($e:expr) => {
+ unreachable!()
+ };
+ }
+ }
+}
diff --git a/syn/codegen/src/gen.rs b/syn/codegen/src/gen.rs
new file mode 100644
index 0000000..ef43182
--- /dev/null
+++ b/syn/codegen/src/gen.rs
@@ -0,0 +1,45 @@
+use inflections::Inflect;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::quote;
+use syn_codegen::{Data, Definitions, Features, Node};
+
+pub const TERMINAL_TYPES: &[&str] = &["Span", "Ident"];
+
+pub fn under_name(name: &str) -> Ident {
+ Ident::new(&name.to_snake_case(), Span::call_site())
+}
+
+pub fn traverse(
+ defs: &Definitions,
+ node: fn(&mut TokenStream, &mut TokenStream, &Node, &Definitions),
+) -> (TokenStream, TokenStream) {
+ let mut types = defs.types.clone();
+ for terminal in TERMINAL_TYPES {
+ types.push(Node {
+ ident: terminal.to_string(),
+ features: Features::default(),
+ data: Data::Private,
+ exhaustive: true,
+ });
+ }
+ types.sort_by(|a, b| a.ident.cmp(&b.ident));
+
+ let mut traits = TokenStream::new();
+ let mut impls = TokenStream::new();
+ for s in types {
+ if s.ident == "Reserved" {
+ continue;
+ }
+ let features = &s.features.any;
+ let features = match features.len() {
+ 0 => quote!(),
+ 1 => quote!(#[cfg(feature = #(#features)*)]),
+ _ => quote!(#[cfg(any(#(feature = #features),*))]),
+ };
+ traits.extend(features.clone());
+ impls.extend(features);
+ node(&mut traits, &mut impls, &s, defs);
+ }
+
+ (traits, impls)
+}
diff --git a/syn/codegen/src/json.rs b/syn/codegen/src/json.rs
new file mode 100644
index 0000000..53904bf
--- /dev/null
+++ b/syn/codegen/src/json.rs
@@ -0,0 +1,18 @@
+use anyhow::Result;
+use std::fs;
+use std::path::Path;
+use syn_codegen::Definitions;
+
+pub fn generate(defs: &Definitions) -> Result<()> {
+ let mut j = serde_json::to_string_pretty(&defs)?;
+ j.push('\n');
+
+ let check: Definitions = serde_json::from_str(&j)?;
+ assert_eq!(*defs, check);
+
+ let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR"));
+ let json_path = codegen_root.join("../syn.json");
+ fs::write(json_path, j)?;
+
+ Ok(())
+}
diff --git a/syn/codegen/src/main.rs b/syn/codegen/src/main.rs
new file mode 100644
index 0000000..9b1b5dd
--- /dev/null
+++ b/syn/codegen/src/main.rs
@@ -0,0 +1,36 @@
+// This crate crawls the Syn source directory to find all structs and enums that
+// form the Syn syntax tree.
+//
+// A machine-readable representation of the syntax tree is saved to syn.json in
+// the repo root for other code generation tools to consume. The syn-codegen
+// crate (https://docs.rs/syn-codegen/) provides the data structures for parsing
+// and making use of syn.json from Rust code.
+//
+// Finally this crate generates the Visit, VisitMut, and Fold traits in Syn
+// programmatically from the syntax tree description.
+
+#![recursion_limit = "128"]
+#![allow(clippy::needless_pass_by_value)]
+
+mod debug;
+mod file;
+mod fold;
+mod full;
+mod gen;
+mod json;
+mod operand;
+mod parse;
+mod version;
+mod visit;
+mod visit_mut;
+
+fn main() -> anyhow::Result<()> {
+ color_backtrace::install();
+ let defs = parse::parse()?;
+ json::generate(&defs)?;
+ fold::generate(&defs)?;
+ visit::generate(&defs)?;
+ visit_mut::generate(&defs)?;
+ debug::generate(&defs)?;
+ Ok(())
+}
diff --git a/syn/codegen/src/operand.rs b/syn/codegen/src/operand.rs
new file mode 100644
index 0000000..db3bd18
--- /dev/null
+++ b/syn/codegen/src/operand.rs
@@ -0,0 +1,38 @@
+use proc_macro2::TokenStream;
+use quote::quote;
+
+pub enum Operand {
+ Borrowed(TokenStream),
+ Owned(TokenStream),
+}
+
+pub use self::Operand::*;
+
+impl Operand {
+ pub fn tokens(&self) -> &TokenStream {
+ match self {
+ Borrowed(n) | Owned(n) => n,
+ }
+ }
+
+ pub fn ref_tokens(&self) -> TokenStream {
+ match self {
+ Borrowed(n) => n.clone(),
+ Owned(n) => quote!(&#n),
+ }
+ }
+
+ pub fn ref_mut_tokens(&self) -> TokenStream {
+ match self {
+ Borrowed(n) => n.clone(),
+ Owned(n) => quote!(&mut #n),
+ }
+ }
+
+ pub fn owned_tokens(&self) -> TokenStream {
+ match self {
+ Borrowed(n) => quote!(*#n),
+ Owned(n) => n.clone(),
+ }
+ }
+}
diff --git a/syn/codegen/src/parse.rs b/syn/codegen/src/parse.rs
new file mode 100644
index 0000000..cdd6085
--- /dev/null
+++ b/syn/codegen/src/parse.rs
@@ -0,0 +1,657 @@
+use crate::version;
+use anyhow::{bail, Result};
+use indexmap::IndexMap;
+use quote::quote;
+use syn::parse::Parser;
+use syn::{parse_quote, Data, DataStruct, DeriveInput, Ident, Item};
+use syn_codegen as types;
+use thiserror::Error;
+
+use std::collections::BTreeMap;
+use std::fs;
+use std::path::{Path, PathBuf};
+
+const SYN_CRATE_ROOT: &str = "../src/lib.rs";
+const TOKEN_SRC: &str = "../src/token.rs";
+const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
+const EXTRA_TYPES: &[&str] = &["Lifetime"];
+const NONEXHAUSTIVE: &str = "__Nonexhaustive";
+
+// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
+type ItemLookup = BTreeMap<Ident, AstItem>;
+type TokenLookup = BTreeMap<String, String>;
+
+/// Parse the contents of `src` and return a list of AST types.
+pub fn parse() -> Result<types::Definitions> {
+ let mut item_lookup = BTreeMap::new();
+ load_file(SYN_CRATE_ROOT, &[], &mut item_lookup)?;
+
+ let token_lookup = load_token_file(TOKEN_SRC)?;
+
+ let version = version::get()?;
+
+ let types = item_lookup
+ .values()
+ .map(|item| introspect_item(item, &item_lookup, &token_lookup))
+ .collect();
+
+ let tokens = token_lookup
+ .into_iter()
+ .map(|(name, ty)| (ty, name))
+ .collect();
+
+ Ok(types::Definitions {
+ version,
+ types,
+ tokens,
+ })
+}
+
+/// Data extracted from syn source
+#[derive(Clone)]
+pub struct AstItem {
+ ast: DeriveInput,
+ features: Vec<syn::Attribute>,
+}
+
+fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
+ let features = introspect_features(&item.features);
+
+ match &item.ast.data {
+ Data::Enum(ref data) => types::Node {
+ ident: item.ast.ident.to_string(),
+ features,
+ data: types::Data::Enum(introspect_enum(data, items, tokens)),
+ exhaustive: data.variants.iter().all(|v| v.ident != NONEXHAUSTIVE),
+ },
+ Data::Struct(ref data) => types::Node {
+ ident: item.ast.ident.to_string(),
+ features,
+ data: {
+ if data.fields.iter().all(|f| is_pub(&f.vis)) {
+ types::Data::Struct(introspect_struct(data, items, tokens))
+ } else {
+ types::Data::Private
+ }
+ },
+ exhaustive: true,
+ },
+ Data::Union(..) => panic!("Union not supported"),
+ }
+}
+
+fn introspect_enum(
+ item: &syn::DataEnum,
+ items: &ItemLookup,
+ tokens: &TokenLookup,
+) -> types::Variants {
+ item.variants
+ .iter()
+ .filter_map(|variant| {
+ if variant.ident == NONEXHAUSTIVE {
+ return None;
+ }
+ let fields = match &variant.fields {
+ syn::Fields::Unnamed(fields) => fields
+ .unnamed
+ .iter()
+ .map(|field| introspect_type(&field.ty, items, tokens))
+ .collect(),
+ syn::Fields::Unit => vec![],
+ _ => panic!("Enum representation not supported"),
+ };
+ Some((variant.ident.to_string(), fields))
+ })
+ .collect()
+}
+
+fn introspect_struct(
+ item: &syn::DataStruct,
+ items: &ItemLookup,
+ tokens: &TokenLookup,
+) -> types::Fields {
+ match &item.fields {
+ syn::Fields::Named(fields) => fields
+ .named
+ .iter()
+ .map(|field| {
+ (
+ field.ident.as_ref().unwrap().to_string(),
+ introspect_type(&field.ty, items, tokens),
+ )
+ })
+ .collect(),
+ syn::Fields::Unit => IndexMap::new(),
+ _ => panic!("Struct representation not supported"),
+ }
+}
+
+fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
+ match item {
+ syn::Type::Path(syn::TypePath {
+ qself: None,
+ ref path,
+ }) => {
+ let last = path.segments.last().unwrap();
+ let string = last.ident.to_string();
+
+ match string.as_str() {
+ "Option" => {
+ let nested = introspect_type(first_arg(&last.arguments), items, tokens);
+ types::Type::Option(Box::new(nested))
+ }
+ "Punctuated" => {
+ let nested = introspect_type(first_arg(&last.arguments), items, tokens);
+ let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
+ types::Type::Token(s) => s,
+ _ => panic!(),
+ };
+
+ types::Type::Punctuated(types::Punctuated {
+ element: Box::new(nested),
+ punct,
+ })
+ }
+ "Vec" => {
+ let nested = introspect_type(first_arg(&last.arguments), items, tokens);
+ types::Type::Vec(Box::new(nested))
+ }
+ "Box" => {
+ let nested = introspect_type(first_arg(&last.arguments), items, tokens);
+ types::Type::Box(Box::new(nested))
+ }
+ "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
+ "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
+ "String" | "u32" | "usize" | "bool" => types::Type::Std(string),
+ "Await" => types::Type::Token("Await".to_string()),
+ _ => {
+ if items.get(&last.ident).is_some() || last.ident == "Reserved" {
+ types::Type::Syn(string)
+ } else {
+ unimplemented!("{}", string);
+ }
+ }
+ }
+ }
+ syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
+ let tys = elems
+ .iter()
+ .map(|ty| introspect_type(&ty, items, tokens))
+ .collect();
+ types::Type::Tuple(tys)
+ }
+ syn::Type::Macro(syn::TypeMacro { ref mac })
+ if mac.path.segments.last().unwrap().ident == "Token" =>
+ {
+ let content = mac.tokens.to_string();
+ let ty = tokens.get(&content).unwrap().to_string();
+
+ types::Type::Token(ty)
+ }
+ _ => panic!("{}", quote!(#item).to_string()),
+ }
+}
+
+fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
+ let mut ret = types::Features::default();
+
+ for attr in attrs {
+ if !attr.path.is_ident("cfg") {
+ continue;
+ }
+
+ let features = parsing::parse_features.parse2(attr.tokens.clone()).unwrap();
+
+ if ret.any.is_empty() {
+ ret = features;
+ } else if ret.any.len() < features.any.len() {
+ assert!(ret.any.iter().all(|f| features.any.contains(f)));
+ } else {
+ assert!(features.any.iter().all(|f| ret.any.contains(f)));
+ ret = features;
+ }
+ }
+
+ ret
+}
+
+fn is_pub(vis: &syn::Visibility) -> bool {
+ match vis {
+ syn::Visibility::Public(_) => true,
+ _ => false,
+ }
+}
+
+fn first_arg(params: &syn::PathArguments) -> &syn::Type {
+ let data = match *params {
+ syn::PathArguments::AngleBracketed(ref data) => data,
+ _ => panic!("Expected at least 1 type argument here"),
+ };
+
+ match *data
+ .args
+ .first()
+ .expect("Expected at least 1 type argument here")
+ {
+ syn::GenericArgument::Type(ref ty) => ty,
+ _ => panic!("Expected at least 1 type argument here"),
+ }
+}
+
+fn last_arg(params: &syn::PathArguments) -> &syn::Type {
+ let data = match *params {
+ syn::PathArguments::AngleBracketed(ref data) => data,
+ _ => panic!("Expected at least 1 type argument here"),
+ };
+
+ match *data
+ .args
+ .last()
+ .expect("Expected at least 1 type argument here")
+ {
+ syn::GenericArgument::Type(ref ty) => ty,
+ _ => panic!("Expected at least 1 type argument here"),
+ }
+}
+
+// Hand-rolled parsers for the bodies of the `ast_struct!`, `ast_enum!` and
+// `ast_enum_of_structs!` macro invocations found in syn's sources. These
+// recover the AST item definitions that the code generator operates on.
+mod parsing {
+ use super::{AstItem, TokenLookup};
+
+ use proc_macro2::{TokenStream, TokenTree};
+ use quote::quote;
+ use syn;
+ use syn::parse::{ParseStream, Result};
+ use syn::*;
+ use syn_codegen as types;
+
+ use std::collections::{BTreeMap, BTreeSet};
+
+ // Returns true if the next tokens are `#` followed by the ident `tag`,
+ // without consuming any input (parses on a fork of the stream).
+ fn peek_tag(input: ParseStream, tag: &str) -> bool {
+ let ahead = input.fork();
+ ahead.parse::<Token![#]>().is_ok()
+ && ahead
+ .parse::<Ident>()
+ .map(|ident| ident == tag)
+ .unwrap_or(false)
+ }
+
+ // Parses #full - returns #[cfg(feature = "full")] if it is present, and
+ // nothing otherwise.
+ fn full(input: ParseStream) -> Vec<syn::Attribute> {
+ if peek_tag(input, "full") {
+ input.parse::<Token![#]>().unwrap();
+ input.parse::<Ident>().unwrap();
+ vec![parse_quote!(#[cfg(feature = "full")])]
+ } else {
+ vec![]
+ }
+ }
+
+ // Consumes an optional `#manual_extra_traits` or
+ // `#manual_extra_traits_debug` marker; the marker carries no payload and
+ // is simply discarded.
+ fn skip_manual_extra_traits(input: ParseStream) {
+ if peek_tag(input, "manual_extra_traits") || peek_tag(input, "manual_extra_traits_debug") {
+ input.parse::<Token![#]>().unwrap();
+ input.parse::<Ident>().unwrap();
+ }
+ }
+
+ // Parses a simple AstStruct without the `pub struct` prefix.
+ fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
+ let ident: Ident = input.parse()?;
+ let features = full(input);
+ skip_manual_extra_traits(input);
+ let rest: TokenStream = input.parse()?;
+ Ok(AstItem {
+ // Reassemble a real `struct` item so syn can parse it as DeriveInput.
+ ast: syn::parse2(quote! {
+ pub struct #ident #rest
+ })?,
+ features,
+ })
+ }
+
+ // Parses the body of an `ast_struct!` invocation into an AstItem.
+ pub fn ast_struct(input: ParseStream) -> Result<AstItem> {
+ input.call(Attribute::parse_outer)?;
+ input.parse::<Token![pub]>()?;
+ input.parse::<Token![struct]>()?;
+ let res = input.call(ast_struct_inner)?;
+ Ok(res)
+ }
+
+ // Consumes an optional `#no_visit` marker, reporting whether it was seen.
+ fn no_visit(input: ParseStream) -> bool {
+ if peek_tag(input, "no_visit") {
+ input.parse::<Token![#]>().unwrap();
+ input.parse::<Ident>().unwrap();
+ true
+ } else {
+ false
+ }
+ }
+
+ // Parses the body of an `ast_enum!` invocation. Returns None when the
+ // enum is tagged `#no_visit`, i.e. it is excluded from code generation.
+ pub fn ast_enum(input: ParseStream) -> Result<Option<AstItem>> {
+ input.call(Attribute::parse_outer)?;
+ input.parse::<Token![pub]>()?;
+ input.parse::<Token![enum]>()?;
+ let ident: Ident = input.parse()?;
+ skip_manual_extra_traits(input);
+ let no_visit = no_visit(input);
+ let rest: TokenStream = input.parse()?;
+ Ok(if no_visit {
+ None
+ } else {
+ Some(AstItem {
+ ast: syn::parse2(quote! {
+ pub enum #ident #rest
+ })?,
+ features: vec![],
+ })
+ })
+ }
+
+ // A single variant of an ast_enum_of_structs!
+ struct EosVariant {
+ name: Ident,
+ member: Option<Path>,
+ }
+ // Parses one variant: either `Name(path::to::Member),` or a unit `Name,`.
+ fn eos_variant(input: ParseStream) -> Result<EosVariant> {
+ input.call(Attribute::parse_outer)?;
+ let variant: Ident = input.parse()?;
+ let member = if input.peek(token::Paren) {
+ let content;
+ parenthesized!(content in input);
+ let path: Path = content.parse()?;
+ Some(path)
+ } else {
+ None
+ };
+ input.parse::<Token![,]>()?;
+ Ok(EosVariant {
+ name: variant,
+ member,
+ })
+ }
+
+ // Parses the body of an `ast_enum_of_structs!` invocation, flattening its
+ // variant list into a plain `pub enum` definition.
+ pub fn ast_enum_of_structs(input: ParseStream) -> Result<AstItem> {
+ input.call(Attribute::parse_outer)?;
+ input.parse::<Token![pub]>()?;
+ input.parse::<Token![enum]>()?;
+ let ident: Ident = input.parse()?;
+ skip_manual_extra_traits(input);
+
+ let content;
+ braced!(content in input);
+ let mut variants = Vec::new();
+ while !content.is_empty() {
+ variants.push(content.call(eos_variant)?);
+ }
+
+ // The only trailing ident the macro accepts is this marker; anything
+ // else is a bug in the source being parsed.
+ if let Some(ident) = input.parse::<Option<Ident>>()? {
+ assert_eq!(ident, "do_not_generate_to_tokens");
+ }
+
+ let enum_item = {
+ let variants = variants.iter().map(|v| {
+ let name = v.name.clone();
+ match v.member {
+ Some(ref member) => quote!(#name(#member)),
+ None => quote!(#name),
+ }
+ });
+ parse_quote! {
+ pub enum #ident {
+ #(#variants),*
+ }
+ }
+ };
+ Ok(AstItem {
+ ast: enum_item,
+ features: vec![],
+ })
+ }
+
+ // Custom keywords needed to parse the `macro_rules! Token` definition.
+ mod kw {
+ syn::custom_keyword!(macro_rules);
+ syn::custom_keyword!(Token);
+ }
+
+ // Parses the body of the `export_token_macro` invocation and builds the
+ // token-string -> token-type-name lookup table.
+ pub fn parse_token_macro(input: ParseStream) -> Result<TokenLookup> {
+ input.parse::<TokenTree>()?;
+ input.parse::<Token![=>]>()?;
+
+ let definition;
+ braced!(definition in input);
+ definition.call(Attribute::parse_outer)?;
+ definition.parse::<kw::macro_rules>()?;
+ definition.parse::<Token![!]>()?;
+ definition.parse::<kw::Token>()?;
+
+ let rules;
+ braced!(rules in definition);
+ input.parse::<Token![;]>()?;
+
+ let mut tokens = BTreeMap::new();
+ while !rules.is_empty() {
+ if rules.peek(Token![$]) {
+ // Skip the `$...*` repetition arm and record the hard-coded
+ // mapping it stands for.
+ rules.parse::<Token![$]>()?;
+ rules.parse::<TokenTree>()?;
+ rules.parse::<Token![*]>()?;
+ tokens.insert("await".to_owned(), "Await".to_owned());
+ } else {
+ // Ordinary rule: `(pattern) => { $path };` maps the pattern's
+ // token text to the last segment of the expansion path.
+ let pattern;
+ parenthesized!(pattern in rules);
+ let token = pattern.parse::<TokenStream>()?.to_string();
+ rules.parse::<Token![=>]>()?;
+ let expansion;
+ braced!(expansion in rules);
+ rules.parse::<Token![;]>()?;
+ expansion.parse::<Token![$]>()?;
+ let path: Path = expansion.parse()?;
+ let ty = path.segments.last().unwrap().ident.to_string();
+ tokens.insert(token, ty.to_string());
+ }
+ }
+ Ok(tokens)
+ }
+
+ // Parses `feature = "name"`, returning the feature name.
+ fn parse_feature(input: ParseStream) -> Result<String> {
+ let i: syn::Ident = input.parse()?;
+ assert_eq!(i, "feature");
+
+ input.parse::<Token![=]>()?;
+ let s = input.parse::<syn::LitStr>()?;
+
+ Ok(s.value())
+ }
+
+ // Parses a cfg predicate of the form `(feature = "...")` or
+ // `(any(feature = "...", ...))` into a Features set. Panics on any other
+ // shape, which would indicate an unsupported cfg in syn's sources.
+ pub fn parse_features(input: ParseStream) -> Result<types::Features> {
+ let mut features = BTreeSet::new();
+
+ let level_1;
+ parenthesized!(level_1 in input);
+
+ // Peek at the first ident (`any` or `feature`) without consuming it.
+ let i: syn::Ident = level_1.fork().parse()?;
+
+ if i == "any" {
+ level_1.parse::<syn::Ident>()?;
+
+ let level_2;
+ parenthesized!(level_2 in level_1);
+
+ while !level_2.is_empty() {
+ features.insert(parse_feature(&level_2)?);
+
+ if !level_2.is_empty() {
+ level_2.parse::<Token![,]>()?;
+ }
+ }
+ } else if i == "feature" {
+ features.insert(parse_feature(&level_1)?);
+ assert!(level_1.is_empty());
+ } else {
+ panic!("{:?}", i);
+ }
+
+ assert!(input.is_empty());
+
+ Ok(types::Features { any: features })
+ }
+}
+
+// Returns `base` extended with every `#[cfg(...)]` attribute found in
+// `attrs`; non-cfg attributes are ignored.
+fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
+ let mut ret = base.to_owned();
+
+ for attr in attrs {
+ if attr.path.is_ident("cfg") {
+ ret.push(attr.clone());
+ }
+ }
+
+ ret
+}
+
+// Wraps a syn parse error with file path, line and column so that it is
+// reported like a compiler diagnostic (`path:line:column: error`).
+#[derive(Error, Debug)]
+#[error("{path}:{line}:{column}: {error}")]
+struct LoadFileError {
+ path: PathBuf,
+ line: usize,
+ column: usize,
+ error: syn::Error,
+}
+
+// Loads and parses one source file into `lookup`, converting any syn parse
+// error into a LoadFileError that carries the offending location.
+fn load_file<P: AsRef<Path>>(
+ name: P,
+ features: &[syn::Attribute],
+ lookup: &mut ItemLookup,
+) -> Result<()> {
+ let error = match do_load_file(&name, features, lookup).err() {
+ None => return Ok(()),
+ Some(error) => error,
+ };
+
+ // Non-syn errors (e.g. I/O failures) are propagated unchanged by `?`.
+ let error = error.downcast::<syn::Error>()?;
+ let span = error.span().start();
+
+ bail!(LoadFileError {
+ path: name.as_ref().to_owned(),
+ line: span.line,
+ column: span.column + 1, // span columns are 0-based; report 1-based
+ error,
+ })
+}
+
+// Parses one source file and records every AST item it declares, recursing
+// into non-inline submodules found in the same directory.
+fn do_load_file<P: AsRef<Path>>(
+ name: P,
+ features: &[syn::Attribute],
+ lookup: &mut ItemLookup,
+) -> Result<()> {
+ let name = name.as_ref();
+ let parent = name.parent().expect("no parent path");
+
+ // Parse the file
+ let src = fs::read_to_string(name)?;
+ let file = syn::parse_file(&src)?;
+
+ // Collect all of the interesting AstItems declared in this file or submodules.
+ 'items: for item in file.items {
+ match item {
+ Item::Mod(item) => {
+ // Don't inspect inline modules.
+ if item.content.is_some() {
+ continue;
+ }
+
+ // We don't want to try to load the generated rust files and
+ // parse them, so we ignore them here.
+ for name in IGNORED_MODS {
+ if item.ident == name {
+ continue 'items;
+ }
+ }
+
+ // Lookup any #[cfg()] attributes on the module and add them to
+ // the feature set.
+ //
+ // The derive module is weird because it is built with either
+ // `full` or `derive` but exported only under `derive`.
+ let features = if item.ident == "derive" {
+ vec![parse_quote!(#[cfg(feature = "derive")])]
+ } else {
+ get_features(&item.attrs, features)
+ };
+
+ // Look up the submodule file, and recursively parse it.
+ // Only handles same-directory .rs file submodules for now.
+ let path = parent.join(&format!("{}.rs", item.ident));
+ load_file(path, &features, lookup)?;
+ }
+ Item::Macro(item) => {
+ // Lookup any #[cfg()] attributes directly on the macro
+ // invocation, and add them to the feature set.
+ let features = get_features(&item.attrs, features);
+
+ // Try to parse the AstItem declaration out of the item.
+ let tts = item.mac.tokens.clone();
+ let found = if item.mac.path.is_ident("ast_struct") {
+ Some(parsing::ast_struct.parse2(tts)?)
+ } else if item.mac.path.is_ident("ast_enum") {
+ parsing::ast_enum.parse2(tts)?
+ } else if item.mac.path.is_ident("ast_enum_of_structs") {
+ Some(parsing::ast_enum_of_structs.parse2(tts)?)
+ } else {
+ continue;
+ };
+
+ // Record our features on the parsed AstItems.
+ // `found` is an Option, so this loop body runs at most once.
+ for mut item in found {
+ if item.ast.ident != "Reserved" {
+ item.features.extend(features.clone());
+ lookup.insert(item.ast.ident.clone(), item);
+ }
+ }
+ }
+ Item::Struct(item) => {
+ // Plain struct declarations are only recorded for the small
+ // EXTRA_TYPES whitelist; rebuild them as DeriveInput so they
+ // share a representation with the macro-declared items.
+ let ident = item.ident;
+ if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
+ lookup.insert(
+ ident.clone(),
+ AstItem {
+ ast: DeriveInput {
+ ident,
+ vis: item.vis,
+ attrs: item.attrs,
+ generics: item.generics,
+ data: Data::Struct(DataStruct {
+ fields: item.fields,
+ struct_token: item.struct_token,
+ semi_token: item.semi_token,
+ }),
+ },
+ features: features.to_owned(),
+ },
+ );
+ }
+ }
+ _ => {}
+ }
+ }
+ Ok(())
+}
+
+// Finds the `export_token_macro` invocation in the token file and returns the
+// token-string -> type-name lookup table parsed from its body. Panics if the
+// macro is not found, since codegen cannot proceed without it.
+fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup> {
+ let name = name.as_ref();
+ let src = fs::read_to_string(name)?;
+ let file = syn::parse_file(&src)?;
+ for item in file.items {
+ match item {
+ Item::Macro(item) => {
+ // Only the macro invocation named `export_token_macro` matters.
+ match item.ident {
+ Some(ref i) if i == "export_token_macro" => {}
+ _ => continue,
+ }
+ let tokens = item.mac.parse_body_with(parsing::parse_token_macro)?;
+ return Ok(tokens);
+ }
+ _ => {}
+ }
+ }
+
+ panic!("failed to parse Token macro")
+}
diff --git a/syn/codegen/src/version.rs b/syn/codegen/src/version.rs
new file mode 100644
index 0000000..9374624
--- /dev/null
+++ b/syn/codegen/src/version.rs
@@ -0,0 +1,24 @@
+use anyhow::Result;
+use semver::Version;
+use serde::Deserialize;
+
+use std::fs;
+use std::path::Path;
+
+// Reads the syn crate's version out of the parent directory's Cargo.toml
+// (relative to this codegen crate's manifest dir).
+pub fn get() -> Result<Version> {
+ let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR"));
+ let syn_cargo_toml = codegen_root.join("../Cargo.toml");
+ let manifest = fs::read_to_string(syn_cargo_toml)?;
+ let parsed: Manifest = toml::from_str(&manifest)?;
+ Ok(parsed.package.version)
+}
+
+// Minimal model of a Cargo.toml: only the fields needed to read the version.
+#[derive(Debug, Deserialize)]
+struct Manifest {
+ package: Package,
+}
+
+#[derive(Debug, Deserialize)]
+struct Package {
+ version: Version,
+}
diff --git a/syn/codegen/src/visit.rs b/syn/codegen/src/visit.rs
new file mode 100644
index 0000000..41bc9e9
--- /dev/null
+++ b/syn/codegen/src/visit.rs
@@ -0,0 +1,265 @@
+use crate::operand::{Borrowed, Operand, Owned};
+use crate::{file, full, gen};
+use anyhow::Result;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::quote;
+use syn::Index;
+use syn_codegen::{Data, Definitions, Features, Node, Type};
+
+const VISIT_SRC: &str = "../src/gen/visit.rs";
+
+// Emits a call to the visitor method for the syn type `item`,
+// e.g. `v.visit_expr(&name)`.
+fn simple_visit(item: &str, name: &Operand) -> TokenStream {
+ let ident = gen::under_name(item);
+ let method = Ident::new(&format!("visit_{}", ident), Span::call_site());
+ let name = name.ref_tokens();
+ quote! {
+ v.#method(#name)
+ }
+}
+
+// Emits `skip!(name)` - a no-op marker for fields that are not visited.
+fn noop_visit(name: &Operand) -> TokenStream {
+ let name = name.tokens();
+ quote! {
+ skip!(#name)
+ }
+}
+
+// Recursively generates visiting code for one occurrence of `ty`, reading it
+// through `name`. Returns None for external/std types that the generated
+// visitor does not descend into.
+fn visit(
+ ty: &Type,
+ features: &Features,
+ defs: &Definitions,
+ name: &Operand,
+) -> Option<TokenStream> {
+ match ty {
+ Type::Box(t) => {
+ // Deref the box and visit the contained value.
+ let name = name.owned_tokens();
+ visit(t, features, defs, &Owned(quote!(*#name)))
+ }
+ Type::Vec(t) => {
+ let operand = Borrowed(quote!(it));
+ let val = visit(t, features, defs, &operand)?;
+ let name = name.ref_tokens();
+ Some(quote! {
+ for it in #name {
+ #val
+ }
+ })
+ }
+ Type::Punctuated(p) => {
+ // Visit each element and the punctuation token between elements.
+ let operand = Borrowed(quote!(it));
+ let val = visit(&p.element, features, defs, &operand)?;
+ let name = name.ref_tokens();
+ Some(quote! {
+ for el in Punctuated::pairs(#name) {
+ let (it, p) = el.into_tuple();
+ #val;
+ if let Some(p) = p {
+ tokens_helper(v, &p.spans);
+ }
+ }
+ })
+ }
+ Type::Option(t) => {
+ let it = Borrowed(quote!(it));
+ let val = visit(t, features, defs, &it)?;
+ let name = name.owned_tokens();
+ Some(quote! {
+ if let Some(it) = &#name {
+ #val
+ }
+ })
+ }
+ Type::Tuple(t) => {
+ // Visit each tuple element by index; unvisitable elements get the
+ // skip!() no-op so every position is accounted for.
+ let mut code = TokenStream::new();
+ for (i, elem) in t.iter().enumerate() {
+ let name = name.tokens();
+ let i = Index::from(i);
+ let it = Owned(quote!((#name).#i));
+ let val = visit(elem, features, defs, &it).unwrap_or_else(|| noop_visit(&it));
+ code.extend(val);
+ code.extend(quote!(;));
+ }
+ Some(code)
+ }
+ Type::Token(t) => {
+ let name = name.tokens();
+ let repr = &defs.tokens[t];
+ // Keyword tokens expose a single `span`; punctuation tokens expose
+ // a `spans` array.
+ let is_keyword = repr.chars().next().unwrap().is_alphabetic();
+ let spans = if is_keyword {
+ quote!(span)
+ } else {
+ quote!(spans)
+ };
+ Some(quote! {
+ tokens_helper(v, &#name.#spans)
+ })
+ }
+ Type::Group(_) => {
+ let name = name.tokens();
+ Some(quote! {
+ tokens_helper(v, &#name.span)
+ })
+ }
+ Type::Syn(t) => {
+ // A type gated exclusively on "full" must be wrapped in full!() when
+ // referenced from a node that is not itself full-only.
+ fn requires_full(features: &Features) -> bool {
+ features.any.contains("full") && features.any.len() == 1
+ }
+ let mut res = simple_visit(t, name);
+ let target = defs.types.iter().find(|ty| ty.ident == *t).unwrap();
+ if requires_full(&target.features) && !requires_full(features) {
+ res = quote!(full!(#res));
+ }
+ Some(res)
+ }
+ Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
+ Type::Ext(_) | Type::Std(_) => None,
+ }
+}
+
+// Generates the Visit trait method and the free-standing `visit_*` function
+// for one syntax tree node, appending them to `traits` and `impls`.
+fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Definitions) {
+ let under_name = gen::under_name(&s.ident);
+ let ty = Ident::new(&s.ident, Span::call_site());
+ let visit_fn = Ident::new(&format!("visit_{}", under_name), Span::call_site());
+
+ let mut visit_impl = TokenStream::new();
+
+ match &s.data {
+ Data::Enum(variants) => {
+ // Build one match arm per variant, binding each field to
+ // `_binding_N` and visiting it in order.
+ let mut visit_variants = TokenStream::new();
+
+ for (variant, fields) in variants {
+ let variant_ident = Ident::new(variant, Span::call_site());
+
+ if fields.is_empty() {
+ visit_variants.extend(quote! {
+ #ty::#variant_ident => {}
+ });
+ } else {
+ let mut bind_visit_fields = TokenStream::new();
+ let mut visit_fields = TokenStream::new();
+
+ for (idx, ty) in fields.iter().enumerate() {
+ let name = format!("_binding_{}", idx);
+ let binding = Ident::new(&name, Span::call_site());
+
+ bind_visit_fields.extend(quote! {
+ #binding,
+ });
+
+ let borrowed_binding = Borrowed(quote!(#binding));
+
+ visit_fields.extend(
+ visit(ty, &s.features, defs, &borrowed_binding)
+ .unwrap_or_else(|| noop_visit(&borrowed_binding)),
+ );
+
+ visit_fields.extend(quote!(;));
+ }
+
+ visit_variants.extend(quote! {
+ #ty::#variant_ident(#bind_visit_fields) => {
+ #visit_fields
+ }
+ });
+ }
+ }
+
+ // Non-exhaustive enums need a catch-all arm to stay compilable.
+ let nonexhaustive = if s.exhaustive {
+ None
+ } else {
+ Some(quote!(_ => unreachable!()))
+ };
+
+ visit_impl.extend(quote! {
+ match node {
+ #visit_variants
+ #nonexhaustive
+ }
+ });
+ }
+ Data::Struct(fields) => {
+ for (field, ty) in fields {
+ // Reserved placeholder fields are never visited.
+ if let Type::Syn(ty) = ty {
+ if ty == "Reserved" {
+ continue;
+ }
+ }
+
+ let id = Ident::new(&field, Span::call_site());
+ let ref_toks = Owned(quote!(node.#id));
+ let visit_field = visit(&ty, &s.features, defs, &ref_toks)
+ .unwrap_or_else(|| noop_visit(&ref_toks));
+ visit_impl.extend(quote! {
+ #visit_field;
+ });
+ }
+ }
+ Data::Private => {
+ // Opaque types: only Ident gets a visit, via its span.
+ if ty == "Ident" {
+ visit_impl.extend(quote! {
+ v.visit_span(&node.span());
+ });
+ }
+ }
+ }
+
+ // Span is visited by value, so its method takes no 'ast lifetime.
+ let ast_lifetime = if s.ident == "Span" {
+ None
+ } else {
+ Some(quote!('ast))
+ };
+
+ traits.extend(quote! {
+ fn #visit_fn(&mut self, i: &#ast_lifetime #ty) {
+ #visit_fn(self, i)
+ }
+ });
+
+ impls.extend(quote! {
+ pub fn #visit_fn<'ast, V>(v: &mut V, node: &#ast_lifetime #ty)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ #visit_impl
+ }
+ });
+}
+
+// Traverses all node definitions and writes the generated Visit trait and its
+// helper functions to ../src/gen/visit.rs.
+pub fn generate(defs: &Definitions) -> Result<()> {
+ let (traits, impls) = gen::traverse(defs, node);
+ let full_macro = full::get_macro();
+ file::write(
+ VISIT_SRC,
+ quote! {
+ #![allow(unused_variables)]
+
+ use crate::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::punctuated::Punctuated;
+ use proc_macro2::Span;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::visit::*;
+
+ #full_macro
+
+ // Expansion target for the skip!() no-ops emitted by noop_visit.
+ #[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+ /// See the [module documentation] for details.
+ ///
+ /// [module documentation]: self
+ ///
+ /// *This trait is available if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #traits
+ }
+
+ #impls
+ },
+ )?;
+ Ok(())
+}
diff --git a/syn/codegen/src/visit_mut.rs b/syn/codegen/src/visit_mut.rs
new file mode 100644
index 0000000..71e56b3
--- /dev/null
+++ b/syn/codegen/src/visit_mut.rs
@@ -0,0 +1,262 @@
+use crate::operand::{Borrowed, Operand, Owned};
+use crate::{file, full, gen};
+use anyhow::Result;
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::quote;
+use syn::Index;
+use syn_codegen::{Data, Definitions, Features, Node, Type};
+
+const VISIT_MUT_SRC: &str = "../src/gen/visit_mut.rs";
+
+// Emits a call to the mutable visitor method for the syn type `item`,
+// e.g. `v.visit_expr_mut(&mut name)`.
+fn simple_visit(item: &str, name: &Operand) -> TokenStream {
+ let ident = gen::under_name(item);
+ let method = Ident::new(&format!("visit_{}_mut", ident), Span::call_site());
+ let name = name.ref_mut_tokens();
+ quote! {
+ v.#method(#name)
+ }
+}
+
+// Emits `skip!(name)` - a no-op marker for fields that are not visited.
+fn noop_visit(name: &Operand) -> TokenStream {
+ let name = name.tokens();
+ quote! {
+ skip!(#name)
+ }
+}
+
+// Recursively generates mutable visiting code for one occurrence of `ty`,
+// reading it through `name`. Mirrors visit.rs but takes `&mut` borrows.
+// Returns None for external/std types that are not descended into.
+fn visit(
+ ty: &Type,
+ features: &Features,
+ defs: &Definitions,
+ name: &Operand,
+) -> Option<TokenStream> {
+ match ty {
+ Type::Box(t) => {
+ // Deref the box and visit the contained value.
+ let name = name.owned_tokens();
+ visit(t, features, defs, &Owned(quote!(*#name)))
+ }
+ Type::Vec(t) => {
+ let operand = Borrowed(quote!(it));
+ let val = visit(t, features, defs, &operand)?;
+ let name = name.ref_mut_tokens();
+ Some(quote! {
+ for it in #name {
+ #val
+ }
+ })
+ }
+ Type::Punctuated(p) => {
+ // Visit each element and the punctuation token between elements.
+ let operand = Borrowed(quote!(it));
+ let val = visit(&p.element, features, defs, &operand)?;
+ let name = name.ref_mut_tokens();
+ Some(quote! {
+ for el in Punctuated::pairs_mut(#name) {
+ let (it, p) = el.into_tuple();
+ #val;
+ if let Some(p) = p {
+ tokens_helper(v, &mut p.spans);
+ }
+ }
+ })
+ }
+ Type::Option(t) => {
+ let it = Borrowed(quote!(it));
+ let val = visit(t, features, defs, &it)?;
+ let name = name.owned_tokens();
+ Some(quote! {
+ if let Some(it) = &mut #name {
+ #val
+ }
+ })
+ }
+ Type::Tuple(t) => {
+ // Visit each tuple element by index; unvisitable elements get the
+ // skip!() no-op so every position is accounted for.
+ let mut code = TokenStream::new();
+ for (i, elem) in t.iter().enumerate() {
+ let name = name.tokens();
+ let i = Index::from(i);
+ let it = Owned(quote!((#name).#i));
+ let val = visit(elem, features, defs, &it).unwrap_or_else(|| noop_visit(&it));
+ code.extend(val);
+ code.extend(quote!(;));
+ }
+ Some(code)
+ }
+ Type::Token(t) => {
+ let name = name.tokens();
+ let repr = &defs.tokens[t];
+ // Keyword tokens expose a single `span`; punctuation tokens expose
+ // a `spans` array.
+ let is_keyword = repr.chars().next().unwrap().is_alphabetic();
+ let spans = if is_keyword {
+ quote!(span)
+ } else {
+ quote!(spans)
+ };
+ Some(quote! {
+ tokens_helper(v, &mut #name.#spans)
+ })
+ }
+ Type::Group(_) => {
+ let name = name.tokens();
+ Some(quote! {
+ tokens_helper(v, &mut #name.span)
+ })
+ }
+ Type::Syn(t) => {
+ // A type gated exclusively on "full" must be wrapped in full!() when
+ // referenced from a node that is not itself full-only.
+ fn requires_full(features: &Features) -> bool {
+ features.any.contains("full") && features.any.len() == 1
+ }
+ let mut res = simple_visit(t, name);
+ let target = defs.types.iter().find(|ty| ty.ident == *t).unwrap();
+ if requires_full(&target.features) && !requires_full(features) {
+ res = quote!(full!(#res));
+ }
+ Some(res)
+ }
+ Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
+ Type::Ext(_) | Type::Std(_) => None,
+ }
+}
+
+// Generates the VisitMut trait method and the free-standing `visit_*_mut`
+// function for one syntax tree node, appending them to `traits` and `impls`.
+fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Definitions) {
+ let under_name = gen::under_name(&s.ident);
+ let ty = Ident::new(&s.ident, Span::call_site());
+ let visit_mut_fn = Ident::new(&format!("visit_{}_mut", under_name), Span::call_site());
+
+ let mut visit_mut_impl = TokenStream::new();
+
+ match &s.data {
+ Data::Enum(variants) => {
+ // Build one match arm per variant, binding each field to
+ // `_binding_N` and visiting it in order.
+ let mut visit_mut_variants = TokenStream::new();
+
+ for (variant, fields) in variants {
+ let variant_ident = Ident::new(variant, Span::call_site());
+
+ if fields.is_empty() {
+ visit_mut_variants.extend(quote! {
+ #ty::#variant_ident => {}
+ });
+ } else {
+ let mut bind_visit_mut_fields = TokenStream::new();
+ let mut visit_mut_fields = TokenStream::new();
+
+ for (idx, ty) in fields.iter().enumerate() {
+ let name = format!("_binding_{}", idx);
+ let binding = Ident::new(&name, Span::call_site());
+
+ bind_visit_mut_fields.extend(quote! {
+ #binding,
+ });
+
+ let borrowed_binding = Borrowed(quote!(#binding));
+
+ visit_mut_fields.extend(
+ visit(ty, &s.features, defs, &borrowed_binding)
+ .unwrap_or_else(|| noop_visit(&borrowed_binding)),
+ );
+
+ visit_mut_fields.extend(quote!(;));
+ }
+
+ visit_mut_variants.extend(quote! {
+ #ty::#variant_ident(#bind_visit_mut_fields) => {
+ #visit_mut_fields
+ }
+ });
+ }
+ }
+
+ // Non-exhaustive enums need a catch-all arm to stay compilable.
+ let nonexhaustive = if s.exhaustive {
+ None
+ } else {
+ Some(quote!(_ => unreachable!()))
+ };
+
+ visit_mut_impl.extend(quote! {
+ match node {
+ #visit_mut_variants
+ #nonexhaustive
+ }
+ });
+ }
+ Data::Struct(fields) => {
+ for (field, ty) in fields {
+ // Reserved placeholder fields are never visited.
+ if let Type::Syn(ty) = ty {
+ if ty == "Reserved" {
+ continue;
+ }
+ }
+
+ let id = Ident::new(&field, Span::call_site());
+ let ref_toks = Owned(quote!(node.#id));
+ let visit_mut_field = visit(&ty, &s.features, defs, &ref_toks)
+ .unwrap_or_else(|| noop_visit(&ref_toks));
+ visit_mut_impl.extend(quote! {
+ #visit_mut_field;
+ });
+ }
+ }
+ Data::Private => {
+ // Opaque types: only Ident is mutated, by round-tripping its span
+ // through the visitor.
+ if ty == "Ident" {
+ visit_mut_impl.extend(quote! {
+ let mut span = node.span();
+ v.visit_span_mut(&mut span);
+ node.set_span(span);
+ });
+ }
+ }
+ }
+
+ traits.extend(quote! {
+ fn #visit_mut_fn(&mut self, i: &mut #ty) {
+ #visit_mut_fn(self, i)
+ }
+ });
+
+ impls.extend(quote! {
+ pub fn #visit_mut_fn<V>(v: &mut V, node: &mut #ty)
+ where
+ V: VisitMut + ?Sized,
+ {
+ #visit_mut_impl
+ }
+ });
+}
+
+// Traverses all node definitions and writes the generated VisitMut trait and
+// its helper functions to ../src/gen/visit_mut.rs.
+pub fn generate(defs: &Definitions) -> Result<()> {
+ let (traits, impls) = gen::traverse(defs, node);
+ let full_macro = full::get_macro();
+ file::write(
+ VISIT_MUT_SRC,
+ quote! {
+ #![allow(unused_variables)]
+
+ use crate::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::punctuated::Punctuated;
+ use proc_macro2::Span;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::visit_mut::*;
+
+ #full_macro
+
+ // Expansion target for the skip!() no-ops emitted by noop_visit.
+ #[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+
+ /// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
+ /// place.
+ ///
+ /// See the [module documentation] for details.
+ ///
+ /// [module documentation]: self
+ ///
+ /// *This trait is available if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #traits
+ }
+
+ #impls
+ },
+ )?;
+ Ok(())
+}