Diffstat (limited to 'proc-macro2')
-rw-r--r--   proc-macro2/.gitignore                                 3
-rw-r--r--   proc-macro2/.travis.yml                               36
-rw-r--r--   proc-macro2/Cargo.toml                                57
-rw-r--r--   proc-macro2/LICENSE-APACHE                           201
-rw-r--r--   proc-macro2/LICENSE-MIT                               25
-rw-r--r--   proc-macro2/README.md                                 93
-rw-r--r--   proc-macro2/benches/bench-libproc-macro/Cargo.toml    13
-rw-r--r--   proc-macro2/benches/bench-libproc-macro/README.md     10
-rw-r--r--   proc-macro2/benches/bench-libproc-macro/lib.rs        49
-rw-r--r--   proc-macro2/benches/bench-libproc-macro/main.rs        3
-rw-r--r--   proc-macro2/build.rs                                 129
-rw-r--r--   proc-macro2/src/fallback.rs                         1458
-rw-r--r--   proc-macro2/src/lib.rs                              1199
-rw-r--r--   proc-macro2/src/strnom.rs                            391
-rw-r--r--   proc-macro2/src/wrapper.rs                           927
-rw-r--r--   proc-macro2/tests/features.rs                          8
-rw-r--r--   proc-macro2/tests/marker.rs                           59
-rw-r--r--   proc-macro2/tests/test.rs                            466
18 files changed, 0 insertions, 5127 deletions
diff --git a/proc-macro2/.gitignore b/proc-macro2/.gitignore
deleted file mode 100644
index 6936990..0000000
--- a/proc-macro2/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-/target
-**/*.rs.bk
-Cargo.lock
diff --git a/proc-macro2/.travis.yml b/proc-macro2/.travis.yml
deleted file mode 100644
index acddb57..0000000
--- a/proc-macro2/.travis.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-language: rust
-sudo: false
-
-matrix:
-  include:
-    - rust: 1.31.0
-    - rust: stable
-    - rust: beta
-    - rust: nightly
-      script:
-        - cargo test
-        - cargo test --no-default-features
-        - cargo test --no-default-features -- --ignored # run the ignored test to make sure the `proc-macro` feature is disabled
-        - cargo test --features span-locations
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
-        - RUSTFLAGS='-Z allow-features=' cargo test
-        - cargo update -Z minimal-versions && cargo build
-    - rust: nightly
-      name: WebAssembly
-      install: rustup target add wasm32-unknown-unknown
-      script: cargo test --target wasm32-unknown-unknown --no-run
-
-before_script:
-  - set -o errexit
-
-script:
-  - cargo test
-  - cargo test --no-default-features
-  - cargo test --features span-locations
-  - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
-  - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features
-
-notifications:
-  email:
-    on_success: never
diff --git a/proc-macro2/Cargo.toml b/proc-macro2/Cargo.toml
deleted file mode 100644
index a7d0865..0000000
--- a/proc-macro2/Cargo.toml
+++ /dev/null
@@ -1,57 +0,0 @@
-[package]
-name = "proc-macro2"
-version = "1.0.7" # remember to update html_root_url
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
-license = "MIT OR Apache-2.0"
-readme = "README.md"
-keywords = ["macros"]
-repository = "https://github.com/alexcrichton/proc-macro2"
-homepage = "https://github.com/alexcrichton/proc-macro2"
-documentation = "https://docs.rs/proc-macro2"
-edition = "2018"
-description = """
-A stable implementation of the upcoming new `proc_macro` API. Comes with an
-option, off by default, to also reimplement itself in terms of the upstream
-unstable API.
-"""
-
-[package.metadata.docs.rs]
-rustc-args = ["--cfg", "procmacro2_semver_exempt"]
-rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
-
-[dependencies]
-unicode-xid = "0.2"
-
-[dev-dependencies]
-quote = { version = "1.0", default_features = false }
-
-[features]
-proc-macro = []
-default = ["proc-macro"]
-
-# Expose methods Span::start and Span::end which give the line/column location
-# of a token.
-span-locations = []
-
-# This feature no longer means anything.
-nightly = []
-
-[badges]
-travis-ci = { repository = "alexcrichton/proc-macro2" }
-
-[workspace]
-members = ["benches/bench-libproc-macro"]
-
-[patch.crates-io]
-# Our doc tests depend on quote which depends on proc-macro2. Without this line,
-# the proc-macro2 dependency of quote would be the released version of
-# proc-macro2. Quote would implement its traits for types from that proc-macro2,
-# meaning impls would be missing when tested against types from the local
-# proc-macro2.
-#
-# Travis builds that are in progress at the time that you publish may spuriously
-# fail. This is because they'll be building a local proc-macro2 which carries
-# the second-most-recent version number, pulling in quote which resolves to a
-# dependency on the just-published most recent version number. Thus the patch
-# will fail to apply because the version numbers are different.
-proc-macro2 = { path = "." }
diff --git a/proc-macro2/LICENSE-APACHE b/proc-macro2/LICENSE-APACHE
deleted file mode 100644
index 16fe87b..0000000
--- a/proc-macro2/LICENSE-APACHE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/proc-macro2/LICENSE-MIT b/proc-macro2/LICENSE-MIT
deleted file mode 100644
index 39e0ed6..0000000
--- a/proc-macro2/LICENSE-MIT
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) 2014 Alex Crichton
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/proc-macro2/README.md b/proc-macro2/README.md
deleted file mode 100644
index 19b0c3b..0000000
--- a/proc-macro2/README.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# proc-macro2
-
-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
-[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
-
-A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
-This library serves two purposes:
-
-- **Bring proc-macro-like functionality to other contexts like build.rs and
- main.rs.** Types from `proc_macro` are entirely specific to procedural macros
- and cannot ever exist in code outside of a procedural macro. Meanwhile
- `proc_macro2` types may exist anywhere including non-macro code. By developing
- foundational libraries like [syn] and [quote] against `proc_macro2` rather
- than `proc_macro`, the procedural macro ecosystem becomes easily applicable to
- many other use cases and we avoid reimplementing non-macro equivalents of
- those libraries.
-
-- **Make procedural macros unit testable.** As a consequence of being specific
- to procedural macros, nothing that uses `proc_macro` can be executed from a
- unit test. In order for helper libraries or components of a macro to be
- testable in isolation, they must be implemented using `proc_macro2`.
-
-[syn]: https://github.com/dtolnay/syn
-[quote]: https://github.com/dtolnay/quote
-
-## Usage
-
-```toml
-[dependencies]
-proc-macro2 = "1.0"
-```
-
-The skeleton of a typical procedural macro typically looks like this:
-
-```rust
-extern crate proc_macro;
-
-#[proc_macro_derive(MyDerive)]
-pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let input = proc_macro2::TokenStream::from(input);
-
-    let output: proc_macro2::TokenStream = {
-        /* transform input */
-    };
-
-    proc_macro::TokenStream::from(output)
-}
-```
-
-If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
-parse errors correctly back to the compiler when parsing fails.
-
-[`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html
-
-## Unstable features
-
-The default feature set of proc-macro2 tracks the most recent stable compiler
-API. Functionality in `proc_macro` that is not yet stable is not exposed by
-proc-macro2 by default.
-
-To opt into the additional APIs available in the most recent nightly compiler,
-the `procmacro2_semver_exempt` config flag must be passed to rustc. We will
-polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs
-that track the nightly compiler, minor versions of proc-macro2 may make breaking
-changes to them at any time.
-
-```
-RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
-```
-
-Note that this must not only be done for your crate, but for any crate that
-depends on your crate. This infectious nature is intentional, as it serves as a
-reminder that you are outside of the normal semver guarantees.
-
-Semver exempt methods are marked as such in the proc-macro2 documentation.
-
-<br>
-
-#### License
-
-<sup>
-Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
-2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
-</sup>
-
-<br>
-
-<sub>
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
-be dual licensed as above, without any additional terms or conditions.
-</sub>
diff --git a/proc-macro2/benches/bench-libproc-macro/Cargo.toml b/proc-macro2/benches/bench-libproc-macro/Cargo.toml
deleted file mode 100644
index 41d106d..0000000
--- a/proc-macro2/benches/bench-libproc-macro/Cargo.toml
+++ /dev/null
@@ -1,13 +0,0 @@
-[package]
-name = "bench-libproc-macro"
-version = "0.0.0"
-edition = "2018"
-publish = false
-
-[lib]
-path = "lib.rs"
-proc-macro = true
-
-[[bin]]
-name = "bench-libproc-macro"
-path = "main.rs"
diff --git a/proc-macro2/benches/bench-libproc-macro/README.md b/proc-macro2/benches/bench-libproc-macro/README.md
deleted file mode 100644
index 80e4939..0000000
--- a/proc-macro2/benches/bench-libproc-macro/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-Example output:
-
-```console
-$ cargo check --release
-
- Compiling bench-libproc-macro v0.0.0
-STRING: 8 millis
-TOKENSTREAM: 25721 millis
- Finished release [optimized] target(s) in 26.15s
-```
diff --git a/proc-macro2/benches/bench-libproc-macro/lib.rs b/proc-macro2/benches/bench-libproc-macro/lib.rs
deleted file mode 100644
index 46b3711..0000000
--- a/proc-macro2/benches/bench-libproc-macro/lib.rs
+++ /dev/null
@@ -1,49 +0,0 @@
-extern crate proc_macro;
-
-use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};
-use std::iter::once;
-use std::time::Instant;
-
-const N: u32 = 7000;
-
-#[proc_macro]
-pub fn bench(_input: TokenStream) -> TokenStream {
- let start = Instant::now();
- let mut string = String::new();
- for _ in 0..N {
- string += "core";
- string += ":";
- string += ":";
- string += "option";
- string += ":";
- string += ":";
- string += "Option";
- string += ":";
- string += ":";
- string += "None";
- string += ",";
- }
- string.parse::<TokenStream>().unwrap();
- eprintln!("STRING: {} millis", start.elapsed().as_millis());
-
- let start = Instant::now();
- let span = Span::call_site();
- let mut tokens = TokenStream::new();
- for _ in 0..N {
- // Similar to what is emitted by quote.
- tokens.extend(once(TokenTree::Ident(Ident::new("core", span))));
- tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint))));
- tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone))));
- tokens.extend(once(TokenTree::Ident(Ident::new("option", span))));
- tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint))));
- tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone))));
- tokens.extend(once(TokenTree::Ident(Ident::new("Option", span))));
- tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint))));
- tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone))));
- tokens.extend(once(TokenTree::Ident(Ident::new("None", span))));
- tokens.extend(once(TokenTree::Punct(Punct::new(',', Spacing::Joint))));
- }
- eprintln!("TOKENSTREAM: {} millis", start.elapsed().as_millis());
-
- TokenStream::new()
-}
diff --git a/proc-macro2/benches/bench-libproc-macro/main.rs b/proc-macro2/benches/bench-libproc-macro/main.rs
deleted file mode 100644
index 34eedf6..0000000
--- a/proc-macro2/benches/bench-libproc-macro/main.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-bench_libproc_macro::bench!();
-
-fn main() {}
diff --git a/proc-macro2/build.rs b/proc-macro2/build.rs
deleted file mode 100644
index deb9b92..0000000
--- a/proc-macro2/build.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-// rustc-cfg emitted by the build script:
-//
-// "use_proc_macro"
-// Link to extern crate proc_macro. Available on any compiler and any target
-// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
-// enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg
-// is enabled.
-//
-// "wrap_proc_macro"
-// Wrap types from libproc_macro rather than polyfilling the whole API.
-// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
-// because we can't emulate the unstable API without emulating everything
-// else. Also enabled unconditionally on nightly, in which case the
-// procmacro2_semver_exempt surface area is implemented by using the
-// nightly-only proc_macro API.
-//
-// "proc_macro_span"
-// Enable non-dummy behavior of Span::start and Span::end methods which
-// requires an unstable compiler feature. Enabled when building with
-// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
-// features.
-//
-// "super_unstable"
-// Implement the semver exempt API in terms of the nightly-only proc_macro
-// API. Enabled when using procmacro2_semver_exempt on a nightly compiler.
-//
-// "span_locations"
-// Provide methods Span::start and Span::end which give the line/column
-// location of a token. Enabled by procmacro2_semver_exempt or the
-// "span-locations" Cargo cfg. This is behind a cfg because tracking
-// location inside spans is a performance hit.
-
-use std::env;
-use std::process::{self, Command};
-use std::str;
-
-fn main() {
- println!("cargo:rerun-if-changed=build.rs");
-
- let version = match rustc_version() {
- Some(version) => version,
- None => return,
- };
-
- if version.minor < 31 {
- eprintln!("Minimum supported rustc version is 1.31");
- process::exit(1);
- }
-
- let semver_exempt = cfg!(procmacro2_semver_exempt);
- if semver_exempt {
- // https://github.com/alexcrichton/proc-macro2/issues/147
- println!("cargo:rustc-cfg=procmacro2_semver_exempt");
- }
-
- if semver_exempt || cfg!(feature = "span-locations") {
- println!("cargo:rustc-cfg=span_locations");
- }
-
- let target = env::var("TARGET").unwrap();
- if !enable_use_proc_macro(&target) {
- return;
- }
-
- println!("cargo:rustc-cfg=use_proc_macro");
-
- if version.nightly || !semver_exempt {
- println!("cargo:rustc-cfg=wrap_proc_macro");
- }
-
- if version.nightly && feature_allowed("proc_macro_span") {
- println!("cargo:rustc-cfg=proc_macro_span");
- }
-
- if semver_exempt && version.nightly {
- println!("cargo:rustc-cfg=super_unstable");
- }
-}
-
-fn enable_use_proc_macro(target: &str) -> bool {
- // wasm targets don't have the `proc_macro` crate, disable this feature.
- if target.contains("wasm32") {
- return false;
- }
-
- // Otherwise, only enable it if our feature is actually enabled.
- cfg!(feature = "proc-macro")
-}
-
-struct RustcVersion {
- minor: u32,
- nightly: bool,
-}
-
-fn rustc_version() -> Option<RustcVersion> {
- let rustc = env::var_os("RUSTC")?;
- let output = Command::new(rustc).arg("--version").output().ok()?;
- let version = str::from_utf8(&output.stdout).ok()?;
- let nightly = version.contains("nightly") || version.contains("dev");
- let mut pieces = version.split('.');
- if pieces.next() != Some("rustc 1") {
- return None;
- }
- let minor = pieces.next()?.parse().ok()?;
- Some(RustcVersion { minor, nightly })
-}
-
-fn feature_allowed(feature: &str) -> bool {
- // Recognized formats:
- //
- // -Z allow-features=feature1,feature2
- //
- // -Zallow-features=feature1,feature2
-
- if let Some(rustflags) = env::var_os("RUSTFLAGS") {
- for mut flag in rustflags.to_string_lossy().split(' ') {
- if flag.starts_with("-Z") {
- flag = &flag["-Z".len()..];
- }
- if flag.starts_with("allow-features=") {
- flag = &flag["allow-features=".len()..];
- return flag.split(',').any(|allowed| allowed == feature);
- }
- }
- }
-
- // No allow-features= flag, allowed by default.
- true
-}
diff --git a/proc-macro2/src/fallback.rs b/proc-macro2/src/fallback.rs
deleted file mode 100644
index 9762d3b..0000000
--- a/proc-macro2/src/fallback.rs
+++ /dev/null
@@ -1,1458 +0,0 @@
-#[cfg(span_locations)]
-use std::cell::RefCell;
-#[cfg(span_locations)]
-use std::cmp;
-use std::fmt;
-use std::iter;
-use std::ops::RangeBounds;
-#[cfg(procmacro2_semver_exempt)]
-use std::path::Path;
-use std::path::PathBuf;
-use std::str::FromStr;
-use std::vec;
-
-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
-use crate::{Delimiter, Punct, Spacing, TokenTree};
-use unicode_xid::UnicodeXID;
-
-#[derive(Clone)]
-pub struct TokenStream {
- inner: Vec<TokenTree>,
-}
-
-#[derive(Debug)]
-pub struct LexError;
-
-impl TokenStream {
- pub fn new() -> TokenStream {
- TokenStream { inner: Vec::new() }
- }
-
- pub fn is_empty(&self) -> bool {
- self.inner.len() == 0
- }
-}
-
-#[cfg(span_locations)]
-fn get_cursor(src: &str) -> Cursor {
- // Create a dummy file & add it to the source map
- SOURCE_MAP.with(|cm| {
- let mut cm = cm.borrow_mut();
- let name = format!("<parsed string {}>", cm.files.len());
- let span = cm.add_file(&name, src);
- Cursor {
- rest: src,
- off: span.lo,
- }
- })
-}
-
-#[cfg(not(span_locations))]
-fn get_cursor(src: &str) -> Cursor {
- Cursor { rest: src }
-}
-
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- // Create a dummy file & add it to the source map
- let cursor = get_cursor(src);
-
- match token_stream(cursor) {
- Ok((input, output)) => {
- if skip_whitespace(input).len() != 0 {
- Err(LexError)
- } else {
- Ok(output)
- }
- }
- Err(LexError) => Err(LexError),
- }
- }
-}
-
-impl fmt::Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut joint = false;
- for (i, tt) in self.inner.iter().enumerate() {
- if i != 0 && !joint {
- write!(f, " ")?;
- }
- joint = false;
- match *tt {
- TokenTree::Group(ref tt) => {
- let (start, end) = match tt.delimiter() {
- Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
- if tt.stream().into_iter().next().is_none() {
- write!(f, "{} {}", start, end)?
- } else {
- write!(f, "{} {} {}", start, tt.stream(), end)?
- }
- }
- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
- TokenTree::Punct(ref tt) => {
- write!(f, "{}", tt.as_char())?;
- match tt.spacing() {
- Spacing::Alone => {}
- Spacing::Joint => joint = true,
- }
- }
- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
- }
- }
-
- Ok(())
- }
-}
-
-impl fmt::Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str("TokenStream ")?;
- f.debug_list().entries(self.clone()).finish()
- }
-}
-
-#[cfg(use_proc_macro)]
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
- inner
- .to_string()
- .parse()
- .expect("compiler token stream parse failed")
- }
-}
-
-#[cfg(use_proc_macro)]
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
- inner
- .to_string()
- .parse()
- .expect("failed to parse to compiler tokens")
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(tree: TokenTree) -> TokenStream {
- TokenStream { inner: vec![tree] }
- }
-}
-
-impl iter::FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
- let mut v = Vec::new();
-
- for token in streams.into_iter() {
- v.push(token);
- }
-
- TokenStream { inner: v }
- }
-}
-
-impl iter::FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut v = Vec::new();
-
- for stream in streams.into_iter() {
- v.extend(stream.inner);
- }
-
- TokenStream { inner: v }
- }
-}
-
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
- self.inner.extend(streams);
- }
-}
-
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- self.inner
- .extend(streams.into_iter().flat_map(|stream| stream));
- }
-}
-
-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
-
-impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
-
- fn into_iter(self) -> TokenTreeIter {
- self.inner.into_iter()
- }
-}
-
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile {
- path: PathBuf,
-}
-
-impl SourceFile {
- /// Get the path to this source file as a string.
- pub fn path(&self) -> PathBuf {
- self.path.clone()
- }
-
- pub fn is_real(&self) -> bool {
- // XXX(nika): Support real files in the future?
- false
- }
-}
-
-impl fmt::Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("SourceFile")
- .field("path", &self.path())
- .field("is_real", &self.is_real())
- .finish()
- }
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
-#[cfg(span_locations)]
-thread_local! {
- static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
- // NOTE: We start with a single dummy file which all call_site() and
- // def_site() spans reference.
- files: vec![{
- #[cfg(procmacro2_semver_exempt)]
- {
- FileInfo {
- name: "<unspecified>".to_owned(),
- span: Span { lo: 0, hi: 0 },
- lines: vec![0],
- }
- }
-
- #[cfg(not(procmacro2_semver_exempt))]
- {
- FileInfo {
- span: Span { lo: 0, hi: 0 },
- lines: vec![0],
- }
- }
- }],
- });
-}
-
-#[cfg(span_locations)]
-struct FileInfo {
- #[cfg(procmacro2_semver_exempt)]
- name: String,
- span: Span,
- lines: Vec<usize>,
-}
-
-#[cfg(span_locations)]
-impl FileInfo {
- fn offset_line_column(&self, offset: usize) -> LineColumn {
- assert!(self.span_within(Span {
- lo: offset as u32,
- hi: offset as u32
- }));
- let offset = offset - self.span.lo as usize;
- match self.lines.binary_search(&offset) {
- Ok(found) => LineColumn {
- line: found + 1,
- column: 0,
- },
- Err(idx) => LineColumn {
- line: idx,
- column: offset - self.lines[idx - 1],
- },
- }
- }
-
- fn span_within(&self, span: Span) -> bool {
- span.lo >= self.span.lo && span.hi <= self.span.hi
- }
-}
-
-/// Computes the offsets of each line in the given source string.
-#[cfg(span_locations)]
-fn lines_offsets(s: &str) -> Vec<usize> {
- let mut lines = vec![0];
- let mut prev = 0;
- while let Some(len) = s[prev..].find('\n') {
- prev += len + 1;
- lines.push(prev);
- }
- lines
-}
-
-#[cfg(span_locations)]
-struct SourceMap {
- files: Vec<FileInfo>,
-}
-
-#[cfg(span_locations)]
-impl SourceMap {
- fn next_start_pos(&self) -> u32 {
- // Add 1 so there's always space between files.
- //
- // We'll always have at least 1 file, as we initialize our files list
- // with a dummy file.
- self.files.last().unwrap().span.hi + 1
- }
-
- fn add_file(&mut self, name: &str, src: &str) -> Span {
- let lines = lines_offsets(src);
- let lo = self.next_start_pos();
- // XXX(nika): Shouild we bother doing a checked cast or checked add here?
- let span = Span {
- lo,
- hi: lo + (src.len() as u32),
- };
-
- #[cfg(procmacro2_semver_exempt)]
- self.files.push(FileInfo {
- name: name.to_owned(),
- span,
- lines,
- });
-
- #[cfg(not(procmacro2_semver_exempt))]
- self.files.push(FileInfo { span, lines });
- let _ = name;
-
- span
- }
-
- fn fileinfo(&self, span: Span) -> &FileInfo {
- for file in &self.files {
- if file.span_within(span) {
- return file;
- }
- }
- panic!("Invalid span with no related FileInfo!");
- }
-}
-
-#[derive(Clone, Copy, PartialEq, Eq)]
-pub struct Span {
- #[cfg(span_locations)]
- lo: u32,
- #[cfg(span_locations)]
- hi: u32,
-}
-
-impl Span {
- #[cfg(not(span_locations))]
- pub fn call_site() -> Span {
- Span {}
- }
-
- #[cfg(span_locations)]
- pub fn call_site() -> Span {
- Span { lo: 0, hi: 0 }
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn def_site() -> Span {
- Span::call_site()
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn resolved_at(&self, _other: Span) -> Span {
- // Stable spans consist only of line/column information, so
- // `resolved_at` and `located_at` only select which span the
- // caller wants line/column information from.
- *self
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn located_at(&self, other: Span) -> Span {
- other
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn source_file(&self) -> SourceFile {
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- SourceFile {
- path: Path::new(&fi.name).to_owned(),
- }
- })
- }
-
- #[cfg(span_locations)]
- pub fn start(&self) -> LineColumn {
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- fi.offset_line_column(self.lo as usize)
- })
- }
-
- #[cfg(span_locations)]
- pub fn end(&self) -> LineColumn {
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- fi.offset_line_column(self.hi as usize)
- })
- }
-
- #[cfg(not(span_locations))]
- pub fn join(&self, _other: Span) -> Option<Span> {
- Some(Span {})
- }
-
- #[cfg(span_locations)]
- pub fn join(&self, other: Span) -> Option<Span> {
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- // If `other` is not within the same FileInfo as us, return None.
- if !cm.fileinfo(*self).span_within(other) {
- return None;
- }
- Some(Span {
- lo: cmp::min(self.lo, other.lo),
- hi: cmp::max(self.hi, other.hi),
- })
- })
- }
-
- #[cfg(not(span_locations))]
- fn first_byte(self) -> Self {
- self
- }
-
- #[cfg(span_locations)]
- fn first_byte(self) -> Self {
- Span {
- lo: self.lo,
- hi: cmp::min(self.lo.saturating_add(1), self.hi),
- }
- }
-
- #[cfg(not(span_locations))]
- fn last_byte(self) -> Self {
- self
- }
-
- #[cfg(span_locations)]
- fn last_byte(self) -> Self {
- Span {
- lo: cmp::max(self.hi.saturating_sub(1), self.lo),
- hi: self.hi,
- }
- }
-}
-
-impl fmt::Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- #[cfg(procmacro2_semver_exempt)]
- return write!(f, "bytes({}..{})", self.lo, self.hi);
-
- #[cfg(not(procmacro2_semver_exempt))]
- write!(f, "Span")
- }
-}
-
-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
- if cfg!(procmacro2_semver_exempt) {
- debug.field("span", &span);
- }
-}
-
-#[derive(Clone)]
-pub struct Group {
- delimiter: Delimiter,
- stream: TokenStream,
- span: Span,
-}
-
-impl Group {
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
- Group {
- delimiter,
- stream,
- span: Span::call_site(),
- }
- }
-
- pub fn delimiter(&self) -> Delimiter {
- self.delimiter
- }
-
- pub fn stream(&self) -> TokenStream {
- self.stream.clone()
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn span_open(&self) -> Span {
- self.span.first_byte()
- }
-
- pub fn span_close(&self) -> Span {
- self.span.last_byte()
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-impl fmt::Display for Group {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let (left, right) = match self.delimiter {
- Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
-
- f.write_str(left)?;
- self.stream.fmt(f)?;
- f.write_str(right)?;
-
- Ok(())
- }
-}
-
-impl fmt::Debug for Group {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Group");
- debug.field("delimiter", &self.delimiter);
- debug.field("stream", &self.stream);
- #[cfg(procmacro2_semver_exempt)]
- debug.field("span", &self.span);
- debug.finish()
- }
-}
-
-#[derive(Clone)]
-pub struct Ident {
- sym: String,
- span: Span,
- raw: bool,
-}
-
-impl Ident {
- fn _new(string: &str, raw: bool, span: Span) -> Ident {
- validate_ident(string);
-
- Ident {
- sym: string.to_owned(),
- span,
- raw,
- }
- }
-
- pub fn new(string: &str, span: Span) -> Ident {
- Ident::_new(string, false, span)
- }
-
- pub fn new_raw(string: &str, span: Span) -> Ident {
- Ident::_new(string, true, span)
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-fn is_ident_start(c: char) -> bool {
- ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z')
- || c == '_'
- || (c > '\x7f' && UnicodeXID::is_xid_start(c))
-}
-
-fn is_ident_continue(c: char) -> bool {
- ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z')
- || c == '_'
- || ('0' <= c && c <= '9')
- || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
-}
-
-fn validate_ident(string: &str) {
- let validate = string;
- if validate.is_empty() {
- panic!("Ident is not allowed to be empty; use Option<Ident>");
- }
-
- if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
- panic!("Ident cannot be a number; use Literal instead");
- }
-
- fn ident_ok(string: &str) -> bool {
- let mut chars = string.chars();
- let first = chars.next().unwrap();
- if !is_ident_start(first) {
- return false;
- }
- for ch in chars {
- if !is_ident_continue(ch) {
- return false;
- }
- }
- true
- }
-
- if !ident_ok(validate) {
- panic!("{:?} is not a valid Ident", string);
- }
-}
-
-impl PartialEq for Ident {
- fn eq(&self, other: &Ident) -> bool {
- self.sym == other.sym && self.raw == other.raw
- }
-}
-
-impl<T> PartialEq<T> for Ident
-where
- T: ?Sized + AsRef<str>,
-{
- fn eq(&self, other: &T) -> bool {
- let other = other.as_ref();
- if self.raw {
- other.starts_with("r#") && self.sym == other[2..]
- } else {
- self.sym == other
- }
- }
-}
-
-impl fmt::Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- if self.raw {
- "r#".fmt(f)?;
- }
- self.sym.fmt(f)
- }
-}
-
-impl fmt::Debug for Ident {
- // Ident(proc_macro), Ident(r#union)
- #[cfg(not(procmacro2_semver_exempt))]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = f.debug_tuple("Ident");
- debug.field(&format_args!("{}", self));
- debug.finish()
- }
-
- // Ident {
- // sym: proc_macro,
- // span: bytes(128..138)
- // }
- #[cfg(procmacro2_semver_exempt)]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = f.debug_struct("Ident");
- debug.field("sym", &format_args!("{}", self));
- debug.field("span", &self.span);
- debug.finish()
- }
-}
-
-#[derive(Clone)]
-pub struct Literal {
- text: String,
- span: Span,
-}
-
-macro_rules! suffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(format!(concat!("{}", stringify!($kind)), n))
- }
- )*)
-}
-
-macro_rules! unsuffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(n.to_string())
- }
- )*)
-}
-
-impl Literal {
- fn _new(text: String) -> Literal {
- Literal {
- text,
- span: Span::call_site(),
- }
- }
-
- suffixed_numbers! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- u128_suffixed => u128,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- i128_suffixed => i128,
- isize_suffixed => isize,
-
- f32_suffixed => f32,
- f64_suffixed => f64,
- }
-
- unsuffixed_numbers! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- u128_unsuffixed => u128,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- let mut s = f.to_string();
- if !s.contains(".") {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- let mut s = f.to_string();
- if !s.contains(".") {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn string(t: &str) -> Literal {
- let mut text = String::with_capacity(t.len() + 2);
- text.push('"');
- for c in t.chars() {
- if c == '\'' {
- // escape_default turns this into "\'" which is unnecessary.
- text.push(c);
- } else {
- text.extend(c.escape_default());
- }
- }
- text.push('"');
- Literal::_new(text)
- }
-
- pub fn character(t: char) -> Literal {
- let mut text = String::new();
- text.push('\'');
- if t == '"' {
- // escape_default turns this into '\"' which is unnecessary.
- text.push(t);
- } else {
- text.extend(t.escape_default());
- }
- text.push('\'');
- Literal::_new(text)
- }
-
- pub fn byte_string(bytes: &[u8]) -> Literal {
- let mut escaped = "b\"".to_string();
- for b in bytes {
- match *b {
- b'\0' => escaped.push_str(r"\0"),
- b'\t' => escaped.push_str(r"\t"),
- b'\n' => escaped.push_str(r"\n"),
- b'\r' => escaped.push_str(r"\r"),
- b'"' => escaped.push_str("\\\""),
- b'\\' => escaped.push_str("\\\\"),
- b'\x20'..=b'\x7E' => escaped.push(*b as char),
- _ => escaped.push_str(&format!("\\x{:02X}", b)),
- }
- }
- escaped.push('"');
- Literal::_new(escaped)
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-
- pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
- None
- }
-}
-
-impl fmt::Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.text.fmt(f)
- }
-}
-
-impl fmt::Debug for Literal {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Literal");
- debug.field("lit", &format_args!("{}", self.text));
- #[cfg(procmacro2_semver_exempt)]
- debug.field("span", &self.span);
- debug.finish()
- }
-}
-
-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
- let mut trees = Vec::new();
- loop {
- let input_no_ws = skip_whitespace(input);
- if input_no_ws.rest.len() == 0 {
- break;
- }
- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
- input = a;
- trees.extend(tokens);
- continue;
- }
-
- let (a, tt) = match token_tree(input_no_ws) {
- Ok(p) => p,
- Err(_) => break,
- };
- trees.push(tt);
- input = a;
- }
- Ok((input, TokenStream { inner: trees }))
-}
-
-#[cfg(not(span_locations))]
-fn spanned<'a, T>(
- input: Cursor<'a>,
- f: fn(Cursor<'a>) -> PResult<'a, T>,
-) -> PResult<'a, (T, crate::Span)> {
- let (a, b) = f(skip_whitespace(input))?;
- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
-}
-
-#[cfg(span_locations)]
-fn spanned<'a, T>(
- input: Cursor<'a>,
- f: fn(Cursor<'a>) -> PResult<'a, T>,
-) -> PResult<'a, (T, crate::Span)> {
- let input = skip_whitespace(input);
- let lo = input.off;
- let (a, b) = f(input)?;
- let hi = a.off;
- let span = crate::Span::_new_stable(Span { lo, hi });
- Ok((a, (b, span)))
-}
-
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
- tt.set_span(span);
- Ok((rest, tt))
-}
-
-named!(token_kind -> TokenTree, alt!(
- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
- |
- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
- |
- map!(op, TokenTree::Punct)
- |
- symbol_leading_ws
-));
-
-named!(group -> Group, alt!(
- delimited!(
- punct!("("),
- token_stream,
- punct!(")")
- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
- |
- delimited!(
- punct!("["),
- token_stream,
- punct!("]")
- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
- |
- delimited!(
- punct!("{"),
- token_stream,
- punct!("}")
- ) => { |ts| Group::new(Delimiter::Brace, ts) }
-));
-
-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
- symbol(skip_whitespace(input))
-}
-
-fn symbol(input: Cursor) -> PResult<TokenTree> {
- let raw = input.starts_with("r#");
- let rest = input.advance((raw as usize) << 1);
-
- let (rest, sym) = symbol_not_raw(rest)?;
-
- if !raw {
- let ident = crate::Ident::new(sym, crate::Span::call_site());
- return Ok((rest, ident.into()));
- }
-
- if sym == "_" {
- return Err(LexError);
- }
-
- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
- Ok((rest, ident.into()))
-}
-
-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
- let mut chars = input.char_indices();
-
- match chars.next() {
- Some((_, ch)) if is_ident_start(ch) => {}
- _ => return Err(LexError),
- }
-
- let mut end = input.len();
- for (i, ch) in chars {
- if !is_ident_continue(ch) {
- end = i;
- break;
- }
- }
-
- Ok((input.advance(end), &input.rest[..end]))
-}
-
-fn literal(input: Cursor) -> PResult<Literal> {
- let input_no_ws = skip_whitespace(input);
-
- match literal_nocapture(input_no_ws) {
- Ok((a, ())) => {
- let start = input.len() - input_no_ws.len();
- let len = input_no_ws.len() - a.len();
- let end = start + len;
- Ok((a, Literal::_new(input.rest[start..end].to_string())))
- }
- Err(LexError) => Err(LexError),
- }
-}
-
-named!(literal_nocapture -> (), alt!(
- string
- |
- byte_string
- |
- byte
- |
- character
- |
- float
- |
- int
-));
-
-named!(string -> (), alt!(
- quoted_string
- |
- preceded!(
- punct!("r"),
- raw_string
- ) => { |_| () }
-));
-
-named!(quoted_string -> (), do_parse!(
- punct!("\"") >>
- cooked_string >>
- tag!("\"") >>
- option!(symbol_not_raw) >>
- (())
-));
-
-fn cooked_string(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices().peekable();
- while let Some((byte_offset, ch)) = chars.next() {
- match ch {
- '"' => {
- return Ok((input.advance(byte_offset), ()));
- }
- '\r' => {
- if let Some((_, '\n')) = chars.next() {
- // ...
- } else {
- break;
- }
- }
- '\\' => match chars.next() {
- Some((_, 'x')) => {
- if !backslash_x_char(&mut chars) {
- break;
- }
- }
- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
- Some((_, 'u')) => {
- if !backslash_u(&mut chars) {
- break;
- }
- }
- Some((_, '\n')) | Some((_, '\r')) => {
- while let Some(&(_, ch)) = chars.peek() {
- if ch.is_whitespace() {
- chars.next();
- } else {
- break;
- }
- }
- }
- _ => break,
- },
- _ch => {}
- }
- }
- Err(LexError)
-}
-
-named!(byte_string -> (), alt!(
- delimited!(
- punct!("b\""),
- cooked_byte_string,
- tag!("\"")
- ) => { |_| () }
- |
- preceded!(
- punct!("br"),
- raw_string
- ) => { |_| () }
-));
-
-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
- let mut bytes = input.bytes().enumerate();
- 'outer: while let Some((offset, b)) = bytes.next() {
- match b {
- b'"' => {
- return Ok((input.advance(offset), ()));
- }
- b'\r' => {
- if let Some((_, b'\n')) = bytes.next() {
- // ...
- } else {
- break;
- }
- }
- b'\\' => match bytes.next() {
- Some((_, b'x')) => {
- if !backslash_x_byte(&mut bytes) {
- break;
- }
- }
- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
- Some((newline, b'\n')) | Some((newline, b'\r')) => {
- let rest = input.advance(newline + 1);
- for (offset, ch) in rest.char_indices() {
- if !ch.is_whitespace() {
- input = rest.advance(offset);
- bytes = input.bytes().enumerate();
- continue 'outer;
- }
- }
- break;
- }
- _ => break,
- },
- b if b < 0x80 => {}
- _ => break,
- }
- }
- Err(LexError)
-}
-
-fn raw_string(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices();
- let mut n = 0;
- while let Some((byte_offset, ch)) = chars.next() {
- match ch {
- '"' => {
- n = byte_offset;
- break;
- }
- '#' => {}
- _ => return Err(LexError),
- }
- }
- for (byte_offset, ch) in chars {
- match ch {
- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
- let rest = input.advance(byte_offset + 1 + n);
- return Ok((rest, ()));
- }
- '\r' => {}
- _ => {}
- }
- }
- Err(LexError)
-}
-
-named!(byte -> (), do_parse!(
- punct!("b") >>
- tag!("'") >>
- cooked_byte >>
- tag!("'") >>
- (())
-));
-
-fn cooked_byte(input: Cursor) -> PResult<()> {
- let mut bytes = input.bytes().enumerate();
- let ok = match bytes.next().map(|(_, b)| b) {
- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
- Some(b'x') => backslash_x_byte(&mut bytes),
- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
- | Some(b'"') => true,
- _ => false,
- },
- b => b.is_some(),
- };
- if ok {
- match bytes.next() {
- Some((offset, _)) => {
- if input.chars().as_str().is_char_boundary(offset) {
- Ok((input.advance(offset), ()))
- } else {
- Err(LexError)
- }
- }
- None => Ok((input.advance(input.len()), ())),
- }
- } else {
- Err(LexError)
- }
-}
-
-named!(character -> (), do_parse!(
- punct!("'") >>
- cooked_char >>
- tag!("'") >>
- (())
-));
-
-fn cooked_char(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices();
- let ok = match chars.next().map(|(_, ch)| ch) {
- Some('\\') => match chars.next().map(|(_, ch)| ch) {
- Some('x') => backslash_x_char(&mut chars),
- Some('u') => backslash_u(&mut chars),
- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
- true
- }
- _ => false,
- },
- ch => ch.is_some(),
- };
- if ok {
- match chars.next() {
- Some((idx, _)) => Ok((input.advance(idx), ())),
- None => Ok((input.advance(input.len()), ())),
- }
- } else {
- Err(LexError)
- }
-}
-
-macro_rules! next_ch {
- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
- match $chars.next() {
- Some((_, ch)) => match ch {
- $pat $(| $rest)* => ch,
- _ => return false,
- },
- None => return false
- }
- };
-}
-
-fn backslash_x_char<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '0'..='7');
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- true
-}
-
-fn backslash_x_byte<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, u8)>,
-{
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
- true
-}
-
-fn backslash_u<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '{');
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- loop {
- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
- if c == '}' {
- return true;
- }
- }
-}
-
-fn float(input: Cursor) -> PResult<()> {
- let (mut rest, ()) = float_digits(input)?;
- if let Some(ch) = rest.chars().next() {
- if is_ident_start(ch) {
- rest = symbol_not_raw(rest)?.0;
- }
- }
- word_break(rest)
-}
-
-fn float_digits(input: Cursor) -> PResult<()> {
- let mut chars = input.chars().peekable();
- match chars.next() {
- Some(ch) if ch >= '0' && ch <= '9' => {}
- _ => return Err(LexError),
- }
-
- let mut len = 1;
- let mut has_dot = false;
- let mut has_exp = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '0'..='9' | '_' => {
- chars.next();
- len += 1;
- }
- '.' => {
- if has_dot {
- break;
- }
- chars.next();
- if chars
- .peek()
- .map(|&ch| ch == '.' || is_ident_start(ch))
- .unwrap_or(false)
- {
- return Err(LexError);
- }
- len += 1;
- has_dot = true;
- }
- 'e' | 'E' => {
- chars.next();
- len += 1;
- has_exp = true;
- break;
- }
- _ => break,
- }
- }
-
- let rest = input.advance(len);
- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
- return Err(LexError);
- }
-
- if has_exp {
- let mut has_exp_value = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '+' | '-' => {
- if has_exp_value {
- break;
- }
- chars.next();
- len += 1;
- }
- '0'..='9' => {
- chars.next();
- len += 1;
- has_exp_value = true;
- }
- '_' => {
- chars.next();
- len += 1;
- }
- _ => break,
- }
- }
- if !has_exp_value {
- return Err(LexError);
- }
- }
-
- Ok((input.advance(len), ()))
-}
-
-fn int(input: Cursor) -> PResult<()> {
- let (mut rest, ()) = digits(input)?;
- if let Some(ch) = rest.chars().next() {
- if is_ident_start(ch) {
- rest = symbol_not_raw(rest)?.0;
- }
- }
- word_break(rest)
-}
-
-fn digits(mut input: Cursor) -> PResult<()> {
- let base = if input.starts_with("0x") {
- input = input.advance(2);
- 16
- } else if input.starts_with("0o") {
- input = input.advance(2);
- 8
- } else if input.starts_with("0b") {
- input = input.advance(2);
- 2
- } else {
- 10
- };
-
- let mut len = 0;
- let mut empty = true;
- for b in input.bytes() {
- let digit = match b {
- b'0'..=b'9' => (b - b'0') as u64,
- b'a'..=b'f' => 10 + (b - b'a') as u64,
- b'A'..=b'F' => 10 + (b - b'A') as u64,
- b'_' => {
- if empty && base == 10 {
- return Err(LexError);
- }
- len += 1;
- continue;
- }
- _ => break,
- };
- if digit >= base {
- return Err(LexError);
- }
- len += 1;
- empty = false;
- }
- if empty {
- Err(LexError)
- } else {
- Ok((input.advance(len), ()))
- }
-}
-
-fn op(input: Cursor) -> PResult<Punct> {
- let input = skip_whitespace(input);
- match op_char(input) {
- Ok((rest, '\'')) => {
- symbol(rest)?;
- Ok((rest, Punct::new('\'', Spacing::Joint)))
- }
- Ok((rest, ch)) => {
- let kind = match op_char(rest) {
- Ok(_) => Spacing::Joint,
- Err(LexError) => Spacing::Alone,
- };
- Ok((rest, Punct::new(ch, kind)))
- }
- Err(LexError) => Err(LexError),
- }
-}
-
-fn op_char(input: Cursor) -> PResult<char> {
- if input.starts_with("//") || input.starts_with("/*") {
- // Do not accept `/` of a comment as an op.
- return Err(LexError);
- }
-
- let mut chars = input.chars();
- let first = match chars.next() {
- Some(ch) => ch,
- None => {
- return Err(LexError);
- }
- };
- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
- if recognized.contains(first) {
- Ok((input.advance(first.len_utf8()), first))
- } else {
- Err(LexError)
- }
-}
-
-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
- let mut trees = Vec::new();
- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
- if inner {
- trees.push(Punct::new('!', Spacing::Alone).into());
- }
- let mut stream = vec![
- TokenTree::Ident(crate::Ident::new("doc", span)),
- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
- TokenTree::Literal(crate::Literal::string(comment)),
- ];
- for tt in stream.iter_mut() {
- tt.set_span(span);
- }
- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
- trees.push(crate::Group::_new_stable(group).into());
- for tt in trees.iter_mut() {
- tt.set_span(span);
- }
- Ok((rest, trees))
-}
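To observe the rewrite performed by `doc_comment` from the outside, parse a commented item and inspect the rendered stream. A small sketch (assumes `proc-macro2` as a dependency; the assertions only check the rendering loosely):

```rust
use proc_macro2::TokenStream;

fn main() {
    // `///` comments come back as `# [doc = "..."]` attribute tokens.
    let stream: TokenStream = "/// hello\nstruct S;".parse().unwrap();
    let rendered = stream.to_string();
    assert!(rendered.contains("doc"));
    assert!(rendered.contains("hello"));
}
```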
-
-named!(doc_comment_contents -> (&str, bool), alt!(
- do_parse!(
- punct!("//!") >>
- s: take_until_newline_or_eof!() >>
- ((s, true))
- )
- |
- do_parse!(
- option!(whitespace) >>
- peek!(tag!("/*!")) >>
- s: block_comment >>
- ((s, true))
- )
- |
- do_parse!(
- punct!("///") >>
- not!(tag!("/")) >>
- s: take_until_newline_or_eof!() >>
- ((s, false))
- )
- |
- do_parse!(
- option!(whitespace) >>
- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
- s: block_comment >>
- ((s, false))
- )
-));
diff --git a/proc-macro2/src/lib.rs b/proc-macro2/src/lib.rs
deleted file mode 100644
index bbfb375..0000000
--- a/proc-macro2/src/lib.rs
+++ /dev/null
@@ -1,1199 +0,0 @@
-//! A wrapper around the procedural macro API of the compiler's [`proc_macro`]
-//! crate. This library serves two purposes:
-//!
-//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
-//!
-//! - **Bring proc-macro-like functionality to other contexts like build.rs and
-//! main.rs.** Types from `proc_macro` are entirely specific to procedural
-//! macros and cannot ever exist in code outside of a procedural macro.
-//! Meanwhile `proc_macro2` types may exist anywhere including non-macro code.
-//! By developing foundational libraries like [syn] and [quote] against
-//! `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem
-//! becomes easily applicable to many other use cases and we avoid
-//! reimplementing non-macro equivalents of those libraries.
-//!
-//! - **Make procedural macros unit testable.** As a consequence of being
-//! specific to procedural macros, nothing that uses `proc_macro` can be
-//! executed from a unit test. In order for helper libraries or components of
-//! a macro to be testable in isolation, they must be implemented using
-//! `proc_macro2`.
-//!
-//! [syn]: https://github.com/dtolnay/syn
-//! [quote]: https://github.com/dtolnay/quote
-//!
-//! # Usage
-//!
-//! The skeleton of a typical procedural macro looks like this:
-//!
-//! ```
-//! extern crate proc_macro;
-//!
-//! # const IGNORE: &str = stringify! {
-//! #[proc_macro_derive(MyDerive)]
-//! # };
-//! # #[cfg(wrap_proc_macro)]
-//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-//! let input = proc_macro2::TokenStream::from(input);
-//!
-//! let output: proc_macro2::TokenStream = {
-//! /* transform input */
-//! # input
-//! };
-//!
-//! proc_macro::TokenStream::from(output)
-//! }
-//! ```
-//!
-//! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to
-//! propagate parse errors correctly back to the compiler when parsing fails.
-//!
-//! [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html
-//!
-//! # Unstable features
-//!
-//! The default feature set of proc-macro2 tracks the most recent stable
-//! compiler API. Functionality in `proc_macro` that is not yet stable is not
-//! exposed by proc-macro2 by default.
-//!
-//! To opt into the additional APIs available in the most recent nightly
-//! compiler, the `procmacro2_semver_exempt` config flag must be passed to
-//! rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As
-//! these are unstable APIs that track the nightly compiler, minor versions of
-//! proc-macro2 may make breaking changes to them at any time.
-//!
-//! ```sh
-//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
-//! ```
-//!
-//! Note that this must not only be done for your crate, but for any crate that
-//! depends on your crate. This infectious nature is intentional, as it serves
-//! as a reminder that you are outside of the normal semver guarantees.
-//!
-//! Semver exempt methods are marked as such in the proc-macro2 documentation.
-//!
-//! # Thread-Safety
-//!
-//! Most types in this crate are `!Sync` because the underlying compiler
-//! types make use of thread-local memory, meaning they cannot be accessed from
-//! a different thread.
-
-// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.7")]
-#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
-#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
-
-#[cfg(use_proc_macro)]
-extern crate proc_macro;
-
-use std::cmp::Ordering;
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::iter::FromIterator;
-use std::marker;
-use std::ops::RangeBounds;
-#[cfg(procmacro2_semver_exempt)]
-use std::path::PathBuf;
-use std::rc::Rc;
-use std::str::FromStr;
-
-#[macro_use]
-mod strnom;
-mod fallback;
-
-#[cfg(not(wrap_proc_macro))]
-use crate::fallback as imp;
-#[path = "wrapper.rs"]
-#[cfg(wrap_proc_macro)]
-mod imp;
-
-/// An abstract stream of tokens, or more concretely a sequence of token trees.
-///
-/// This type provides interfaces for iterating over token trees and for
-/// collecting token trees into one stream.
-///
-/// Token stream is both the input and output of `#[proc_macro]`,
-/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions.
-#[derive(Clone)]
-pub struct TokenStream {
- inner: imp::TokenStream,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-/// Error returned from `TokenStream::from_str`.
-pub struct LexError {
- inner: imp::LexError,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-impl TokenStream {
- fn _new(inner: imp::TokenStream) -> TokenStream {
- TokenStream {
- inner,
- _marker: marker::PhantomData,
- }
- }
-
- fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
- TokenStream {
- inner: inner.into(),
- _marker: marker::PhantomData,
- }
- }
-
- /// Returns an empty `TokenStream` containing no token trees.
- pub fn new() -> TokenStream {
- TokenStream::_new(imp::TokenStream::new())
- }
-
- /// Checks if this `TokenStream` is empty.
- pub fn is_empty(&self) -> bool {
- self.inner.is_empty()
- }
-}
-
-/// `TokenStream::default()` returns an empty stream,
-/// i.e. this is equivalent to `TokenStream::new()`.
-impl Default for TokenStream {
- fn default() -> Self {
- TokenStream::new()
- }
-}
-
-/// Attempts to break the string into tokens and parse those tokens into a token
-/// stream.
-///
-/// May fail for a number of reasons; for example, if the string contains
-/// unbalanced delimiters or characters that do not exist in the language.
-///
-/// NOTE: Some errors may cause panics instead of returning `LexError`. We
-/// reserve the right to change these errors into `LexError`s later.
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- let e = src.parse().map_err(|e| LexError {
- inner: e,
- _marker: marker::PhantomData,
- })?;
- Ok(TokenStream::_new(e))
- }
-}
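A small usage sketch for this `FromStr` impl, handling the `LexError` case instead of unwrapping (the `parse_or_empty` helper is hypothetical, not part of the crate):

```rust
use proc_macro2::TokenStream;

fn parse_or_empty(src: &str) -> TokenStream {
    match src.parse::<TokenStream>() {
        Ok(tokens) => tokens,
        Err(err) => {
            eprintln!("failed to lex input: {:?}", err);
            TokenStream::new()
        }
    }
}

fn main() {
    assert!(!parse_or_empty("fn answer() -> u32 { 42 }").is_empty());
    assert!(parse_or_empty("fn broken( {").is_empty()); // unbalanced delimiters
}
```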
-
-#[cfg(use_proc_macro)]
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
- TokenStream::_new(inner.into())
- }
-}
-
-#[cfg(use_proc_macro)]
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
- inner.inner.into()
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(token: TokenTree) -> Self {
- TokenStream::_new(imp::TokenStream::from(token))
- }
-}
-
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
- self.inner.extend(streams)
- }
-}
-
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- self.inner
- .extend(streams.into_iter().map(|stream| stream.inner))
- }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
- TokenStream::_new(streams.into_iter().collect())
- }
-}
-impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
- }
-}
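A sketch of building a stream token by token through these `FromIterator` impls (assumes `proc-macro2` as a dependency):

```rust
use proc_macro2::{Literal, Punct, Spacing, TokenStream, TokenTree};

fn main() {
    let tokens = vec![
        TokenTree::Literal(Literal::u32_unsuffixed(1)),
        TokenTree::Punct(Punct::new('+', Spacing::Alone)),
        TokenTree::Literal(Literal::u32_unsuffixed(1)),
    ];
    let stream: TokenStream = tokens.into_iter().collect();
    // Prints `1 + 1` (exact whitespace is an implementation detail).
    println!("{}", stream);
}
```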
-
-/// Prints the token stream as a string that is supposed to be losslessly
-/// convertible back into the same token stream (modulo spans), except for
-/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
-/// numeric literals.
-impl fmt::Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-/// Prints token in a form convenient for debugging.
-impl fmt::Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-impl fmt::Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-/// The source file of a given `Span`.
-///
-/// This type is semver exempt and not exposed by default.
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile {
- inner: imp::SourceFile,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl SourceFile {
- fn _new(inner: imp::SourceFile) -> Self {
- SourceFile {
- inner,
- _marker: marker::PhantomData,
- }
- }
-
- /// Get the path to this source file.
- ///
- /// ### Note
- ///
- /// If the code span associated with this `SourceFile` was generated by an
- /// external macro, this may not be an actual path on the filesystem. Use
- /// [`is_real`] to check.
- ///
- /// Also note that even if `is_real` returns `true`, if
- /// `--remap-path-prefix` was passed on the command line, the path as given
- /// may not actually be valid.
- ///
- /// [`is_real`]: #method.is_real
- pub fn path(&self) -> PathBuf {
- self.inner.path()
- }
-
- /// Returns `true` if this source file is a real source file, and not
- /// generated by an external macro's expansion.
- pub fn is_real(&self) -> bool {
- self.inner.is_real()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-/// A line-column pair representing the start or end of a `Span`.
-///
-/// This type is semver exempt and not exposed by default.
-#[cfg(span_locations)]
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub struct LineColumn {
- /// The 1-indexed line in the source file on which the span starts or ends
- /// (inclusive).
- pub line: usize,
- /// The 0-indexed column (in UTF-8 characters) in the source file on which
- /// the span starts or ends (inclusive).
- pub column: usize,
-}
-
-/// A region of source code, along with macro expansion information.
-#[derive(Copy, Clone)]
-pub struct Span {
- inner: imp::Span,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-impl Span {
- fn _new(inner: imp::Span) -> Span {
- Span {
- inner,
- _marker: marker::PhantomData,
- }
- }
-
- fn _new_stable(inner: fallback::Span) -> Span {
- Span {
- inner: inner.into(),
- _marker: marker::PhantomData,
- }
- }
-
- /// The span of the invocation of the current procedural macro.
- ///
- /// Identifiers created with this span will be resolved as if they were
- /// written directly at the macro call location (call-site hygiene) and
- /// other code at the macro call site will be able to refer to them as well.
- pub fn call_site() -> Span {
- Span::_new(imp::Span::call_site())
- }
-
- /// A span that resolves at the macro definition site.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn def_site() -> Span {
- Span::_new(imp::Span::def_site())
- }
-
- /// Creates a new span with the same line/column information as `self` but
- /// that resolves symbols as though it were at `other`.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn resolved_at(&self, other: Span) -> Span {
- Span::_new(self.inner.resolved_at(other.inner))
- }
-
- /// Creates a new span with the same name resolution behavior as `self` but
- /// with the line/column information of `other`.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn located_at(&self, other: Span) -> Span {
- Span::_new(self.inner.located_at(other.inner))
- }
-
- /// Convert `proc_macro2::Span` to `proc_macro::Span`.
- ///
- /// This method is available when building with a nightly compiler, or when
- /// building with rustc 1.29+ *without* semver exempt features.
- ///
- /// # Panics
- ///
- /// Panics if called from outside of a procedural macro. Unlike
- /// `proc_macro2::Span`, the `proc_macro::Span` type can only exist within
- /// the context of a procedural macro invocation.
- #[cfg(wrap_proc_macro)]
- pub fn unwrap(self) -> proc_macro::Span {
- self.inner.unwrap()
- }
-
- // Soft deprecated. Please use Span::unwrap.
- #[cfg(wrap_proc_macro)]
- #[doc(hidden)]
- pub fn unstable(self) -> proc_macro::Span {
- self.unwrap()
- }
-
- /// The original source file into which this span points.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn source_file(&self) -> SourceFile {
- SourceFile::_new(self.inner.source_file())
- }
-
- /// Get the starting line/column in the source file for this span.
- ///
- /// This method requires the `"span-locations"` feature to be enabled.
- #[cfg(span_locations)]
- pub fn start(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.start();
- LineColumn { line, column }
- }
-
- /// Get the ending line/column in the source file for this span.
- ///
- /// This method requires the `"span-locations"` feature to be enabled.
- #[cfg(span_locations)]
- pub fn end(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.end();
- LineColumn { line, column }
- }
-
- /// Create a new span encompassing `self` and `other`.
- ///
- /// Returns `None` if `self` and `other` are from different files.
- ///
- /// Warning: the underlying [`proc_macro::Span::join`] method is
- /// nightly-only. When called from within a procedural macro not using a
- /// nightly compiler, this method will always return `None`.
- ///
- /// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join
- pub fn join(&self, other: Span) -> Option<Span> {
- self.inner.join(other.inner).map(Span::_new)
- }
-
- /// Compares two spans to see if they're equal.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn eq(&self, other: &Span) -> bool {
- self.inner.eq(&other.inner)
- }
-}
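A minimal sketch of the portable parts of this API: `Span::call_site()` works on every compiler, and spans are copied onto other tokens with `set_span` (assumes `proc-macro2` as a dependency):

```rust
use proc_macro2::{Ident, Punct, Spacing, Span};

fn main() {
    let span = Span::call_site();
    let ident = Ident::new("value", span);
    let mut semi = Punct::new(';', Spacing::Alone);
    semi.set_span(ident.span());
    println!("{}{}", ident, semi);
}
```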
-
-/// Prints a span in a form convenient for debugging.
-impl fmt::Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
-#[derive(Clone)]
-pub enum TokenTree {
- /// A token stream surrounded by bracket delimiters.
- Group(Group),
- /// An identifier.
- Ident(Ident),
- /// A single punctuation character (`+`, `,`, `$`, etc.).
- Punct(Punct),
- /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
- Literal(Literal),
-}
-
-impl TokenTree {
- /// Returns the span of this tree, delegating to the `span` method of
- /// the contained token or a delimited stream.
- pub fn span(&self) -> Span {
- match *self {
- TokenTree::Group(ref t) => t.span(),
- TokenTree::Ident(ref t) => t.span(),
- TokenTree::Punct(ref t) => t.span(),
- TokenTree::Literal(ref t) => t.span(),
- }
- }
-
- /// Configures the span for *only this token*.
- ///
-    /// Note that if this token is a `Group` then this method will not configure
-    /// the span of each of the internal tokens; it will simply delegate to the
-    /// `set_span` method of each variant.
- pub fn set_span(&mut self, span: Span) {
- match *self {
- TokenTree::Group(ref mut t) => t.set_span(span),
- TokenTree::Ident(ref mut t) => t.set_span(span),
- TokenTree::Punct(ref mut t) => t.set_span(span),
- TokenTree::Literal(ref mut t) => t.set_span(span),
- }
- }
-}
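A shallow walk dispatching on the `TokenTree` variants, recursing into groups by hand (the `count_idents` helper is hypothetical):

```rust
use proc_macro2::{TokenStream, TokenTree};

fn count_idents(stream: TokenStream) -> usize {
    stream
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Group(group) => count_idents(group.stream()),
            TokenTree::Ident(_) => 1,
            TokenTree::Punct(_) | TokenTree::Literal(_) => 0,
        })
        .sum()
}

fn main() {
    let stream: TokenStream = "fn add(a: u32, b: u32) -> u32 { a + b }".parse().unwrap();
    assert_eq!(count_idents(stream), 9); // fn, add, a, u32, b, u32, u32, a, b
}
```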
-
-impl From<Group> for TokenTree {
- fn from(g: Group) -> TokenTree {
- TokenTree::Group(g)
- }
-}
-
-impl From<Ident> for TokenTree {
- fn from(g: Ident) -> TokenTree {
- TokenTree::Ident(g)
- }
-}
-
-impl From<Punct> for TokenTree {
- fn from(g: Punct) -> TokenTree {
- TokenTree::Punct(g)
- }
-}
-
-impl From<Literal> for TokenTree {
- fn from(g: Literal) -> TokenTree {
- TokenTree::Literal(g)
- }
-}
-
-/// Prints the token tree as a string that is supposed to be losslessly
-/// convertible back into the same token tree (modulo spans), except for
-/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
-/// numeric literals.
-impl fmt::Display for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- TokenTree::Group(ref t) => t.fmt(f),
- TokenTree::Ident(ref t) => t.fmt(f),
- TokenTree::Punct(ref t) => t.fmt(f),
- TokenTree::Literal(ref t) => t.fmt(f),
- }
- }
-}
-
-/// Prints token tree in a form convenient for debugging.
-impl fmt::Debug for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- // Each of these has the name in the struct type in the derived debug,
- // so don't bother with an extra layer of indirection
- match *self {
- TokenTree::Group(ref t) => t.fmt(f),
- TokenTree::Ident(ref t) => {
- let mut debug = f.debug_struct("Ident");
- debug.field("sym", &format_args!("{}", t));
- imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
- debug.finish()
- }
- TokenTree::Punct(ref t) => t.fmt(f),
- TokenTree::Literal(ref t) => t.fmt(f),
- }
- }
-}
-
-/// A delimited token stream.
-///
-/// A `Group` internally contains a `TokenStream` which is surrounded by
-/// `Delimiter`s.
-#[derive(Clone)]
-pub struct Group {
- inner: imp::Group,
-}
-
-/// Describes how a sequence of token trees is delimited.
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Delimiter {
- /// `( ... )`
- Parenthesis,
- /// `{ ... }`
- Brace,
- /// `[ ... ]`
- Bracket,
- /// `Ø ... Ø`
- ///
- /// An implicit delimiter, that may, for example, appear around tokens
- /// coming from a "macro variable" `$var`. It is important to preserve
- /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
- /// Implicit delimiters may not survive roundtrip of a token stream through
- /// a string.
- None,
-}
-
-impl Group {
- fn _new(inner: imp::Group) -> Self {
- Group { inner }
- }
-
- fn _new_stable(inner: fallback::Group) -> Self {
- Group {
- inner: inner.into(),
- }
- }
-
- /// Creates a new `Group` with the given delimiter and token stream.
- ///
- /// This constructor will set the span for this group to
- /// `Span::call_site()`. To change the span you can use the `set_span`
- /// method below.
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
- Group {
- inner: imp::Group::new(delimiter, stream.inner),
- }
- }
-
- /// Returns the delimiter of this `Group`
- pub fn delimiter(&self) -> Delimiter {
- self.inner.delimiter()
- }
-
- /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
- ///
- /// Note that the returned token stream does not include the delimiter
- /// returned above.
- pub fn stream(&self) -> TokenStream {
- TokenStream::_new(self.inner.stream())
- }
-
- /// Returns the span for the delimiters of this token stream, spanning the
- /// entire `Group`.
- ///
- /// ```text
- /// pub fn span(&self) -> Span {
- /// ^^^^^^^
- /// ```
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- /// Returns the span pointing to the opening delimiter of this group.
- ///
- /// ```text
- /// pub fn span_open(&self) -> Span {
- /// ^
- /// ```
- pub fn span_open(&self) -> Span {
- Span::_new(self.inner.span_open())
- }
-
- /// Returns the span pointing to the closing delimiter of this group.
- ///
- /// ```text
- /// pub fn span_close(&self) -> Span {
- /// ^
- /// ```
- pub fn span_close(&self) -> Span {
- Span::_new(self.inner.span_close())
- }
-
- /// Configures the span for this `Group`'s delimiters, but not its internal
- /// tokens.
- ///
- /// This method will **not** set the span of all the internal tokens spanned
- /// by this group, but rather it will only set the span of the delimiter
- /// tokens at the level of the `Group`.
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner)
- }
-}
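Wrapping an existing stream in delimiters with `Group::new`, a minimal sketch (assumes `proc-macro2` as a dependency):

```rust
use proc_macro2::{Delimiter, Group, TokenStream};

fn main() {
    let inner: TokenStream = "1 + 2".parse().unwrap();
    let group = Group::new(Delimiter::Parenthesis, inner);
    assert_eq!(group.delimiter(), Delimiter::Parenthesis);
    println!("{}", group); // renders as the parenthesized tokens, e.g. `(1 + 2)`
}
```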
-
-/// Prints the group as a string that should be losslessly convertible back
-/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
-/// with `Delimiter::None` delimiters.
-impl fmt::Display for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- fmt::Display::fmt(&self.inner, formatter)
- }
-}
-
-impl fmt::Debug for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- fmt::Debug::fmt(&self.inner, formatter)
- }
-}
-
-/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
-///
-/// Multicharacter operators like `+=` are represented as two instances of
-/// `Punct` with different forms of `Spacing` returned.
-#[derive(Clone)]
-pub struct Punct {
- op: char,
- spacing: Spacing,
- span: Span,
-}
-
-/// Whether a `Punct` is immediately followed by another `Punct` or by another
-/// token or whitespace.
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Spacing {
- /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
- Alone,
- /// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`.
- ///
- /// Additionally, single quote `'` can join with identifiers to form
- /// lifetimes `'ident`.
- Joint,
-}
-
-impl Punct {
- /// Creates a new `Punct` from the given character and spacing.
- ///
- /// The `ch` argument must be a valid punctuation character permitted by the
- /// language, otherwise the function will panic.
- ///
- /// The returned `Punct` will have the default span of `Span::call_site()`
- /// which can be further configured with the `set_span` method below.
- pub fn new(op: char, spacing: Spacing) -> Punct {
- Punct {
- op,
- spacing,
- span: Span::call_site(),
- }
- }
-
- /// Returns the value of this punctuation character as `char`.
- pub fn as_char(&self) -> char {
- self.op
- }
-
- /// Returns the spacing of this punctuation character, indicating whether
- /// it's immediately followed by another `Punct` in the token stream, so
- /// they can potentially be combined into a multicharacter operator
- /// (`Joint`), or it's followed by some other token or whitespace (`Alone`)
- /// so the operator has certainly ended.
- pub fn spacing(&self) -> Spacing {
- self.spacing
- }
-
- /// Returns the span for this punctuation character.
- pub fn span(&self) -> Span {
- self.span
- }
-
- /// Configure the span for this punctuation character.
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
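A sketch of how a multicharacter operator such as `=>` decomposes into two `Punct` tokens, the first `Joint` and the last `Alone` (assumes `proc-macro2` as a dependency):

```rust
use proc_macro2::{Spacing, TokenStream, TokenTree};

fn main() {
    let stream: TokenStream = "=>".parse().unwrap();
    let spacings: Vec<Spacing> = stream
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Punct(p) => p.spacing(),
            _ => unreachable!("`=>` lexes to punctuation only"),
        })
        .collect();
    assert_eq!(spacings, [Spacing::Joint, Spacing::Alone]);
}
```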
-
-/// Prints the punctuation character as a string that should be losslessly
-/// convertible back into the same character.
-impl fmt::Display for Punct {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.op.fmt(f)
- }
-}
-
-impl fmt::Debug for Punct {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Punct");
- debug.field("op", &self.op);
- debug.field("spacing", &self.spacing);
- imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
- debug.finish()
- }
-}
-
-/// A word of Rust code, which may be a keyword or legal variable name.
-///
-/// An identifier consists of at least one Unicode code point, the first of
-/// which has the XID_Start property and the rest of which have the XID_Continue
-/// property.
-///
-/// - The empty string is not an identifier. Use `Option<Ident>`.
-/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
-///
-/// An identifier constructed with `Ident::new` is permitted to be a Rust
-/// keyword, though parsing one through its [`Parse`] implementation rejects
-/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
-/// behaviour of `Ident::new`.
-///
-/// [`Parse`]: https://docs.rs/syn/1.0/syn/parse/trait.Parse.html
-///
-/// # Examples
-///
-/// A new ident can be created from a string using the `Ident::new` function.
-/// A span must be provided explicitly which governs the name resolution
-/// behavior of the resulting identifier.
-///
-/// ```
-/// use proc_macro2::{Ident, Span};
-///
-/// fn main() {
-/// let call_ident = Ident::new("calligraphy", Span::call_site());
-///
-/// println!("{}", call_ident);
-/// }
-/// ```
-///
-/// An ident can be interpolated into a token stream using the `quote!` macro.
-///
-/// ```
-/// use proc_macro2::{Ident, Span};
-/// use quote::quote;
-///
-/// fn main() {
-/// let ident = Ident::new("demo", Span::call_site());
-///
-/// // Create a variable binding whose name is this ident.
-/// let expanded = quote! { let #ident = 10; };
-///
-/// // Create a variable binding with a slightly different name.
-/// let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site());
-/// let expanded = quote! { let #temp_ident = 10; };
-/// }
-/// ```
-///
-/// A string representation of the ident is available through the `to_string()`
-/// method.
-///
-/// ```
-/// # use proc_macro2::{Ident, Span};
-/// #
-/// # let ident = Ident::new("another_identifier", Span::call_site());
-/// #
-/// // Examine the ident as a string.
-/// let ident_string = ident.to_string();
-/// if ident_string.len() > 60 {
-/// println!("Very long identifier: {}", ident_string)
-/// }
-/// ```
-#[derive(Clone)]
-pub struct Ident {
- inner: imp::Ident,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-impl Ident {
- fn _new(inner: imp::Ident) -> Ident {
- Ident {
- inner,
- _marker: marker::PhantomData,
- }
- }
-
- /// Creates a new `Ident` with the given `string` as well as the specified
- /// `span`.
- ///
- /// The `string` argument must be a valid identifier permitted by the
- /// language, otherwise the function will panic.
- ///
- /// Note that `span`, currently in rustc, configures the hygiene information
- /// for this identifier.
- ///
- /// As of this time `Span::call_site()` explicitly opts-in to "call-site"
- /// hygiene meaning that identifiers created with this span will be resolved
- /// as if they were written directly at the location of the macro call, and
- /// other code at the macro call site will be able to refer to them as well.
- ///
-    /// Later spans like `Span::def_site()` will allow opting in to
- /// "definition-site" hygiene meaning that identifiers created with this
- /// span will be resolved at the location of the macro definition and other
- /// code at the macro call site will not be able to refer to them.
- ///
-    /// Due to the current importance of hygiene, this constructor, unlike those
-    /// of other tokens, requires a `Span` to be specified at construction.
- ///
- /// # Panics
- ///
- /// Panics if the input string is neither a keyword nor a legal variable
- /// name. If you are not sure whether the string contains an identifier and
- /// need to handle an error case, use
- /// <a href="https://docs.rs/syn/1.0/syn/fn.parse_str.html"><code
- /// style="padding-right:0;">syn::parse_str</code></a><code
- /// style="padding-left:0;">::&lt;Ident&gt;</code>
- /// rather than `Ident::new`.
- pub fn new(string: &str, span: Span) -> Ident {
- Ident::_new(imp::Ident::new(string, span.inner))
- }
-
- /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn new_raw(string: &str, span: Span) -> Ident {
- Ident::_new_raw(string, span)
- }
-
- fn _new_raw(string: &str, span: Span) -> Ident {
- Ident::_new(imp::Ident::new_raw(string, span.inner))
- }
-
- /// Returns the span of this `Ident`.
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- /// Configures the span of this `Ident`, possibly changing its hygiene
- /// context.
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-}
-
-impl PartialEq for Ident {
- fn eq(&self, other: &Ident) -> bool {
- self.inner == other.inner
- }
-}
-
-impl<T> PartialEq<T> for Ident
-where
- T: ?Sized + AsRef<str>,
-{
- fn eq(&self, other: &T) -> bool {
- self.inner == other
- }
-}
-
-impl Eq for Ident {}
-
-impl PartialOrd for Ident {
- fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for Ident {
- fn cmp(&self, other: &Ident) -> Ordering {
- self.to_string().cmp(&other.to_string())
- }
-}
-
-impl Hash for Ident {
- fn hash<H: Hasher>(&self, hasher: &mut H) {
- self.to_string().hash(hasher)
- }
-}
-
-/// Prints the identifier as a string that should be losslessly convertible back
-/// into the same identifier.
-impl fmt::Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-impl fmt::Debug for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
-/// byte character (`b'a'`), an integer or floating point number with or without
-/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
-///
-/// Boolean literals like `true` and `false` do not belong here, they are
-/// `Ident`s.
-#[derive(Clone)]
-pub struct Literal {
- inner: imp::Literal,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-macro_rules! suffixed_int_literals {
- ($($name:ident => $kind:ident,)*) => ($(
- /// Creates a new suffixed integer literal with the specified value.
- ///
-    /// This function will create an integer like `1u32` where the integer
-    /// value specified is the first part of the token and the integral
-    /// suffix is written at the end. Literals created from negative numbers
-    /// may not survive roundtrips through `TokenStream` or strings and may be
-    /// broken into two tokens (`-` and positive literal).
- ///
- /// Literals created through this method have the `Span::call_site()`
- /// span by default, which can be configured with the `set_span` method
- /// below.
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(imp::Literal::$name(n))
- }
- )*)
-}
-
-macro_rules! unsuffixed_int_literals {
- ($($name:ident => $kind:ident,)*) => ($(
- /// Creates a new unsuffixed integer literal with the specified value.
- ///
- /// This function will create an integer like `1` where the integer
- /// value specified is the first part of the token. No suffix is
- /// specified on this token, meaning that invocations like
- /// `Literal::i8_unsuffixed(1)` are equivalent to
- /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
-    /// may not survive roundtrips through `TokenStream` or strings and may
- /// be broken into two tokens (`-` and positive literal).
- ///
- /// Literals created through this method have the `Span::call_site()`
- /// span by default, which can be configured with the `set_span` method
- /// below.
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(imp::Literal::$name(n))
- }
- )*)
-}
-
-impl Literal {
- fn _new(inner: imp::Literal) -> Literal {
- Literal {
- inner,
- _marker: marker::PhantomData,
- }
- }
-
- fn _new_stable(inner: fallback::Literal) -> Literal {
- Literal {
- inner: inner.into(),
- _marker: marker::PhantomData,
- }
- }
-
- suffixed_int_literals! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- u128_suffixed => u128,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- i128_suffixed => i128,
- isize_suffixed => isize,
- }
-
- unsuffixed_int_literals! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- u128_unsuffixed => u128,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- /// Creates a new unsuffixed floating-point literal.
- ///
- /// This constructor is similar to those like `Literal::i8_unsuffixed` where
- /// the float's value is emitted directly into the token but no suffix is
- /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers may not survive roundtrips through
- /// `TokenStream` or strings and may be broken into two tokens (`-` and
- /// positive literal).
- ///
- /// # Panics
- ///
-    /// This function requires that the specified float is finite; if it is
-    /// infinite or NaN this function will panic.
- pub fn f64_unsuffixed(f: f64) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f64_unsuffixed(f))
- }
-
- /// Creates a new suffixed floating-point literal.
- ///
- /// This constructor will create a literal like `1.0f64` where the value
- /// specified is the preceding part of the token and `f64` is the suffix of
- /// the token. This token will always be inferred to be an `f64` in the
- /// compiler. Literals created from negative numbers may not survive
-    /// roundtrips through `TokenStream` or strings and may be broken into two
- /// tokens (`-` and positive literal).
- ///
- /// # Panics
- ///
-    /// This function requires that the specified float is finite; if it is
-    /// infinite or NaN this function will panic.
- pub fn f64_suffixed(f: f64) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f64_suffixed(f))
- }
-
- /// Creates a new unsuffixed floating-point literal.
- ///
- /// This constructor is similar to those like `Literal::i8_unsuffixed` where
- /// the float's value is emitted directly into the token but no suffix is
- /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers may not survive roundtrips through
- /// `TokenStream` or strings and may be broken into two tokens (`-` and
- /// positive literal).
- ///
- /// # Panics
- ///
-    /// This function requires that the specified float is finite; if it is
-    /// infinite or NaN this function will panic.
- pub fn f32_unsuffixed(f: f32) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f32_unsuffixed(f))
- }
-
- /// Creates a new suffixed floating-point literal.
- ///
- /// This constructor will create a literal like `1.0f32` where the value
- /// specified is the preceding part of the token and `f32` is the suffix of
- /// the token. This token will always be inferred to be an `f32` in the
- /// compiler. Literals created from negative numbers may not survive
-    /// roundtrips through `TokenStream` or strings and may be broken into two
- /// tokens (`-` and positive literal).
- ///
- /// # Panics
- ///
-    /// This function requires that the specified float is finite; if it is
-    /// infinite or NaN this function will panic.
- pub fn f32_suffixed(f: f32) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f32_suffixed(f))
- }
-
- /// String literal.
- pub fn string(string: &str) -> Literal {
- Literal::_new(imp::Literal::string(string))
- }
-
- /// Character literal.
- pub fn character(ch: char) -> Literal {
- Literal::_new(imp::Literal::character(ch))
- }
-
- /// Byte string literal.
- pub fn byte_string(s: &[u8]) -> Literal {
- Literal::_new(imp::Literal::byte_string(s))
- }
-
- /// Returns the span encompassing this literal.
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
-    /// Configures the span associated with this literal.
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-
- /// Returns a `Span` that is a subset of `self.span()` containing only
- /// the source bytes in range `range`. Returns `None` if the would-be
- /// trimmed span is outside the bounds of `self`.
- ///
- /// Warning: the underlying [`proc_macro::Literal::subspan`] method is
- /// nightly-only. When called from within a procedural macro not using a
- /// nightly compiler, this method will always return `None`.
- ///
- /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
- pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
- self.inner.subspan(range).map(Span::_new)
- }
-}
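A quick sketch of the `Literal` constructors and how they render (assumes `proc-macro2` as a dependency; the exact strings shown are what the fallback implementation is expected to produce):

```rust
use proc_macro2::Literal;

fn main() {
    assert_eq!(Literal::u8_suffixed(10).to_string(), "10u8");
    assert_eq!(Literal::u8_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::string("hi").to_string(), "\"hi\"");
    assert_eq!(Literal::character('x').to_string(), "'x'");
}
```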
-
-impl fmt::Debug for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-impl fmt::Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
- use std::fmt;
- use std::marker;
- use std::rc::Rc;
-
- pub use crate::TokenStream;
- use crate::{imp, TokenTree};
-
- /// An iterator over `TokenStream`'s `TokenTree`s.
- ///
-    /// The iteration is "shallow", i.e. the iterator doesn't recurse into
- /// delimited groups, and returns whole groups as token trees.
- #[derive(Clone)]
- pub struct IntoIter {
- inner: imp::TokenTreeIter,
- _marker: marker::PhantomData<Rc<()>>,
- }
-
- impl Iterator for IntoIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- self.inner.next()
- }
- }
-
- impl fmt::Debug for IntoIter {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
- }
-
- impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = IntoIter;
-
- fn into_iter(self) -> IntoIter {
- IntoIter {
- inner: self.inner.into_iter(),
- _marker: marker::PhantomData,
- }
- }
- }
-}
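Because iteration is shallow, a delimited group comes back as a single `TokenTree::Group` rather than being flattened. A short sketch (assumes `proc-macro2` as a dependency):

```rust
use proc_macro2::{TokenStream, TokenTree};

fn main() {
    let stream: TokenStream = "a (b c) d".parse().unwrap();
    let kinds: Vec<&str> = stream
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Group(_) => "group",
            TokenTree::Ident(_) => "ident",
            TokenTree::Punct(_) => "punct",
            TokenTree::Literal(_) => "literal",
        })
        .collect();
    assert_eq!(kinds, ["ident", "group", "ident"]);
}
```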
diff --git a/proc-macro2/src/strnom.rs b/proc-macro2/src/strnom.rs
deleted file mode 100644
index eb7d0b8..0000000
--- a/proc-macro2/src/strnom.rs
+++ /dev/null
@@ -1,391 +0,0 @@
-//! Adapted from [`nom`](https://github.com/Geal/nom).
-
-use crate::fallback::LexError;
-use std::str::{Bytes, CharIndices, Chars};
-use unicode_xid::UnicodeXID;
-
-#[derive(Copy, Clone, Eq, PartialEq)]
-pub struct Cursor<'a> {
- pub rest: &'a str,
- #[cfg(span_locations)]
- pub off: u32,
-}
-
-impl<'a> Cursor<'a> {
- #[cfg(not(span_locations))]
- pub fn advance(&self, amt: usize) -> Cursor<'a> {
- Cursor {
- rest: &self.rest[amt..],
- }
- }
- #[cfg(span_locations)]
- pub fn advance(&self, amt: usize) -> Cursor<'a> {
- Cursor {
- rest: &self.rest[amt..],
- off: self.off + (amt as u32),
- }
- }
-
- pub fn find(&self, p: char) -> Option<usize> {
- self.rest.find(p)
- }
-
- pub fn starts_with(&self, s: &str) -> bool {
- self.rest.starts_with(s)
- }
-
- pub fn is_empty(&self) -> bool {
- self.rest.is_empty()
- }
-
- pub fn len(&self) -> usize {
- self.rest.len()
- }
-
- pub fn as_bytes(&self) -> &'a [u8] {
- self.rest.as_bytes()
- }
-
- pub fn bytes(&self) -> Bytes<'a> {
- self.rest.bytes()
- }
-
- pub fn chars(&self) -> Chars<'a> {
- self.rest.chars()
- }
-
- pub fn char_indices(&self) -> CharIndices<'a> {
- self.rest.char_indices()
- }
-}
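A standalone sketch of the cursor idea above, separate from the crate's internal type: slice off the consumed prefix while tracking a byte offset so span locations can be recovered later (`MiniCursor` is purely illustrative):

```rust
#[derive(Copy, Clone)]
struct MiniCursor<'a> {
    rest: &'a str, // remaining, not-yet-lexed input
    off: u32,      // byte offset of `rest` within the original source
}

impl<'a> MiniCursor<'a> {
    fn advance(self, amt: usize) -> MiniCursor<'a> {
        MiniCursor {
            rest: &self.rest[amt..],
            off: self.off + amt as u32,
        }
    }
}

fn main() {
    let c = MiniCursor { rest: "fn main() {}", off: 0 };
    let c = c.advance(2); // consume `fn`
    assert_eq!(c.rest, " main() {}");
    assert_eq!(c.off, 2);
}
```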
-
-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
-
-pub fn whitespace(input: Cursor) -> PResult<()> {
- if input.is_empty() {
- return Err(LexError);
- }
-
- let bytes = input.as_bytes();
- let mut i = 0;
- while i < bytes.len() {
- let s = input.advance(i);
- if bytes[i] == b'/' {
- if s.starts_with("//")
- && (!s.starts_with("///") || s.starts_with("////"))
- && !s.starts_with("//!")
- {
- if let Some(len) = s.find('\n') {
- i += len + 1;
- continue;
- }
- break;
- } else if s.starts_with("/**/") {
- i += 4;
- continue;
- } else if s.starts_with("/*")
- && (!s.starts_with("/**") || s.starts_with("/***"))
- && !s.starts_with("/*!")
- {
- let (_, com) = block_comment(s)?;
- i += com.len();
- continue;
- }
- }
- match bytes[i] {
- b' ' | 0x09..=0x0d => {
- i += 1;
- continue;
- }
- b if b <= 0x7f => {}
- _ => {
- let ch = s.chars().next().unwrap();
- if is_whitespace(ch) {
- i += ch.len_utf8();
- continue;
- }
- }
- }
- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
- }
- Ok((input.advance(input.len()), ()))
-}
-
-pub fn block_comment(input: Cursor) -> PResult<&str> {
- if !input.starts_with("/*") {
- return Err(LexError);
- }
-
- let mut depth = 0;
- let bytes = input.as_bytes();
- let mut i = 0;
- let upper = bytes.len() - 1;
- while i < upper {
- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
- depth += 1;
- i += 1; // eat '*'
- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
- depth -= 1;
- if depth == 0 {
- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
- }
- i += 1; // eat '/'
- }
- i += 1;
- }
- Err(LexError)
-}
-
-pub fn skip_whitespace(input: Cursor) -> Cursor {
- match whitespace(input) {
- Ok((rest, _)) => rest,
- Err(LexError) => input,
- }
-}
-
-fn is_whitespace(ch: char) -> bool {
- // Rust treats left-to-right mark and right-to-left mark as whitespace
- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
-}
-
-pub fn word_break(input: Cursor) -> PResult<()> {
- match input.chars().next() {
- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
- Some(_) | None => Ok((input, ())),
- }
-}
-
-macro_rules! named {
- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
- $submac!(i, $($args)*)
- }
- };
-}
-
-macro_rules! alt {
- ($i:expr, $e:ident | $($rest:tt)*) => {
- alt!($i, call!($e) | $($rest)*)
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
- match $subrule!($i, $($args)*) {
- res @ Ok(_) => res,
- _ => alt!($i, $($rest)*)
- }
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
- match $subrule!($i, $($args)*) {
- Ok((i, o)) => Ok((i, $gen(o))),
- Err(LexError) => alt!($i, $($rest)*)
- }
- };
-
- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
- alt!($i, call!($e) => { $gen } | $($rest)*)
- };
-
- ($i:expr, $e:ident => { $gen:expr }) => {
- alt!($i, call!($e) => { $gen })
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
- match $subrule!($i, $($args)*) {
- Ok((i, o)) => Ok((i, $gen(o))),
- Err(LexError) => Err(LexError),
- }
- };
-
- ($i:expr, $e:ident) => {
- alt!($i, call!($e))
- };
-
- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
- $subrule!($i, $($args)*)
- };
-}
-
-macro_rules! do_parse {
- ($i:expr, ( $($rest:expr),* )) => {
- Ok(($i, ( $($rest),* )))
- };
-
- ($i:expr, $e:ident >> $($rest:tt)*) => {
- do_parse!($i, call!($e) >> $($rest)*)
- };
-
- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, _)) => do_parse!(i, $($rest)*),
- }
- };
-
- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
- do_parse!($i, $field: call!($e) >> $($rest)*)
- };
-
- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => {
- let $field = o;
- do_parse!(i, $($rest)*)
- },
- }
- };
-}
-
-macro_rules! peek {
- ($i:expr, $submac:ident!( $($args:tt)* )) => {
- match $submac!($i, $($args)*) {
- Ok((_, o)) => Ok(($i, o)),
- Err(LexError) => Err(LexError),
- }
- };
-}
-
-macro_rules! call {
- ($i:expr, $fun:expr $(, $args:expr)*) => {
- $fun($i $(, $args)*)
- };
-}
-
-macro_rules! option {
- ($i:expr, $f:expr) => {
- match $f($i) {
- Ok((i, o)) => Ok((i, Some(o))),
- Err(LexError) => Ok(($i, None)),
- }
- };
-}
-
-macro_rules! take_until_newline_or_eof {
- ($i:expr,) => {{
- if $i.len() == 0 {
- Ok(($i, ""))
- } else {
- match $i.find('\n') {
- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
- }
- }
- }};
-}
-
-macro_rules! tuple {
- ($i:expr, $($rest:tt)*) => {
- tuple_parser!($i, (), $($rest)*)
- };
-}
-
-/// Do not use directly. Use `tuple!`.
-macro_rules! tuple_parser {
- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
- };
-
- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
- }
- };
-
- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
- }
- };
-
- ($i:expr, ($($parsed:tt),*), $e:ident) => {
- tuple_parser!($i, ($($parsed),*), call!($e))
- };
-
- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
- $submac!($i, $($args)*)
- };
-
- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
- }
- };
-
- ($i:expr, ($($parsed:expr),*)) => {
- Ok(($i, ($($parsed),*)))
- };
-}
-
-macro_rules! not {
- ($i:expr, $submac:ident!( $($args:tt)* )) => {
- match $submac!($i, $($args)*) {
- Ok((_, _)) => Err(LexError),
- Err(LexError) => Ok(($i, ())),
- }
- };
-}
-
-macro_rules! tag {
- ($i:expr, $tag:expr) => {
- if $i.starts_with($tag) {
- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
- } else {
- Err(LexError)
- }
- };
-}
-
-macro_rules! punct {
- ($i:expr, $punct:expr) => {
- $crate::strnom::punct($i, $punct)
- };
-}
-
-/// Do not use directly. Use `punct!`.
-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
- let input = skip_whitespace(input);
- if input.starts_with(token) {
- Ok((input.advance(token.len()), token))
- } else {
- Err(LexError)
- }
-}
-
-macro_rules! preceded {
- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
- Ok((remaining, (_, o))) => Ok((remaining, o)),
- Err(LexError) => Err(LexError),
- }
- };
-
- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
- preceded!($i, $submac!($($args)*), call!($g))
- };
-}
-
-macro_rules! delimited {
- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
- Err(LexError) => Err(LexError),
- Ok((i1, (_, o, _))) => Ok((i1, o))
- }
- };
-}
-
-macro_rules! map {
- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
- match $submac!($i, $($args)*) {
- Err(LexError) => Err(LexError),
- Ok((i, o)) => Ok((i, call!(o, $g)))
- }
- };
-
- ($i:expr, $f:expr, $g:expr) => {
- map!($i, call!($f), $g)
- };
-}
diff --git a/proc-macro2/src/wrapper.rs b/proc-macro2/src/wrapper.rs
deleted file mode 100644
index 552b938..0000000
--- a/proc-macro2/src/wrapper.rs
+++ /dev/null
@@ -1,927 +0,0 @@
-use std::fmt;
-use std::iter;
-use std::ops::RangeBounds;
-use std::panic::{self, PanicInfo};
-#[cfg(super_unstable)]
-use std::path::PathBuf;
-use std::str::FromStr;
-
-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
-
-#[derive(Clone)]
-pub enum TokenStream {
- Compiler(DeferredTokenStream),
- Fallback(fallback::TokenStream),
-}
-
-// Work around https://github.com/rust-lang/rust/issues/65080.
-// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
-// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
-// late as possible to batch together consecutive uses of the Extend impl.
-#[derive(Clone)]
-pub struct DeferredTokenStream {
- stream: proc_macro::TokenStream,
- extra: Vec<proc_macro::TokenTree>,
-}
-
-pub enum LexError {
- Compiler(proc_macro::LexError),
- Fallback(fallback::LexError),
-}
-
-fn nightly_works() -> bool {
- use std::sync::atomic::*;
- use std::sync::Once;
-
- static WORKS: AtomicUsize = AtomicUsize::new(0);
- static INIT: Once = Once::new();
-
- match WORKS.load(Ordering::SeqCst) {
- 1 => return false,
- 2 => return true,
- _ => {}
- }
-
- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
- // then use catch_unwind to determine whether the compiler's proc_macro is
- // working. When proc-macro2 is used from outside of a procedural macro all
- // of the proc_macro crate's APIs currently panic.
- //
- // The Once is to prevent the possibility of this ordering:
- //
- // thread 1 calls take_hook, gets the user's original hook
- // thread 1 calls set_hook with the null hook
- // thread 2 calls take_hook, thinks null hook is the original hook
- // thread 2 calls set_hook with the null hook
- // thread 1 calls set_hook with the actual original hook
- // thread 2 calls set_hook with what it thinks is the original hook
- //
- // in which the user's hook has been lost.
- //
- // There is still a race condition where a panic in a different thread can
- // happen during the interval that the user's original panic hook is
- // unregistered such that their hook is incorrectly not called. This is
- // sufficiently unlikely and less bad than printing panic messages to stderr
- // on correct use of this crate. Maybe there is a libstd feature request
- // here. For now, if a user needs to guarantee that this failure mode does
- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
- // the main thread before launching any other threads.
- INIT.call_once(|| {
- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
-
- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
- let sanity_check = &*null_hook as *const PanicHook;
- let original_hook = panic::take_hook();
- panic::set_hook(null_hook);
-
- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
- WORKS.store(works as usize + 1, Ordering::SeqCst);
-
- let hopefully_null_hook = panic::take_hook();
- panic::set_hook(original_hook);
- if sanity_check != &*hopefully_null_hook {
- panic!("observed race condition in proc_macro2::nightly_works");
- }
- });
- nightly_works()
-}
-
-fn mismatch() -> ! {
- panic!("stable/nightly mismatch")
-}
-
-impl DeferredTokenStream {
- fn new(stream: proc_macro::TokenStream) -> Self {
- DeferredTokenStream {
- stream,
- extra: Vec::new(),
- }
- }
-
- fn is_empty(&self) -> bool {
- self.stream.is_empty() && self.extra.is_empty()
- }
-
- fn evaluate_now(&mut self) {
- self.stream.extend(self.extra.drain(..));
- }
-
- fn into_token_stream(mut self) -> proc_macro::TokenStream {
- self.evaluate_now();
- self.stream
- }
-}
-
-impl TokenStream {
- pub fn new() -> TokenStream {
- if nightly_works() {
- TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
- } else {
- TokenStream::Fallback(fallback::TokenStream::new())
- }
- }
-
- pub fn is_empty(&self) -> bool {
- match self {
- TokenStream::Compiler(tts) => tts.is_empty(),
- TokenStream::Fallback(tts) => tts.is_empty(),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::TokenStream {
- match self {
- TokenStream::Compiler(s) => s.into_token_stream(),
- TokenStream::Fallback(_) => mismatch(),
- }
- }
-
- fn unwrap_stable(self) -> fallback::TokenStream {
- match self {
- TokenStream::Compiler(_) => mismatch(),
- TokenStream::Fallback(s) => s,
- }
- }
-}
-
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- if nightly_works() {
- Ok(TokenStream::Compiler(DeferredTokenStream::new(
- src.parse()?,
- )))
- } else {
- Ok(TokenStream::Fallback(src.parse()?))
- }
- }
-}
-
-impl fmt::Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
- TokenStream::Fallback(tts) => tts.fmt(f),
- }
- }
-}
-
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
- TokenStream::Compiler(DeferredTokenStream::new(inner))
- }
-}
-
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
- match inner {
- TokenStream::Compiler(inner) => inner.into_token_stream(),
- TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
- }
- }
-}
-
-impl From<fallback::TokenStream> for TokenStream {
- fn from(inner: fallback::TokenStream) -> TokenStream {
- TokenStream::Fallback(inner)
- }
-}
-
-// Assumes nightly_works().
-fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
- match token {
- TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Punct(tt) => {
- let spacing = match tt.spacing() {
- Spacing::Joint => proc_macro::Spacing::Joint,
- Spacing::Alone => proc_macro::Spacing::Alone,
- };
- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
- op.set_span(tt.span().inner.unwrap_nightly());
- op.into()
- }
- TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(token: TokenTree) -> TokenStream {
- if nightly_works() {
- TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
- } else {
- TokenStream::Fallback(token.into())
- }
- }
-}
-
-impl iter::FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
- if nightly_works() {
- TokenStream::Compiler(DeferredTokenStream::new(
- trees.into_iter().map(into_compiler_token).collect(),
- ))
- } else {
- TokenStream::Fallback(trees.into_iter().collect())
- }
- }
-}
-
-impl iter::FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut streams = streams.into_iter();
- match streams.next() {
- Some(TokenStream::Compiler(mut first)) => {
- first.evaluate_now();
- first.stream.extend(streams.map(|s| match s {
- TokenStream::Compiler(s) => s.into_token_stream(),
- TokenStream::Fallback(_) => mismatch(),
- }));
- TokenStream::Compiler(first)
- }
- Some(TokenStream::Fallback(mut first)) => {
- first.extend(streams.map(|s| match s {
- TokenStream::Fallback(s) => s,
- TokenStream::Compiler(_) => mismatch(),
- }));
- TokenStream::Fallback(first)
- }
- None => TokenStream::new(),
- }
- }
-}
-
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
- match self {
- TokenStream::Compiler(tts) => {
- // Here is the reason for DeferredTokenStream.
- tts.extra
- .extend(streams.into_iter().map(into_compiler_token));
- }
- TokenStream::Fallback(tts) => tts.extend(streams),
- }
- }
-}
-
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- match self {
- TokenStream::Compiler(tts) => {
- tts.evaluate_now();
- tts.stream
- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
- }
- TokenStream::Fallback(tts) => {
- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
- }
- }
- }
-}
-
-impl fmt::Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
- TokenStream::Fallback(tts) => tts.fmt(f),
- }
- }
-}
-
-impl From<proc_macro::LexError> for LexError {
- fn from(e: proc_macro::LexError) -> LexError {
- LexError::Compiler(e)
- }
-}
-
-impl From<fallback::LexError> for LexError {
- fn from(e: fallback::LexError) -> LexError {
- LexError::Fallback(e)
- }
-}
-
-impl fmt::Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- LexError::Compiler(e) => e.fmt(f),
- LexError::Fallback(e) => e.fmt(f),
- }
- }
-}
-
-#[derive(Clone)]
-pub enum TokenTreeIter {
- Compiler(proc_macro::token_stream::IntoIter),
- Fallback(fallback::TokenTreeIter),
-}
-
-impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
-
- fn into_iter(self) -> TokenTreeIter {
- match self {
- TokenStream::Compiler(tts) => {
- TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
- }
- TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
- }
- }
-}
-
-impl Iterator for TokenTreeIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- let token = match self {
- TokenTreeIter::Compiler(iter) => iter.next()?,
- TokenTreeIter::Fallback(iter) => return iter.next(),
- };
- Some(match token {
- proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
- proc_macro::TokenTree::Punct(tt) => {
- let spacing = match tt.spacing() {
- proc_macro::Spacing::Joint => Spacing::Joint,
- proc_macro::Spacing::Alone => Spacing::Alone,
- };
- let mut o = Punct::new(tt.as_char(), spacing);
- o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
- o.into()
- }
- proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
- proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
- })
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- match self {
- TokenTreeIter::Compiler(tts) => tts.size_hint(),
- TokenTreeIter::Fallback(tts) => tts.size_hint(),
- }
- }
-}
-
-impl fmt::Debug for TokenTreeIter {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("TokenTreeIter").finish()
- }
-}
-
-#[derive(Clone, PartialEq, Eq)]
-#[cfg(super_unstable)]
-pub enum SourceFile {
- Compiler(proc_macro::SourceFile),
- Fallback(fallback::SourceFile),
-}
-
-#[cfg(super_unstable)]
-impl SourceFile {
- fn nightly(sf: proc_macro::SourceFile) -> Self {
- SourceFile::Compiler(sf)
- }
-
- /// Get the path to this source file.
- pub fn path(&self) -> PathBuf {
- match self {
- SourceFile::Compiler(a) => a.path(),
- SourceFile::Fallback(a) => a.path(),
- }
- }
-
- pub fn is_real(&self) -> bool {
- match self {
- SourceFile::Compiler(a) => a.is_real(),
- SourceFile::Fallback(a) => a.is_real(),
- }
- }
-}
-
-#[cfg(super_unstable)]
-impl fmt::Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- SourceFile::Compiler(a) => a.fmt(f),
- SourceFile::Fallback(a) => a.fmt(f),
- }
- }
-}
-
-#[cfg(any(super_unstable, feature = "span-locations"))]
-pub struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
-#[derive(Copy, Clone)]
-pub enum Span {
- Compiler(proc_macro::Span),
- Fallback(fallback::Span),
-}
-
-impl Span {
- pub fn call_site() -> Span {
- if nightly_works() {
- Span::Compiler(proc_macro::Span::call_site())
- } else {
- Span::Fallback(fallback::Span::call_site())
- }
- }
-
- #[cfg(super_unstable)]
- pub fn def_site() -> Span {
- if nightly_works() {
- Span::Compiler(proc_macro::Span::def_site())
- } else {
- Span::Fallback(fallback::Span::def_site())
- }
- }
-
- #[cfg(super_unstable)]
- pub fn resolved_at(&self, other: Span) -> Span {
- match (self, other) {
- (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
- (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
- _ => mismatch(),
- }
- }
-
- #[cfg(super_unstable)]
- pub fn located_at(&self, other: Span) -> Span {
- match (self, other) {
- (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
- (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
- _ => mismatch(),
- }
- }
-
- pub fn unwrap(self) -> proc_macro::Span {
- match self {
- Span::Compiler(s) => s,
- Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
- }
- }
-
- #[cfg(super_unstable)]
- pub fn source_file(&self) -> SourceFile {
- match self {
- Span::Compiler(s) => SourceFile::nightly(s.source_file()),
- Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
- }
- }
-
- #[cfg(any(super_unstable, feature = "span-locations"))]
- pub fn start(&self) -> LineColumn {
- match self {
- #[cfg(proc_macro_span)]
- Span::Compiler(s) => {
- let proc_macro::LineColumn { line, column } = s.start();
- LineColumn { line, column }
- }
- #[cfg(not(proc_macro_span))]
- Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => {
- let fallback::LineColumn { line, column } = s.start();
- LineColumn { line, column }
- }
- }
- }
-
- #[cfg(any(super_unstable, feature = "span-locations"))]
- pub fn end(&self) -> LineColumn {
- match self {
- #[cfg(proc_macro_span)]
- Span::Compiler(s) => {
- let proc_macro::LineColumn { line, column } = s.end();
- LineColumn { line, column }
- }
- #[cfg(not(proc_macro_span))]
- Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => {
- let fallback::LineColumn { line, column } = s.end();
- LineColumn { line, column }
- }
- }
- }
-
- pub fn join(&self, other: Span) -> Option<Span> {
- let ret = match (self, other) {
- #[cfg(proc_macro_span)]
- (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
- (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
- _ => return None,
- };
- Some(ret)
- }
-
- #[cfg(super_unstable)]
- pub fn eq(&self, other: &Span) -> bool {
- match (self, other) {
- (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
- (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
- _ => false,
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Span {
- match self {
- Span::Compiler(s) => s,
- Span::Fallback(_) => mismatch(),
- }
- }
-}
-
-impl From<proc_macro::Span> for crate::Span {
- fn from(proc_span: proc_macro::Span) -> crate::Span {
- crate::Span::_new(Span::Compiler(proc_span))
- }
-}
-
-impl From<fallback::Span> for Span {
- fn from(inner: fallback::Span) -> Span {
- Span::Fallback(inner)
- }
-}
-
-impl fmt::Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Span::Compiler(s) => s.fmt(f),
- Span::Fallback(s) => s.fmt(f),
- }
- }
-}
-
-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
- match span {
- Span::Compiler(s) => {
- debug.field("span", &s);
- }
- Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
- }
-}
-
-#[derive(Clone)]
-pub enum Group {
- Compiler(proc_macro::Group),
- Fallback(fallback::Group),
-}
-
-impl Group {
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
- match stream {
- TokenStream::Compiler(tts) => {
- let delimiter = match delimiter {
- Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
- Delimiter::Bracket => proc_macro::Delimiter::Bracket,
- Delimiter::Brace => proc_macro::Delimiter::Brace,
- Delimiter::None => proc_macro::Delimiter::None,
- };
- Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
- }
- TokenStream::Fallback(stream) => {
- Group::Fallback(fallback::Group::new(delimiter, stream))
- }
- }
- }
-
- pub fn delimiter(&self) -> Delimiter {
- match self {
- Group::Compiler(g) => match g.delimiter() {
- proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
- proc_macro::Delimiter::Bracket => Delimiter::Bracket,
- proc_macro::Delimiter::Brace => Delimiter::Brace,
- proc_macro::Delimiter::None => Delimiter::None,
- },
- Group::Fallback(g) => g.delimiter(),
- }
- }
-
- pub fn stream(&self) -> TokenStream {
- match self {
- Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
- Group::Fallback(g) => TokenStream::Fallback(g.stream()),
- }
- }
-
- pub fn span(&self) -> Span {
- match self {
- Group::Compiler(g) => Span::Compiler(g.span()),
- Group::Fallback(g) => Span::Fallback(g.span()),
- }
- }
-
- pub fn span_open(&self) -> Span {
- match self {
- #[cfg(proc_macro_span)]
- Group::Compiler(g) => Span::Compiler(g.span_open()),
- #[cfg(not(proc_macro_span))]
- Group::Compiler(g) => Span::Compiler(g.span()),
- Group::Fallback(g) => Span::Fallback(g.span_open()),
- }
- }
-
- pub fn span_close(&self) -> Span {
- match self {
- #[cfg(proc_macro_span)]
- Group::Compiler(g) => Span::Compiler(g.span_close()),
- #[cfg(not(proc_macro_span))]
- Group::Compiler(g) => Span::Compiler(g.span()),
- Group::Fallback(g) => Span::Fallback(g.span_close()),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match (self, span) {
- (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
- (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
- _ => mismatch(),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Group {
- match self {
- Group::Compiler(g) => g,
- Group::Fallback(_) => mismatch(),
- }
- }
-}
-
-impl From<fallback::Group> for Group {
- fn from(g: fallback::Group) -> Self {
- Group::Fallback(g)
- }
-}
-
-impl fmt::Display for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Group::Compiler(group) => group.fmt(formatter),
- Group::Fallback(group) => group.fmt(formatter),
- }
- }
-}
-
-impl fmt::Debug for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Group::Compiler(group) => group.fmt(formatter),
- Group::Fallback(group) => group.fmt(formatter),
- }
- }
-}
-
-#[derive(Clone)]
-pub enum Ident {
- Compiler(proc_macro::Ident),
- Fallback(fallback::Ident),
-}
-
-impl Ident {
- pub fn new(string: &str, span: Span) -> Ident {
- match span {
- Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
- Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)),
- }
- }
-
- pub fn new_raw(string: &str, span: Span) -> Ident {
- match span {
- Span::Compiler(s) => {
- let p: proc_macro::TokenStream = string.parse().unwrap();
- let ident = match p.into_iter().next() {
- Some(proc_macro::TokenTree::Ident(mut i)) => {
- i.set_span(s);
- i
- }
- _ => panic!("not a valid raw identifier"),
- };
- Ident::Compiler(ident)
- }
- Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)),
- }
- }
-
- pub fn span(&self) -> Span {
- match self {
- Ident::Compiler(t) => Span::Compiler(t.span()),
- Ident::Fallback(t) => Span::Fallback(t.span()),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match (self, span) {
- (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
- (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
- _ => mismatch(),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Ident {
- match self {
- Ident::Compiler(s) => s,
- Ident::Fallback(_) => mismatch(),
- }
- }
-}
-
-impl PartialEq for Ident {
- fn eq(&self, other: &Ident) -> bool {
- match (self, other) {
- (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
- (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
- _ => mismatch(),
- }
- }
-}
-
-impl<T> PartialEq<T> for Ident
-where
- T: ?Sized + AsRef<str>,
-{
- fn eq(&self, other: &T) -> bool {
- let other = other.as_ref();
- match self {
- Ident::Compiler(t) => t.to_string() == other,
- Ident::Fallback(t) => t == other,
- }
- }
-}
-
-impl fmt::Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Ident::Compiler(t) => t.fmt(f),
- Ident::Fallback(t) => t.fmt(f),
- }
- }
-}
-
-impl fmt::Debug for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Ident::Compiler(t) => t.fmt(f),
- Ident::Fallback(t) => t.fmt(f),
- }
- }
-}
-
-#[derive(Clone)]
-pub enum Literal {
- Compiler(proc_macro::Literal),
- Fallback(fallback::Literal),
-}
-
-macro_rules! suffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::$name(n))
- } else {
- Literal::Fallback(fallback::Literal::$name(n))
- }
- }
- )*)
-}
-
-macro_rules! unsuffixed_integers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::$name(n))
- } else {
- Literal::Fallback(fallback::Literal::$name(n))
- }
- }
- )*)
-}
-
-impl Literal {
- suffixed_numbers! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- u128_suffixed => u128,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- i128_suffixed => i128,
- isize_suffixed => isize,
-
- f32_suffixed => f32,
- f64_suffixed => f64,
- }
-
- unsuffixed_integers! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- u128_unsuffixed => u128,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
- } else {
- Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
- }
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
- } else {
- Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
- }
- }
-
- pub fn string(t: &str) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::string(t))
- } else {
- Literal::Fallback(fallback::Literal::string(t))
- }
- }
-
- pub fn character(t: char) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::character(t))
- } else {
- Literal::Fallback(fallback::Literal::character(t))
- }
- }
-
- pub fn byte_string(bytes: &[u8]) -> Literal {
- if nightly_works() {
- Literal::Compiler(proc_macro::Literal::byte_string(bytes))
- } else {
- Literal::Fallback(fallback::Literal::byte_string(bytes))
- }
- }
-
- pub fn span(&self) -> Span {
- match self {
- Literal::Compiler(lit) => Span::Compiler(lit.span()),
- Literal::Fallback(lit) => Span::Fallback(lit.span()),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match (self, span) {
- (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
- (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
- _ => mismatch(),
- }
- }
-
- pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
- match self {
- #[cfg(proc_macro_span)]
- Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
- #[cfg(not(proc_macro_span))]
- Literal::Compiler(_lit) => None,
- Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Literal {
- match self {
- Literal::Compiler(s) => s,
- Literal::Fallback(_) => mismatch(),
- }
- }
-}
-
-impl From<fallback::Literal> for Literal {
- fn from(s: fallback::Literal) -> Literal {
- Literal::Fallback(s)
- }
-}
-
-impl fmt::Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Literal::Compiler(t) => t.fmt(f),
- Literal::Fallback(t) => t.fmt(f),
- }
- }
-}
-
-impl fmt::Debug for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Literal::Compiler(t) => t.fmt(f),
- Literal::Fallback(t) => t.fmt(f),
- }
- }
-}
diff --git a/proc-macro2/tests/features.rs b/proc-macro2/tests/features.rs
deleted file mode 100644
index 073f6e6..0000000
--- a/proc-macro2/tests/features.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-#[test]
-#[ignore]
-fn make_sure_no_proc_macro() {
- assert!(
- !cfg!(feature = "proc-macro"),
- "still compiled with proc_macro?"
- );
-}
diff --git a/proc-macro2/tests/marker.rs b/proc-macro2/tests/marker.rs
deleted file mode 100644
index 7af2539..0000000
--- a/proc-macro2/tests/marker.rs
+++ /dev/null
@@ -1,59 +0,0 @@
-use proc_macro2::*;
-
-macro_rules! assert_impl {
- ($ty:ident is $($marker:ident) and +) => {
- #[test]
- #[allow(non_snake_case)]
- fn $ty() {
- fn assert_implemented<T: $($marker +)+>() {}
- assert_implemented::<$ty>();
- }
- };
-
- ($ty:ident is not $($marker:ident) or +) => {
- #[test]
- #[allow(non_snake_case)]
- fn $ty() {
- $(
- {
- // Implemented for types that implement $marker.
- trait IsNotImplemented {
- fn assert_not_implemented() {}
- }
- impl<T: $marker> IsNotImplemented for T {}
-
- // Implemented for the type being tested.
- trait IsImplemented {
- fn assert_not_implemented() {}
- }
- impl IsImplemented for $ty {}
-
- // If $ty does not implement $marker, there is no ambiguity
- // in the following trait method call.
- <$ty>::assert_not_implemented();
- }
- )+
- }
- };
-}
-
-assert_impl!(Delimiter is Send and Sync);
-assert_impl!(Spacing is Send and Sync);
-
-assert_impl!(Group is not Send or Sync);
-assert_impl!(Ident is not Send or Sync);
-assert_impl!(LexError is not Send or Sync);
-assert_impl!(Literal is not Send or Sync);
-assert_impl!(Punct is not Send or Sync);
-assert_impl!(Span is not Send or Sync);
-assert_impl!(TokenStream is not Send or Sync);
-assert_impl!(TokenTree is not Send or Sync);
-
-#[cfg(procmacro2_semver_exempt)]
-mod semver_exempt {
- use super::*;
-
- assert_impl!(LineColumn is Send and Sync);
-
- assert_impl!(SourceFile is not Send or Sync);
-}
diff --git a/proc-macro2/tests/test.rs b/proc-macro2/tests/test.rs
deleted file mode 100644
index 7528388..0000000
--- a/proc-macro2/tests/test.rs
+++ /dev/null
@@ -1,466 +0,0 @@
-use std::str::{self, FromStr};
-
-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
-
-#[test]
-fn idents() {
- assert_eq!(
- Ident::new("String", Span::call_site()).to_string(),
- "String"
- );
- assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
- assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
-}
-
-#[test]
-#[cfg(procmacro2_semver_exempt)]
-fn raw_idents() {
- assert_eq!(
- Ident::new_raw("String", Span::call_site()).to_string(),
- "r#String"
- );
- assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
- assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
-}
-
-#[test]
-#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
-fn ident_empty() {
- Ident::new("", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
-fn ident_number() {
- Ident::new("255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "\"a#\" is not a valid Ident")]
-fn ident_invalid() {
- Ident::new("a#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn raw_ident_empty() {
- Ident::new("r#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn raw_ident_number() {
- Ident::new("r#255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
-fn raw_ident_invalid() {
- Ident::new("r#a#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn lifetime_empty() {
- Ident::new("'", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn lifetime_number() {
- Ident::new("'255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
-fn lifetime_invalid() {
- Ident::new("'a#", Span::call_site());
-}
-
-#[test]
-fn literal_string() {
- assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
- assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
- assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
-}
-
-#[test]
-fn literal_character() {
- assert_eq!(Literal::character('x').to_string(), "'x'");
- assert_eq!(Literal::character('\'').to_string(), "'\\''");
- assert_eq!(Literal::character('"').to_string(), "'\"'");
-}
-
-#[test]
-fn literal_float() {
- assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
-}
-
-#[test]
-fn literal_suffix() {
- fn token_count(p: &str) -> usize {
- p.parse::<TokenStream>().unwrap().into_iter().count()
- }
-
- assert_eq!(token_count("999u256"), 1);
- assert_eq!(token_count("999r#u256"), 3);
- assert_eq!(token_count("1."), 1);
- assert_eq!(token_count("1.f32"), 3);
- assert_eq!(token_count("1.0_0"), 1);
- assert_eq!(token_count("1._0"), 3);
- assert_eq!(token_count("1._m"), 3);
- assert_eq!(token_count("\"\"s"), 1);
-}
-
-#[test]
-fn roundtrip() {
- fn roundtrip(p: &str) {
- println!("parse: {}", p);
- let s = p.parse::<TokenStream>().unwrap().to_string();
- println!("first: {}", s);
- let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
- assert_eq!(s, s2);
- }
- roundtrip("a");
- roundtrip("<<");
- roundtrip("<<=");
- roundtrip(
- "
- 1
- 1.0
- 1f32
- 2f64
- 1usize
- 4isize
- 4e10
- 1_000
- 1_0i32
- 8u8
- 9
- 0
- 0xffffffffffffffffffffffffffffffff
- 1x
- 1u80
- 1f320
- ",
- );
- roundtrip("'a");
- roundtrip("'_");
- roundtrip("'static");
- roundtrip("'\\u{10__FFFF}'");
- roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
-}
-
-#[test]
-fn fail() {
- fn fail(p: &str) {
- if let Ok(s) = p.parse::<TokenStream>() {
- panic!("should have failed to parse: {}\n{:#?}", p, s);
- }
- }
- fail("' static");
- fail("r#1");
- fail("r#_");
-}
-
-#[cfg(span_locations)]
-#[test]
-fn span_test() {
- use proc_macro2::TokenTree;
-
- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
- let ts = p.parse::<TokenStream>().unwrap();
- check_spans_internal(ts, &mut lines);
- }
-
- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
- for i in ts {
- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
- *lines = rest;
-
- let start = i.span().start();
- assert_eq!(start.line, sline, "sline did not match for {}", i);
- assert_eq!(start.column, scol, "scol did not match for {}", i);
-
- let end = i.span().end();
- assert_eq!(end.line, eline, "eline did not match for {}", i);
- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
-
- match i {
- TokenTree::Group(ref g) => {
- check_spans_internal(g.stream().clone(), lines);
- }
- _ => {}
- }
- }
- }
- }
-
- check_spans(
- "\
-/// This is a document comment
-testing 123
-{
- testing 234
-}",
- &[
- (1, 0, 1, 30), // #
- (1, 0, 1, 30), // [ ... ]
- (1, 0, 1, 30), // doc
- (1, 0, 1, 30), // =
- (1, 0, 1, 30), // "This is..."
- (2, 0, 2, 7), // testing
- (2, 8, 2, 11), // 123
- (3, 0, 5, 1), // { ... }
- (4, 2, 4, 9), // testing
- (4, 10, 4, 13), // 234
- ],
- );
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[cfg(not(nightly))]
-#[test]
-fn default_span() {
- let start = Span::call_site().start();
- assert_eq!(start.line, 1);
- assert_eq!(start.column, 0);
- let end = Span::call_site().end();
- assert_eq!(end.line, 1);
- assert_eq!(end.column, 0);
- let source_file = Span::call_site().source_file();
- assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
- assert!(!source_file.is_real());
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[test]
-fn span_join() {
- let source1 = "aaa\nbbb"
- .parse::<TokenStream>()
- .unwrap()
- .into_iter()
- .collect::<Vec<_>>();
- let source2 = "ccc\nddd"
- .parse::<TokenStream>()
- .unwrap()
- .into_iter()
- .collect::<Vec<_>>();
-
- assert!(source1[0].span().source_file() != source2[0].span().source_file());
- assert_eq!(
- source1[0].span().source_file(),
- source1[1].span().source_file()
- );
-
- let joined1 = source1[0].span().join(source1[1].span());
- let joined2 = source1[0].span().join(source2[0].span());
- assert!(joined1.is_some());
- assert!(joined2.is_none());
-
- let start = joined1.unwrap().start();
- let end = joined1.unwrap().end();
- assert_eq!(start.line, 1);
- assert_eq!(start.column, 0);
- assert_eq!(end.line, 2);
- assert_eq!(end.column, 3);
-
- assert_eq!(
- joined1.unwrap().source_file(),
- source1[0].span().source_file()
- );
-}
-
-#[test]
-fn no_panic() {
- let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
-}
-
-#[test]
-fn tricky_doc_comment() {
- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
-
- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
- match tokens[0] {
- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
- _ => panic!("wrong token {:?}", tokens[0]),
- }
- let mut tokens = match tokens[1] {
- proc_macro2::TokenTree::Group(ref tt) => {
- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
- tt.stream().into_iter()
- }
- _ => panic!("wrong token {:?}", tokens[0]),
- };
-
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
- t => panic!("wrong token {:?}", t),
- }
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
- t => panic!("wrong token {:?}", t),
- }
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Literal(ref tt) => {
- assert_eq!(tt.to_string(), "\" doc\"");
- }
- t => panic!("wrong token {:?}", t),
- }
- assert!(tokens.next().is_none());
-
- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
-}
-
-#[test]
-fn op_before_comment() {
- let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
- match tts.next().unwrap() {
- TokenTree::Punct(tt) => {
- assert_eq!(tt.as_char(), '~');
- assert_eq!(tt.spacing(), Spacing::Alone);
- }
- wrong => panic!("wrong token {:?}", wrong),
- }
-}
-
-#[test]
-fn raw_identifier() {
- let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
- match tts.next().unwrap() {
- TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
- wrong => panic!("wrong token {:?}", wrong),
- }
- assert!(tts.next().is_none());
-}
-
-#[test]
-fn test_debug_ident() {
- let ident = Ident::new("proc_macro", Span::call_site());
-
- #[cfg(not(procmacro2_semver_exempt))]
- let expected = "Ident(proc_macro)";
-
- #[cfg(procmacro2_semver_exempt)]
- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
-
- assert_eq!(expected, format!("{:?}", ident));
-}
-
-#[test]
-fn test_debug_tokenstream() {
- let tts = TokenStream::from_str("[a + 1]").unwrap();
-
- #[cfg(not(procmacro2_semver_exempt))]
- let expected = "\
-TokenStream [
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- sym: a,
- },
- Punct {
- op: '+',
- spacing: Alone,
- },
- Literal {
- lit: 1,
- },
- ],
- },
-]\
- ";
-
- #[cfg(not(procmacro2_semver_exempt))]
- let expected_before_trailing_commas = "\
-TokenStream [
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- sym: a
- },
- Punct {
- op: '+',
- spacing: Alone
- },
- Literal {
- lit: 1
- }
- ]
- }
-]\
- ";
-
- #[cfg(procmacro2_semver_exempt)]
- let expected = "\
-TokenStream [
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- sym: a,
- span: bytes(2..3),
- },
- Punct {
- op: '+',
- spacing: Alone,
- span: bytes(4..5),
- },
- Literal {
- lit: 1,
- span: bytes(6..7),
- },
- ],
- span: bytes(1..8),
- },
-]\
- ";
-
- #[cfg(procmacro2_semver_exempt)]
- let expected_before_trailing_commas = "\
-TokenStream [
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- sym: a,
- span: bytes(2..3)
- },
- Punct {
- op: '+',
- spacing: Alone,
- span: bytes(4..5)
- },
- Literal {
- lit: 1,
- span: bytes(6..7)
- }
- ],
- span: bytes(1..8)
- }
-]\
- ";
-
- let actual = format!("{:#?}", tts);
- if actual.ends_with(",\n]") {
- assert_eq!(expected, actual);
- } else {
- assert_eq!(expected_before_trailing_commas, actual);
- }
-}
-
-#[test]
-fn default_tokenstream_is_empty() {
- let default_token_stream: TokenStream = Default::default();
-
- assert!(default_token_stream.is_empty());
-}