Index: head/Mk/Uses/cargo.mk
===================================================================
--- head/Mk/Uses/cargo.mk (revision 552220)
+++ head/Mk/Uses/cargo.mk (revision 552221)
@@ -1,338 +1,338 @@
# $FreeBSD$
#
# This file contains logic to ease porting of Rust packages or
# binaries using the `cargo` command.
#
# Feature: cargo
# Usage: USES=cargo
# Valid ARGS: none
#
# MAINTAINER: rust@FreeBSD.org
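#
# As a rough illustration, a port would typically enable this feature with
# something like the following in its own Makefile (the crate names and
# versions below are only examples):
#
#   USES=           cargo
#   CARGO_CRATES=   libc-0.2.76 \
#                   log-0.4.11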
.if !defined(_INCLUDE_USES_CARGO_MK)
_INCLUDE_USES_CARGO_MK= yes
.if !empty(cargo_ARGS)
IGNORE+= USES=cargo takes no arguments
.endif
# List of static dependencies. The format is cratename-version.
# CARGO_CRATES will be downloaded from MASTER_SITES_CRATESIO.
CARGO_CRATES?=
# List of features to build (space-separated list).
# Use the special token --no-default-features to disable the default
# features; it is passed through to cargo build/install/test.
CARGO_FEATURES?=
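# For example, audio/ncspot (updated later in this commit) disables the
# default features and enables a single backend:
#   CARGO_FEATURES= --no-default-features cursive/pancurses-backend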
# Name of the local directory for vendoring crates.
CARGO_VENDOR_DIR?= ${WRKSRC}/cargo-crates
# Default path for cargo manifest.
CARGO_CARGOTOML?= ${WRKSRC}/Cargo.toml
CARGO_CARGOLOCK?= ${WRKSRC}/Cargo.lock
# Save crates inside ${DISTDIR}/rust/crates by default.
CARGO_DIST_SUBDIR?= rust/crates
# Generate list of DISTFILES.
.for _crate in ${CARGO_CRATES}
MASTER_SITES+= CRATESIO/${_crate:C/^([-_a-zA-Z0-9]+)-[0-9].*/\1/}/${_crate:C/^[-_a-zA-Z0-9]+-([0-9].*)/\1/}:cargo_${_crate:C/[^a-zA-Z0-9_]//g}
DISTFILES+= ${CARGO_DIST_SUBDIR}/${_crate}.tar.gz:cargo_${_crate:C/[^a-zA-Z0-9_]//g}
.endfor
# Build dependencies.
CARGO_BUILDDEP?= yes
.if ${CARGO_BUILDDEP:tl} == "yes"
-BUILD_DEPENDS+= ${RUST_DEFAULT}>=1.46.0:lang/${RUST_DEFAULT}
+BUILD_DEPENDS+= ${RUST_DEFAULT}>=1.47.0:lang/${RUST_DEFAULT}
.endif
# Location of the cargo binary (defaults to lang/rust's cargo binary)
CARGO_CARGO_BIN?= ${LOCALBASE}/bin/cargo
# Location of the cargo output directory.
CARGO_TARGET_DIR?= ${WRKDIR}/target
# Environment for cargo
# - CARGO_HOME: local cache of the registry index
# - CARGO_BUILD_JOBS: number of parallel jobs to run
# - CARGO_TARGET_DIR: where to place all generated artifacts
# - RUST_BACKTRACE: produce backtraces when something in the build panics
# - RUSTC: path of the rustc binary (defaults to lang/rust's)
# - RUSTDOC: path of the rustdoc binary (defaults to lang/rust's)
# - RUSTFLAGS: custom flags to pass to all compiler invocations that Cargo performs
CARGO_ENV+= \
CARGO_HOME=${WRKDIR}/cargo-home \
CARGO_BUILD_JOBS=${MAKE_JOBS_NUMBER} \
CARGO_TARGET_DIR=${CARGO_TARGET_DIR} \
RUST_BACKTRACE=1 \
RUSTC=${LOCALBASE}/bin/rustc \
RUSTDOC=${LOCALBASE}/bin/rustdoc \
RUSTFLAGS="${RUSTFLAGS} -C linker=${CC:Q} ${LDFLAGS:C/.+/-C link-arg=&/}"
# Adjust -C target-cpu if -march/-mcpu is set by bsd.cpu.mk
.if ${ARCH} == amd64 || ${ARCH} == i386
RUSTFLAGS+= ${CFLAGS:M-march=*:S/-march=/-C target-cpu=/}
.elif ${ARCH} == powerpc64
RUSTFLAGS+= ${CFLAGS:M-mcpu=*:S/-mcpu=/-C target-cpu=/:S/power/pwr/}
.else
RUSTFLAGS+= ${CFLAGS:M-mcpu=*:S/-mcpu=/-C target-cpu=/}
.endif
.if defined(PPC_ABI) && ${PPC_ABI} == ELFv1
USE_GCC?= yes
.endif
# Helper to shorten cargo calls.
CARGO_CARGO_RUN= \
cd ${WRKSRC} && ${SETENV} ${MAKE_ENV} ${CARGO_ENV} \
${CARGO_CARGO_BIN}
# User arguments for cargo targets.
CARGO_BUILD_ARGS?=
CARGO_INSTALL_ARGS?=
CARGO_INSTALL_PATH?= .
CARGO_TEST_ARGS?=
CARGO_UPDATE_ARGS?=
# Use module targets?
CARGO_BUILD?= yes
CARGO_CONFIGURE?= yes
CARGO_INSTALL?= yes
CARGO_TEST?= yes
# Set CARGO_USE_GIT{HUB,LAB} to yes if your application requires
# some dependencies from git repositories hosted on GitHub or
# GitLab instances. All Cargo.toml files will be patched to point
# to the right offline sources based on what is defined in
# {GH,GL}_TUPLE. This makes sure that cargo does not attempt to
# access the network during the build.
CARGO_USE_GITHUB?= no
CARGO_USE_GITLAB?= no
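# A minimal sketch of how a port might combine these knobs with GH_TUPLE
# (the account, project, tag, and group below are purely hypothetical):
#   CARGO_USE_GITHUB=  yes
#   GH_TUPLE=          someuser:somecrate:v1.2.3:somecrate
# With this, a `git = "https://github.com/someuser/somecrate"` dependency in
# Cargo.toml is rewritten to a local `path = ...` entry at patch time.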
# Manage crate features.
.if !empty(CARGO_FEATURES:M--no-default-features)
CARGO_BUILD_ARGS+= --no-default-features
CARGO_INSTALL_ARGS+= --no-default-features
CARGO_TEST_ARGS+= --no-default-features
.endif
.if !empty(CARGO_FEATURES:N--no-default-features)
CARGO_BUILD_ARGS+= --features='${CARGO_FEATURES:N--no-default-features}'
CARGO_INSTALL_ARGS+= --features='${CARGO_FEATURES:N--no-default-features}'
CARGO_TEST_ARGS+= --features='${CARGO_FEATURES:N--no-default-features}'
.endif
.if !defined(WITH_DEBUG)
CARGO_BUILD_ARGS+= --release
CARGO_TEST_ARGS+= --release
.else
CARGO_INSTALL_ARGS+= --debug
.endif
.if ${CARGO_CRATES:Mcmake-[0-9]*}
BUILD_DEPENDS+= cmake:devel/cmake
.endif
.if ${CARGO_CRATES:Mgettext-sys-[0-9]*}
CARGO_ENV+= GETTEXT_BIN_DIR=${LOCALBASE}/bin \
GETTEXT_INCLUDE_DIR=${LOCALBASE}/include \
GETTEXT_LIB_DIR=${LOCALBASE}/lib
.endif
.if ${CARGO_CRATES:Mjemalloc-sys-[0-9]*}
BUILD_DEPENDS+= gmake:devel/gmake
.endif
.for libc in ${CARGO_CRATES:Mlibc-[0-9]*}
# FreeBSD 12.0 changed ABI: r318736 and r320043
# https://github.com/rust-lang/libc/commit/78f93220d70e
# https://github.com/rust-lang/libc/commit/969ad2b73cdc
_libc_VER= ${libc:C/.*-//}
. if ${_libc_VER:R:R} == 0 && (${_libc_VER:R:E} < 2 || ${_libc_VER:R:E} == 2 && ${_libc_VER:E} < 38)
DEV_ERROR+= "CARGO_CRATES=${libc} may be unstable on FreeBSD 12.0. Consider updating to the latest version \(higher than 0.2.37\)."
. endif
. if ${_libc_VER:R:R} == 0 && (${_libc_VER:R:E} < 2 || ${_libc_VER:R:E} == 2 && ${_libc_VER:E} < 49)
DEV_ERROR+= "CARGO_CRATES=${libc} may be unstable on aarch64 or not build on armv6, armv7, powerpc64. Consider updating to the latest version \(higher than 0.2.49\)."
. endif
.undef _libc_VER
.endfor
.if ${CARGO_CRATES:Mlibgit2-sys-[0-9]*}
# Use the system's libgit2 instead of building the bundled version
CARGO_ENV+= LIBGIT2_SYS_USE_PKG_CONFIG=1
.endif
.if ${CARGO_CRATES:Mlibssh2-sys-[0-9]*}
# Use the system's libssh2 instead of building the bundled version
CARGO_ENV+= LIBSSH2_SYS_USE_PKG_CONFIG=1
.endif
.if ${CARGO_CRATES:Monig_sys-[0-9]*}
# onig_sys always prefers the system library but will try to link
# statically with it. Since devel/oniguruma doesn't provide a static
# library, it links against libonig.so instead. Strictly speaking, setting
# RUSTONIG_SYSTEM_LIBONIG is not necessary, but it forces onig_sys to
# always use the system's libonig as returned by `pkg-config oniguruma`.
CARGO_ENV+= RUSTONIG_SYSTEM_LIBONIG=1
.endif
.if ${CARGO_CRATES:Mopenssl-0.[0-9].*}
# FreeBSD 12.0 updated base OpenSSL in r339270:
# https://github.com/sfackler/rust-openssl/commit/276577553501
. if !exists(${PATCHDIR}/patch-openssl-1.1.1) # skip if backported
_openssl_VER= ${CARGO_CRATES:Mopenssl-0.[0-9].*:C/.*-//}
. if ${_openssl_VER:R:R} == 0 && (${_openssl_VER:R:E} < 10 || ${_openssl_VER:R:E} == 10 && ${_openssl_VER:E} < 4)
DEV_WARNING+= "CARGO_CRATES=openssl-0.10.3 or older do not support OpenSSL 1.1.1. Consider updating to the latest version."
. endif
. endif
.undef _openssl_VER
.endif
.if ${CARGO_CRATES:Mopenssl-src-[0-9]*}
DEV_WARNING+= "Please make sure this port uses the system OpenSSL and consider removing CARGO_CRATES=${CARGO_CRATES:Mopenssl-src-[0-9]*} (a vendored copy of OpenSSL) from the build, e.g., by patching Cargo.toml appropriately."
.endif
.if ${CARGO_CRATES:Mopenssl-sys-[0-9]*}
# Make sure that openssl-sys can find the correct version of OpenSSL
CARGO_ENV+= OPENSSL_LIB_DIR=${OPENSSLLIB} \
OPENSSL_INCLUDE_DIR=${OPENSSLINC}
.endif
.if ${CARGO_CRATES:Mpkg-config-[0-9]*}
.include "${USESDIR}/pkgconfig.mk"
.endif
_USES_extract+= 600:cargo-extract
cargo-extract:
# Target for preparing the crates directory. It moves all crates into
# the local vendor directory (CARGO_VENDOR_DIR).
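# Each vendored crate also receives a .cargo-checksum.json of roughly the form
# {"package":"<sha256 of the crate tarball>","files":{}} so that cargo accepts
# the directory as a valid registry source.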
@${ECHO_MSG} "===> Moving crates to ${CARGO_VENDOR_DIR}"
@${MKDIR} ${CARGO_VENDOR_DIR}
.for _crate in ${CARGO_CRATES}
@${MV} ${WRKDIR}/${_crate} ${CARGO_VENDOR_DIR}/${_crate}
@${PRINTF} '{"package":"%s","files":{}}' \
$$(${SHA256} -q ${DISTDIR}/${CARGO_DIST_SUBDIR}/${_crate}.tar.gz) \
> ${CARGO_VENDOR_DIR}/${_crate}/.cargo-checksum.json
@if [ -r ${CARGO_VENDOR_DIR}/${_crate}/Cargo.toml.orig ]; then \
${MV} ${CARGO_VENDOR_DIR}/${_crate}/Cargo.toml.orig \
${CARGO_VENDOR_DIR}/${_crate}/Cargo.toml.orig-cargo; \
fi
.endfor
_CARGO_GIT_PATCH_CARGOTOML=
.if ${CARGO_USE_GITHUB:tl} == "yes"
. for _group in ${GH_TUPLE:C@^[^:]*:[^:]*:[^:]*:(([^:/]*)?)((/.*)?)@\2@}
. if empty(CARGO_GIT_SUBDIR:M${_group}\:*)
_CARGO_GIT_PATCH_CARGOTOML:= ${_CARGO_GIT_PATCH_CARGOTOML} \
-e "s@git = ['\"](https|http|git)://github.com/${GH_ACCOUNT_${_group}}/${GH_PROJECT_${_group}}(\.git)?/?[\"']@path = \"${WRKSRC_${_group}}\"@"
. else
. for _group2 _crate _subdir in ${CARGO_GIT_SUBDIR:M${_group}\:*:S,:, ,g}
_CARGO_GIT_PATCH_CARGOTOML:= ${_CARGO_GIT_PATCH_CARGOTOML} \
-e "/^${_crate} =/ s@git = ['\"](https|http|git)://github.com/${GH_ACCOUNT_${_group}}/${GH_PROJECT_${_group}}(\.git)?/?[\"']@path = \"${WRKSRC_${_group}}/${_subdir}\"@"
. endfor
. endif
. endfor
.endif
.if ${CARGO_USE_GITLAB:tl} == "yes"
. for _group in ${GL_TUPLE:C@^(([^:]*://[^:/]*(:[0-9]{1,5})?(/[^:]*[^/])?:)?)([^:]*):([^:]*):([^:]*)(:[^:/]*)((/.*)?)@\8@:S/^://}
. if empty(CARGO_GIT_SUBDIR:M${_group}\:*)
_CARGO_GIT_PATCH_CARGOTOML:= ${_CARGO_GIT_PATCH_CARGOTOML} \
-e "s@git = ['\"]${GL_SITE_${_group}}/${GL_ACCOUNT_${_group}}/${GL_PROJECT_${_group}}(\.git)?/?['\"]@path = \"${WRKSRC_${_group}}\"@"
. else
. for _group2 _crate _subdir in ${CARGO_GIT_SUBDIR:M${_group}\:*:S,:, ,g}
_CARGO_GIT_PATCH_CARGOTOML:= ${_CARGO_GIT_PATCH_CARGOTOML} \
-e "/^${_crate} = / s@git = ['\"]${GL_SITE_${_group}}/${GL_ACCOUNT_${_group}}/${GL_PROJECT_${_group}}(\.git)?/?['\"]@path = \"${WRKSRC_${_group}}/${_subdir}\"@"
. endfor
. endif
. endfor
.endif
.if !empty(_CARGO_GIT_PATCH_CARGOTOML)
_USES_patch+= 600:cargo-patch-git
cargo-patch-git:
@${FIND} ${WRKDIR} -name Cargo.toml -type f -exec \
${SED} -i.dist -E ${_CARGO_GIT_PATCH_CARGOTOML} {} +
.endif
.if ${CARGO_CONFIGURE:tl} == "yes"
_USES_configure+= 250:cargo-configure
# Configure hook. Place a config file that overrides the crates-io index
# with the local vendor directory.
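# The generated ${WRKDIR}/.cargo/config then looks roughly like this (the
# directory value is whatever CARGO_VENDOR_DIR expands to):
#   [source.cargo]
#   directory = '/path/to/wrkdir/cargo-crates'
#   [source.crates-io]
#   replace-with = 'cargo'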
cargo-configure:
# Check that the running kernel has COMPAT_FREEBSD11 required by lang/rust post-ino64
@${SETENV} CC="${CC}" OPSYS="${OPSYS}" OSVERSION="${OSVERSION}" WRKDIR="${WRKDIR}" \
${SH} ${SCRIPTSDIR}/rust-compat11-canary.sh
@${MKDIR} ${WRKDIR}/.cargo
@${ECHO_CMD} "[source.cargo]" > ${WRKDIR}/.cargo/config
@${ECHO_CMD} "directory = '${CARGO_VENDOR_DIR}'" >> ${WRKDIR}/.cargo/config
@${ECHO_CMD} "[source.crates-io]" >> ${WRKDIR}/.cargo/config
@${ECHO_CMD} "replace-with = 'cargo'" >> ${WRKDIR}/.cargo/config
@if ! ${GREP} -qF '[profile.release]' ${CARGO_CARGOTOML}; then \
${ECHO_CMD} "" >> ${CARGO_CARGOTOML}; \
${ECHO_CMD} "[profile.release]" >> ${CARGO_CARGOTOML}; \
${ECHO_CMD} "opt-level = 2" >> ${CARGO_CARGOTOML}; \
${ECHO_CMD} "debug = false" >> ${CARGO_CARGOTOML}; \
fi
@${CARGO_CARGO_RUN} update \
--manifest-path ${CARGO_CARGOTOML} \
--verbose \
${CARGO_UPDATE_ARGS}
.endif
.if !target(do-build) && ${CARGO_BUILD:tl} == "yes"
do-build:
@${CARGO_CARGO_RUN} build \
--manifest-path ${CARGO_CARGOTOML} \
--verbose \
${CARGO_BUILD_ARGS}
.endif
.if !target(do-install) && ${CARGO_INSTALL:tl} == "yes"
do-install:
. for path in ${CARGO_INSTALL_PATH}
@${CARGO_CARGO_RUN} install \
--no-track \
--path "${path}" \
--root "${STAGEDIR}${PREFIX}" \
--verbose \
${CARGO_INSTALL_ARGS}
. endfor
.endif
.if !target(do-test) && ${CARGO_TEST:tl} == "yes"
do-test:
@${CARGO_CARGO_RUN} test \
--manifest-path ${CARGO_CARGOTOML} \
--verbose \
${CARGO_TEST_ARGS}
.endif
#
# Helper targets for port maintainers
#
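# A typical maintainer workflow (illustrative) is to run these targets from
# the port directory and paste the output into the port's Makefile:
#   make cargo-crates           # emit the crate list derived from Cargo.lock
#   make cargo-crates-licenses  # summarize the licenses of all vendored crates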
# cargo-crates will output the crates list from Cargo.lock. If there
# is no Cargo.lock for some reason, try to generate it first.
cargo-crates: extract
@if [ ! -r "${CARGO_CARGOLOCK}" ]; then \
${ECHO_MSG} "===> ${CARGO_CARGOLOCK} not found. Trying to generate it..."; \
${CARGO_CARGO_RUN} generate-lockfile \
--manifest-path ${CARGO_CARGOTOML} \
--verbose; \
fi
@${SETENV} USE_GITHUB=${USE_GITHUB} USE_GITLAB=${USE_GITLAB} GL_SITE=${GL_SITE} \
${AWK} -f ${SCRIPTSDIR}/cargo-crates.awk ${CARGO_CARGOLOCK}
# cargo-crates-licenses will try to grab license information from
# all downloaded crates.
cargo-crates-licenses: configure
@${FIND} ${CARGO_VENDOR_DIR} -name 'Cargo.toml' -maxdepth 2 \
-exec ${GREP} -H '^license' {} \; \
| ${SED} \
-e 's@^${CARGO_VENDOR_DIR}/@@' \
-e 's@/Cargo.toml:license.*= *"@|@' \
-e 's@"$$@@g' | sort | /usr/bin/column -t -s '|'
.endif
Index: head/audio/ncspot/Makefile
===================================================================
--- head/audio/ncspot/Makefile (revision 552220)
+++ head/audio/ncspot/Makefile (revision 552221)
@@ -1,454 +1,455 @@
# $FreeBSD$
PORTNAME= ncspot
DISTVERSIONPREFIX= v
DISTVERSION= 0.2.3
+PORTREVISION= 1
CATEGORIES= audio
MAINTAINER= ports@FreeBSD.org
COMMENT= Ncurses Spotify client
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libdbus-1.so:devel/dbus
USES= cargo ncurses ssl
USE_GITHUB= yes
GH_ACCOUNT= hrkfdn
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
aes-0.3.2 \
aes-ctr-0.3.0 \
aes-soft-0.3.3 \
aesni-0.6.0 \
ahash-0.3.8 \
ahash-0.4.4 \
aho-corasick-0.7.13 \
alga-0.9.3 \
alsa-0.2.2 \
alsa-sys-0.1.2 \
ansi_term-0.11.0 \
approx-0.3.2 \
arc-swap-0.4.7 \
array-macro-1.0.5 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.50 \
base64-0.9.3 \
base64-0.10.1 \
base64-0.11.0 \
base64-0.12.3 \
bindgen-0.53.3 \
bit-set-0.5.2 \
bit-vec-0.6.2 \
bitflags-0.9.1 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-0.1.6 \
block-buffer-0.7.3 \
block-cipher-trait-0.6.2 \
block-padding-0.1.5 \
bumpalo-3.4.0 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.6 \
cc-1.0.58 \
cexpr-0.4.0 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clang-sys-0.29.3 \
clap-2.33.3 \
clipboard-0.5.0 \
clipboard-win-2.2.0 \
cloudabi-0.0.3 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
constant_time_eq-0.1.5 \
cookie-0.12.0 \
cookie_store-0.7.0 \
core-foundation-0.7.0 \
core-foundation-sys-0.5.1 \
core-foundation-sys-0.7.0 \
coreaudio-rs-0.9.1 \
coreaudio-sys-0.2.5 \
cpal-0.8.2 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.3 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
crypto-mac-0.7.0 \
ctr-0.3.2 \
cursive-0.15.0 \
cursive_core-0.1.1 \
darling-0.9.0 \
darling-0.10.2 \
darling_core-0.9.0 \
darling_core-0.10.2 \
darling_macro-0.9.0 \
darling_macro-0.10.2 \
dbus-0.8.4 \
derive_builder-0.7.2 \
derive_builder_core-0.5.0 \
digest-0.8.1 \
directories-2.0.2 \
dirs-1.0.5 \
dirs-sys-0.3.5 \
dotenv-0.13.0 \
dtoa-0.4.6 \
either-1.6.0 \
encoding_rs-0.8.23 \
enum-map-0.6.2 \
enum-map-derive-0.4.3 \
enumset-1.0.1 \
enumset_derive-0.5.0 \
env_logger-0.6.2 \
error-chain-0.12.4 \
failure-0.1.8 \
failure_derive-0.1.8 \
fake-simd-0.1.2 \
fern-0.5.9 \
flate2-1.0.16 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-cpupool-0.1.8 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
generic-array-0.12.3 \
getrandom-0.1.14 \
gimli-0.22.0 \
glob-0.3.0 \
h2-0.1.26 \
h2-0.2.6 \
hashbrown-0.8.2 \
heck-0.3.1 \
hermit-abi-0.1.15 \
hmac-0.7.1 \
http-0.1.21 \
http-0.2.1 \
http-body-0.1.0 \
http-body-0.3.1 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.11.27 \
hyper-0.12.35 \
hyper-0.13.7 \
hyper-proxy-0.4.1 \
hyper-tls-0.3.2 \
hyper-tls-0.4.3 \
ident_case-1.0.1 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.5.1 \
iovec-0.1.4 \
ipnet-2.3.0 \
itertools-0.8.2 \
itoa-0.4.6 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
lewton-0.9.4 \
libc-0.2.76 \
libdbus-sys-0.2.1 \
libloading-0.5.2 \
libm-0.2.1 \
libpulse-sys-0.0.0 \
librespot-audio-0.1.3 \
librespot-core-0.1.3 \
librespot-metadata-0.1.3 \
librespot-playback-0.1.3 \
librespot-protocol-0.1.3 \
linear-map-1.2.0 \
lock_api-0.3.4 \
log-0.3.9 \
log-0.4.11 \
mac-notification-sys-0.3.0 \
malloc_buf-0.0.6 \
maplit-1.0.2 \
matches-0.1.8 \
matrixmultiply-0.2.3 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.5 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.0 \
mio-0.6.22 \
mio-uds-0.6.8 \
miow-0.2.1 \
nalgebra-0.18.1 \
native-tls-0.2.4 \
ncurses-5.99.0 \
net2-0.2.34 \
nix-0.9.0 \
nom-5.1.2 \
notify-rust-4.0.0 \
num-0.3.0 \
num-bigint-0.2.6 \
num-complex-0.2.4 \
num-complex-0.3.0 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.2.4 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
numtoa-0.1.0 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
object-0.20.0 \
ogg-0.7.0 \
once_cell-1.4.1 \
opaque-debug-0.2.3 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
owning_ref-0.4.1 \
pancurses-0.16.1 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
pbkdf2-0.3.0 \
pdcurses-sys-0.7.1 \
peeking_take_while-0.1.2 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
portaudio-rs-0.3.2 \
portaudio-sys-0.1.1 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.19 \
protobuf-2.14.0 \
protobuf-codegen-2.14.0 \
protobuf-codegen-pure-2.14.0 \
publicsuffix-1.5.4 \
quick-error-1.2.3 \
quote-0.3.15 \
quote-0.6.13 \
quote-1.0.7 \
rand-0.3.23 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
random-0.12.2 \
rawpointer-0.2.1 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
redox_termios-0.1.1 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-syntax-0.6.18 \
relay-0.1.1 \
remove_dir_all-0.5.3 \
reqwest-0.9.24 \
reqwest-0.10.7 \
rodio-0.9.0 \
rspotify-0.10.0 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustc-hash-1.1.0 \
rustc-serialize-0.3.24 \
rustc_version-0.2.3 \
ryu-1.0.5 \
safemem-0.3.3 \
schannel-0.1.19 \
scoped-tls-0.1.2 \
scopeguard-1.1.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
serde_urlencoded-0.5.5 \
serde_urlencoded-0.6.1 \
sha-1-0.8.2 \
sha2-0.8.2 \
shannon-0.2.0 \
shell-words-0.1.0 \
shlex-0.1.1 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.1 \
slab-0.3.0 \
slab-0.4.2 \
smallvec-0.2.1 \
smallvec-0.6.13 \
socket2-0.3.12 \
stable_deref_trait-1.2.0 \
stdweb-0.1.3 \
stream-cipher-0.3.2 \
string-0.2.1 \
strsim-0.7.0 \
strsim-0.8.0 \
strsim-0.9.3 \
strum-0.8.0 \
strum-0.17.1 \
strum_macros-0.8.0 \
strum_macros-0.17.1 \
subtle-1.0.0 \
syn-0.11.11 \
syn-0.15.44 \
syn-1.0.38 \
synom-0.11.3 \
synstructure-0.12.4 \
take-0.1.0 \
tempfile-3.1.0 \
term_size-0.3.2 \
termcolor-1.1.0 \
termion-1.5.5 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.4 \
tokio-0.1.22 \
tokio-0.2.22 \
tokio-buf-0.1.1 \
tokio-codec-0.1.2 \
tokio-core-0.1.17 \
tokio-current-thread-0.1.7 \
tokio-executor-0.1.10 \
tokio-fs-0.1.7 \
tokio-io-0.1.13 \
tokio-proto-0.1.1 \
tokio-reactor-0.1.12 \
tokio-service-0.1.0 \
tokio-socks-0.2.2 \
tokio-sync-0.1.8 \
tokio-tcp-0.1.4 \
tokio-threadpool-0.1.18 \
tokio-timer-0.2.13 \
tokio-tls-0.3.1 \
tokio-udp-0.1.6 \
tokio-uds-0.2.7 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.19 \
tracing-core-0.1.14 \
try-lock-0.1.0 \
try-lock-0.2.3 \
try_from-0.3.2 \
typenum-1.12.0 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.0.4 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
url-1.7.2 \
url-2.1.1 \
uuid-0.7.4 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
vergen-3.1.0 \
version_check-0.9.2 \
void-1.0.2 \
want-0.0.4 \
want-0.2.0 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.4.17 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
webbrowser-0.5.5 \
widestring-0.4.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.5.1 \
winreg-0.6.2 \
winreg-0.7.0 \
winrt-0.4.0 \
winrt-notification-0.2.2 \
ws2_32-sys-0.2.1 \
x11-clipboard-0.3.3 \
xcb-0.8.2 \
xi-unicode-0.2.1 \
xml-rs-0.6.1
CARGO_FEATURES= --no-default-features cursive/pancurses-backend
PLIST_FILES= bin/ncspot
PORTDOCS= README.md
OPTIONS_DEFINE= CLIPBOARD DOCS MPRIS PORTAUDIO PULSEAUDIO
OPTIONS_DEFAULT= CLIPBOARD MPRIS PORTAUDIO
CLIPBOARD_DESC= Support for accessing X11 clipboard
MPRIS_DESC= D-Bus MPRIS support
CLIPBOARD_USES= python:3.6+,build xorg
CLIPBOARD_USE= XORG=xcb
CLIPBOARD_BINARY_ALIAS= python3=${PYTHON_CMD}
CLIPBOARD_VARS= CARGO_FEATURES+=share_clipboard
MPRIS_VARS= CARGO_FEATURES+=mpris
PORTAUDIO_LIB_DEPENDS= libportaudio.so:audio/portaudio
PORTAUDIO_VARS= CARGO_FEATURES+=portaudio_backend
PULSEAUDIO_LIB_DEPENDS= libpulse.so:audio/pulseaudio
PULSEAUDIO_USES= localbase:ldflags
PULSEAUDIO_VARS= CARGO_FEATURES+=pulseaudio_backend
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/ncspot
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_MAN} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/audio/spotify-tui/Makefile
===================================================================
--- head/audio/spotify-tui/Makefile (revision 552220)
+++ head/audio/spotify-tui/Makefile (revision 552221)
@@ -1,275 +1,276 @@
# $FreeBSD$
PORTNAME= spotify-tui
DISTVERSIONPREFIX= v
DISTVERSION= 0.22.0
+PORTREVISION= 1
CATEGORIES= audio
MAINTAINER= vulcan@wired.sh
COMMENT= Spotify for the terminal written in Rust
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo localbase:ldflags python:3.6+,build ssl xorg
USE_GITHUB= yes
GH_ACCOUNT= Rigellute
USE_XORG= xcb
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.51 \
base64-0.10.1 \
base64-0.11.0 \
base64-0.12.3 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-0.1.6 \
bumpalo-3.4.0 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.6 \
cassowary-0.3.0 \
cc-1.0.58 \
cfg-if-0.1.10 \
chrono-0.4.13 \
clap-2.33.3 \
clipboard-0.5.0 \
clipboard-win-2.2.0 \
cloudabi-0.0.3 \
cloudabi-0.1.0 \
constant_time_eq-0.1.5 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm-0.18.0 \
crossterm_winapi-0.6.1 \
darling-0.9.0 \
darling_core-0.9.0 \
darling_macro-0.9.0 \
derive_builder-0.7.2 \
derive_builder_core-0.5.0 \
dirs-3.0.1 \
dirs-sys-0.3.5 \
dotenv-0.13.0 \
dtoa-0.4.6 \
either-1.5.3 \
encoding_rs-0.8.23 \
env_logger-0.6.2 \
failure-0.1.8 \
failure_derive-0.1.8 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
getrandom-0.1.14 \
gimli-0.22.0 \
h2-0.2.6 \
hashbrown-0.8.1 \
hermit-abi-0.1.15 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.13.7 \
hyper-tls-0.4.3 \
ident_case-1.0.1 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.5.0 \
instant-0.1.7 \
iovec-0.1.4 \
itertools-0.8.2 \
itoa-0.4.6 \
js-sys-0.3.42 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.73 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
lock_api-0.4.1 \
log-0.4.11 \
malloc_buf-0.0.6 \
matches-0.1.8 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.0 \
mio-0.6.22 \
mio-0.7.0 \
mio-named-pipes-0.1.7 \
mio-uds-0.6.8 \
miow-0.2.1 \
miow-0.3.5 \
native-tls-0.2.4 \
net2-0.2.34 \
ntapi-0.3.4 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
object-0.20.0 \
once_cell-1.4.0 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
parking_lot-0.10.2 \
parking_lot-0.11.0 \
parking_lot_core-0.7.2 \
parking_lot_core-0.8.0 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pin-project-0.4.22 \
pin-project-internal-0.4.22 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.5.16 \
proc-macro-nested-0.1.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.19 \
quick-error-1.2.3 \
quote-0.6.13 \
quote-1.0.7 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
random-0.12.2 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
reqwest-0.10.6 \
rspotify-0.10.0 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustc-serialize-0.3.24 \
ryu-1.0.5 \
schannel-0.1.19 \
scopeguard-1.1.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
serde_yaml-0.8.13 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
slab-0.4.2 \
smallvec-1.4.1 \
socket2-0.3.12 \
strsim-0.7.0 \
strsim-0.8.0 \
syn-0.15.44 \
syn-1.0.35 \
synstructure-0.12.4 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.3 \
tokio-0.2.22 \
tokio-macros-0.2.5 \
tokio-socks-0.2.2 \
tokio-tls-0.3.1 \
tokio-util-0.3.1 \
tower-service-0.3.0 \
tracing-0.1.17 \
tracing-core-0.1.11 \
try-lock-0.2.3 \
tui-0.12.0 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
url-1.7.2 \
url-2.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.65 \
wasm-bindgen-backend-0.2.65 \
wasm-bindgen-futures-0.4.15 \
wasm-bindgen-macro-0.2.65 \
wasm-bindgen-macro-support-0.2.65 \
wasm-bindgen-shared-0.2.65 \
web-sys-0.3.42 \
webbrowser-0.5.5 \
widestring-0.4.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1 \
x11-clipboard-0.3.3 \
xcb-0.8.2 \
yaml-rust-0.4.4
BINARY_ALIAS= python3=${PYTHON_CMD}
SUB_FILES= pkg-message
PLIST_FILES= bin/spt
PORTDOCS= CHANGELOG.md README.md
OPTIONS_DEFINE= DOCS SPOTIFYD
SPOTIFYD_DESC= Lightweight Spotify client daemon
SPOTIFYD_RUN_DEPENDS= spotifyd:audio/spotifyd
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/spt
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/audio/spotifyd/Makefile
===================================================================
--- head/audio/spotifyd/Makefile (revision 552220)
+++ head/audio/spotifyd/Makefile (revision 552221)
@@ -1,394 +1,394 @@
# $FreeBSD$
PORTNAME= spotifyd
DISTVERSIONPREFIX= v
DISTVERSION= 0.2.24
-PORTREVISION= 9
+PORTREVISION= 10
CATEGORIES= audio
MAINTAINER= ports@FreeBSD.org
COMMENT= Spotify daemon
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENCE
LIB_DEPENDS= libogg.so:audio/libogg
USES= cargo ssl:build
USE_GITHUB= yes
GH_ACCOUNT= Spotifyd
USE_RC_SUBR= spotifyd
CARGO_CRATES= adler32-1.0.4 \
advapi32-sys-0.2.0 \
aes-0.3.2 \
aes-ctr-0.3.0 \
aes-soft-0.3.3 \
aesni-0.6.0 \
aho-corasick-0.7.6 \
alga-0.9.2 \
alsa-0.2.2 \
alsa-0.3.0 \
alsa-sys-0.1.2 \
ansi_term-0.11.0 \
approx-0.3.2 \
arc-swap-0.4.4 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
base64-0.9.3 \
base64-0.10.1 \
bindgen-0.51.1 \
bit-set-0.5.1 \
bit-vec-0.5.1 \
bitflags-0.3.3 \
bitflags-0.9.1 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-cipher-trait-0.6.2 \
block-modes-0.3.3 \
block-padding-0.1.5 \
boxfnonce-0.1.1 \
byte-tools-0.3.1 \
bytecount-0.4.0 \
byteorder-1.3.2 \
bytes-0.4.12 \
c2-chacha-0.2.3 \
cargo_metadata-0.6.4 \
cc-1.0.50 \
cexpr-0.3.6 \
cfg-if-0.1.10 \
chrono-0.4.10 \
clang-sys-0.28.1 \
clap-2.33.0 \
cloudabi-0.0.3 \
cookie-0.12.0 \
cookie_store-0.7.0 \
core-foundation-0.6.4 \
core-foundation-sys-0.5.1 \
core-foundation-sys-0.6.2 \
coreaudio-rs-0.9.1 \
coreaudio-sys-0.2.3 \
cpal-0.8.2 \
crc32fast-1.2.0 \
crossbeam-deque-0.7.2 \
crossbeam-epoch-0.8.0 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.0 \
crypto-mac-0.7.0 \
ctr-0.3.2 \
daemonize-0.4.1 \
darling-0.9.0 \
darling_core-0.9.0 \
darling_macro-0.9.0 \
dbus-0.2.3 \
dbus-0.6.5 \
dbus-tokio-0.2.1 \
derive_builder-0.7.2 \
derive_builder_core-0.5.0 \
digest-0.8.1 \
dotenv-0.13.0 \
dtoa-0.4.4 \
either-1.5.3 \
encoding_rs-0.8.22 \
env_logger-0.6.2 \
env_logger-0.7.1 \
error-chain-0.11.0 \
error-chain-0.12.1 \
failure-0.1.6 \
failure_derive-0.1.6 \
fake-simd-0.1.2 \
fern-0.5.9 \
flate2-1.0.13 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-cpupool-0.1.8 \
gcc-0.3.55 \
generic-array-0.12.3 \
gethostname-0.2.1 \
getopts-0.2.21 \
getrandom-0.1.14 \
glob-0.2.11 \
glob-0.3.0 \
h2-0.1.26 \
heck-0.3.1 \
hermit-abi-0.1.6 \
hex-0.3.2 \
hex-0.4.0 \
hkdf-0.7.1 \
hmac-0.7.1 \
http-0.1.21 \
http-body-0.1.0 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.11.27 \
hyper-0.12.35 \
hyper-proxy-0.4.1 \
hyper-tls-0.3.2 \
ident_case-1.0.1 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.3.0 \
iovec-0.1.4 \
itertools-0.8.2 \
itoa-0.4.4 \
kernel32-sys-0.2.2 \
keyring-0.7.1 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
lewton-0.9.4 \
libc-0.2.66 \
libdbus-sys-0.2.1 \
libloading-0.5.2 \
libm-0.1.4 \
libmdns-0.2.4 \
libpulse-sys-0.0.0 \
librespot-0.1.0 \
librespot-audio-0.1.0 \
librespot-connect-0.1.0 \
librespot-core-0.1.0 \
librespot-metadata-0.1.0 \
librespot-playback-0.1.0 \
librespot-protocol-0.1.0 \
librespot-tremor-0.1.0 \
linear-map-1.2.0 \
lock_api-0.3.3 \
log-0.3.9 \
log-0.4.8 \
matches-0.1.8 \
matrixmultiply-0.2.3 \
maybe-uninit-2.0.0 \
memchr-2.3.0 \
memoffset-0.5.3 \
mime-0.3.16 \
mime_guess-2.0.1 \
miniz_oxide-0.3.5 \
mio-0.6.21 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.3 \
multimap-0.4.0 \
nalgebra-0.18.1 \
native-tls-0.2.3 \
net2-0.2.33 \
nix-0.9.0 \
nix-0.10.0 \
nix-0.14.1 \
nom-4.2.3 \
num-0.2.1 \
num-bigint-0.2.5 \
num-complex-0.2.4 \
num-integer-0.1.42 \
num-iter-0.1.40 \
num-rational-0.2.3 \
num-traits-0.2.11 \
num_cpus-1.11.1 \
ogg-0.7.0 \
ogg-sys-0.0.9 \
opaque-debug-0.2.3 \
openssl-0.10.26 \
openssl-probe-0.1.2 \
openssl-sys-0.9.53 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
pbkdf2-0.3.0 \
peeking_take_while-0.1.2 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pkg-config-0.3.17 \
portaudio-rs-0.3.1 \
portaudio-sys-0.1.1 \
ppv-lite86-0.2.6 \
proc-macro-error-0.4.4 \
proc-macro-error-attr-0.4.3 \
proc-macro2-0.4.30 \
proc-macro2-1.0.7 \
protobuf-2.10.0 \
protobuf-codegen-2.10.0 \
protobuf-codegen-pure-2.10.0 \
publicsuffix-1.5.4 \
pulldown-cmark-0.2.0 \
quick-error-1.2.3 \
quote-0.6.13 \
quote-1.0.2 \
rand-0.3.23 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
random-0.12.2 \
rawpointer-0.2.1 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
regex-1.3.3 \
regex-syntax-0.6.13 \
relay-0.1.1 \
remove_dir_all-0.5.2 \
reqwest-0.9.17 \
result-1.0.0 \
rodio-0.9.0 \
rpassword-3.0.2 \
rspotify-0.7.0 \
rustc-demangle-0.1.16 \
rustc-hash-1.0.1 \
rustc-serialize-0.3.24 \
rustc_version-0.2.3 \
rustversion-1.0.1 \
ryu-1.0.2 \
safemem-0.3.3 \
same-file-1.0.6 \
schannel-0.1.16 \
scoped-tls-0.1.2 \
scopeguard-1.0.0 \
secret-service-1.0.0 \
security-framework-0.3.4 \
security-framework-sys-0.3.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.104 \
serde_derive-1.0.104 \
serde_ini-0.2.0 \
serde_json-1.0.44 \
serde_urlencoded-0.5.5 \
sha-1-0.8.2 \
sha2-0.8.1 \
shannon-0.2.0 \
shlex-0.1.1 \
signal-hook-0.1.12 \
signal-hook-registry-1.2.0 \
skeptic-0.13.4 \
slab-0.3.0 \
slab-0.4.2 \
smallvec-0.2.1 \
smallvec-0.6.13 \
smallvec-1.1.0 \
socket2-0.2.4 \
socket2-0.3.11 \
stdweb-0.1.3 \
stream-cipher-0.3.2 \
string-0.2.1 \
strsim-0.7.0 \
strsim-0.8.0 \
structopt-0.3.8 \
structopt-derive-0.4.1 \
subtle-1.0.0 \
syn-0.15.44 \
syn-1.0.13 \
syn-mid-0.4.0 \
synstructure-0.12.3 \
syslog-4.0.1 \
take-0.1.0 \
tempdir-0.3.7 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.42 \
tokio-0.1.22 \
tokio-buf-0.1.1 \
tokio-codec-0.1.1 \
tokio-core-0.1.17 \
tokio-current-thread-0.1.6 \
tokio-executor-0.1.9 \
tokio-fs-0.1.6 \
tokio-io-0.1.12 \
tokio-process-0.2.4 \
tokio-proto-0.1.1 \
tokio-reactor-0.1.11 \
tokio-service-0.1.0 \
tokio-signal-0.1.5 \
tokio-signal-0.2.7 \
tokio-sync-0.1.7 \
tokio-tcp-0.1.3 \
tokio-threadpool-0.1.17 \
tokio-timer-0.2.12 \
tokio-udp-0.1.5 \
tokio-uds-0.2.5 \
try-lock-0.1.0 \
try-lock-0.2.2 \
try_from-0.3.2 \
typenum-1.11.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.11 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
url-1.7.2 \
url-2.1.1 \
uuid-0.7.4 \
vcpkg-0.2.8 \
vec_map-0.8.1 \
vergen-3.0.4 \
version_check-0.1.5 \
version_check-0.9.1 \
void-1.0.2 \
walkdir-2.3.0 \
want-0.0.4 \
want-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
webbrowser-0.5.2 \
whoami-0.7.0 \
widestring-0.4.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.3 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1 \
xdg-2.2.0
CARGO_FEATURES= --no-default-features
PLIST_FILES= bin/spotifyd \
"@sample etc/spotifyd.conf.sample"
PORTDOCS= README.md
OPTIONS_DEFINE= DBUS DOCS PORTAUDIO PULSEAUDIO
OPTIONS_DEFAULT= PORTAUDIO
DBUS_DESC= D-Bus MPRIS support
DBUS_LIB_DEPENDS= libdbus-1.so:devel/dbus
DBUS_VARS= CARGO_FEATURES+=dbus_mpris
PORTAUDIO_LIB_DEPENDS= libportaudio.so:audio/portaudio
PORTAUDIO_VARS= CARGO_FEATURES+=portaudio_backend
PULSEAUDIO_LIB_DEPENDS= libpulse.so:audio/pulseaudio
PULSEAUDIO_VARS= CARGO_FEATURES+=pulseaudio_backend
post-patch:
@${REINPLACE_CMD} 's,/etc/,${PREFIX}/etc/,g' ${WRKSRC}/src/config.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/spotifyd
${INSTALL_DATA} ${FILESDIR}/spotifyd.conf ${STAGEDIR}${PREFIX}/etc/spotifyd.conf.sample
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_MAN} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/benchmarks/hyperfine/Makefile
===================================================================
--- head/benchmarks/hyperfine/Makefile (revision 552220)
+++ head/benchmarks/hyperfine/Makefile (revision 552221)
@@ -1,103 +1,103 @@
# $FreeBSD$
PORTNAME= hyperfine
DISTVERSIONPREFIX= v
DISTVERSION= 1.10.0
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= benchmarks
MAINTAINER= pizzamig@FreeBSD.org
COMMENT= Command-line benchmarking tool
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
BROKEN_i386= LLVM ERROR: No support for lowering a copy into EFLAGS when used by this instruction
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= sharkdp
PLIST_FILES= bin/hyperfine
CARGO_CRATES= ansi_term-0.11.0 \
approx-0.3.2 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
bitflags-1.2.1 \
bstr-0.2.13 \
byteorder-1.3.4 \
cfg-if-0.1.10 \
clap-2.33.1 \
cloudabi-0.0.3 \
colored-1.9.3 \
console-0.11.3 \
csv-1.1.3 \
csv-core-0.1.10 \
encode_unicode-0.3.6 \
fuchsia-cprng-0.1.1 \
getrandom-0.1.14 \
hermit-abi-0.1.13 \
indicatif-0.14.0 \
itoa-0.4.5 \
lazy_static-1.4.0 \
libc-0.2.70 \
memchr-2.3.3 \
num-0.2.1 \
num-bigint-0.2.6 \
num-complex-0.2.4 \
num-integer-0.1.42 \
num-iter-0.1.40 \
num-rational-0.2.4 \
num-traits-0.2.11 \
number_prefix-0.3.0 \
ppv-lite86-0.2.8 \
proc-macro2-1.0.17 \
quote-1.0.6 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
regex-1.3.7 \
regex-automata-0.1.9 \
regex-syntax-0.6.17 \
rust_decimal-1.6.0 \
ryu-1.0.4 \
serde-1.0.110 \
serde_derive-1.0.110 \
serde_json-1.0.53 \
statistical-1.0.0 \
strsim-0.8.0 \
syn-1.0.24 \
term_size-0.3.2 \
terminal_size-0.1.12 \
termios-0.3.2 \
textwrap-0.11.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.2 \
version_check-0.9.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/hyperfine
.include <bsd.port.mk>
Index: head/deskutils/just/Makefile
===================================================================
--- head/deskutils/just/Makefile (revision 552220)
+++ head/deskutils/just/Makefile (revision 552221)
@@ -1,84 +1,85 @@
# $FreeBSD$
PORTNAME= just
DISTVERSIONPREFIX= v
DISTVERSION= 0.8.0
+PORTREVISION= 1
CATEGORIES= deskutils
MAINTAINER= yuri@FreeBSD.org
COMMENT= Just a command runner: handy way to save/run project-specific commands
LICENSE= CC0-1.0
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= casey
CARGO_CRATES= aho-corasick-0.7.13 \
ansi_term-0.11.0 \
ansi_term-0.12.1 \
atty-0.2.14 \
bitflags-1.2.1 \
cc-1.0.60 \
cfg-if-0.1.10 \
clap-2.33.3 \
ctor-0.1.16 \
ctrlc-3.1.6 \
derivative-2.1.1 \
difference-2.0.0 \
doc-comment-0.3.3 \
dotenv-0.15.0 \
edit-distance-2.1.0 \
env_logger-0.7.1 \
executable-path-1.0.0 \
getrandom-0.1.15 \
hermit-abi-0.1.16 \
humantime-1.3.0 \
lazy_static-1.4.0 \
libc-0.2.78 \
log-0.4.11 \
memchr-2.3.3 \
nix-0.17.0 \
output_vt100-0.1.2 \
ppv-lite86-0.2.9 \
pretty_assertions-0.6.1 \
proc-macro2-1.0.24 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
snafu-0.6.9 \
snafu-derive-0.6.9 \
strsim-0.8.0 \
syn-1.0.42 \
target-1.0.0 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
vec_map-0.8.2 \
void-1.0.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
which-4.0.2 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/${PORTNAME}
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/devel/bingrep/Makefile
===================================================================
--- head/devel/bingrep/Makefile (revision 552220)
+++ head/devel/bingrep/Makefile (revision 552221)
@@ -1,115 +1,115 @@
# $FreeBSD$
PORTNAME= bingrep
PORTVERSION= 0.8.2
-PORTREVISION= 9
+PORTREVISION= 10
CATEGORIES= devel
MASTER_SITES= CRATESIO
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= ports@FreeBSD.org
COMMENT= Grep through binaries
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
CARGO_CRATES= aho-corasick-0.7.6 \
ansi_term-0.11.0 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-0.1.7 \
backtrace-0.3.42 \
backtrace-sys-0.1.32 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
bstr-0.2.10 \
byteorder-1.3.2 \
cc-1.0.50 \
cfg-if-0.1.10 \
clap-2.33.0 \
constant_time_eq-0.1.5 \
cpp_demangle-0.2.14 \
crossbeam-utils-0.7.0 \
csv-1.1.3 \
csv-core-0.1.6 \
dirs-1.0.5 \
encode_unicode-0.3.6 \
env_logger-0.7.1 \
failure-0.1.6 \
failure_derive-0.1.6 \
fuchsia-cprng-0.1.1 \
getrandom-0.1.14 \
glob-0.3.0 \
goblin-0.2.0 \
heck-0.3.1 \
hermit-abi-0.1.6 \
hexplay-0.2.1 \
humantime-1.3.0 \
itoa-0.4.5 \
lazy_static-1.4.0 \
libc-0.2.66 \
log-0.4.8 \
memchr-2.3.0 \
memrange-0.1.3 \
metagoblin-0.3.1 \
plain-0.2.3 \
prettytable-rs-0.8.0 \
proc-macro-error-0.2.6 \
proc-macro-error-0.4.5 \
proc-macro-error-attr-0.4.5 \
proc-macro2-1.0.8 \
quick-error-1.2.3 \
quote-1.0.2 \
rand-0.3.23 \
rand-0.4.6 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.3 \
regex-automata-0.1.8 \
regex-syntax-0.6.13 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustc-serialize-0.3.24 \
rustversion-1.0.2 \
ryu-1.0.2 \
scroll-0.10.1 \
scroll_derive-0.10.1 \
serde-1.0.104 \
strsim-0.8.0 \
structopt-0.3.8 \
structopt-derive-0.3.5 \
structopt-derive-0.4.1 \
syn-1.0.14 \
syn-mid-0.4.0 \
synstructure-0.12.3 \
term-0.5.2 \
termcolor-0.3.6 \
termcolor-1.1.0 \
textwrap-0.11.0 \
theban_interval_tree-0.7.1 \
thread_local-1.0.1 \
time-0.1.42 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.3 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-0.1.6
PLIST_FILES= bin/bingrep
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/bingrep
.include <bsd.port.mk>
Index: head/devel/cargo-c/Makefile
===================================================================
--- head/devel/cargo-c/Makefile (revision 552220)
+++ head/devel/cargo-c/Makefile (revision 552221)
@@ -1,158 +1,159 @@
# $FreeBSD$
PORTNAME= cargo-c
DISTVERSION= 0.6.13
+PORTREVISION= 1
CATEGORIES= devel
MASTER_SITES= CRATESIO
# XXX Teach USES=cargo to have proper default
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= jbeich@FreeBSD.org
COMMENT= Cargo C-ABI helpers
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libgit2.so:devel/libgit2 \
libcurl.so:ftp/curl \
libssh2.so:security/libssh2
USES= cargo ssl
PLIST_FILES= bin/cargo-capi \
bin/cargo-cbuild \
bin/cargo-cinstall
CARGO_CRATES= adler-0.2.3 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
atty-0.2.14 \
autocfg-1.0.1 \
bitflags-1.2.1 \
bitmaps-2.1.0 \
bstr-0.2.13 \
bytesize-1.0.1 \
cargo-0.45.1 \
cargo-platform-0.1.1 \
cbindgen-0.14.4 \
cc-1.0.59 \
cfg-if-0.1.10 \
clap-2.33.3 \
commoncrypto-0.2.0 \
commoncrypto-sys-0.2.0 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crates-io-0.31.1 \
crc32fast-1.2.0 \
crossbeam-utils-0.7.2 \
crypto-hash-0.3.4 \
curl-0.4.33 \
curl-sys-0.4.36+curl-7.71.1 \
env_logger-0.7.1 \
filetime-0.2.12 \
flate2-1.0.17 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fwdansi-1.1.0 \
getrandom-0.1.14 \
git2-0.13.11 \
git2-curl-0.14.1 \
glob-0.3.0 \
globset-0.4.5 \
heck-0.3.1 \
hermit-abi-0.1.15 \
hex-0.3.2 \
hex-0.4.2 \
home-0.5.3 \
humantime-1.3.0 \
humantime-2.0.1 \
idna-0.2.0 \
ignore-0.4.16 \
im-rc-14.3.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
libc-0.2.76 \
libgit2-sys-0.12.13+1.0.1 \
libnghttp2-sys-0.1.4+1.41.0 \
libssh2-sys-0.2.19 \
libz-sys-1.1.2 \
log-0.4.11 \
matches-0.1.8 \
memchr-2.3.3 \
miniz_oxide-0.4.1 \
miow-0.3.5 \
num_cpus-1.13.0 \
opener-0.4.1 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
percent-encoding-2.1.0 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro2-1.0.20 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_xoshiro-0.4.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rustc-workspace-hack-1.0.0 \
rustfix-0.5.1 \
ryu-1.0.5 \
same-file-1.0.6 \
schannel-0.1.19 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_ignored-0.1.2 \
serde_json-1.0.57 \
shell-escape-0.1.5 \
sized-chunks-0.5.3 \
socket2-0.3.12 \
strip-ansi-escapes-0.1.0 \
strsim-0.8.0 \
structopt-0.3.17 \
structopt-derive-0.4.10 \
syn-1.0.40 \
tar-0.4.30 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
tinyvec-0.3.4 \
toml-0.5.6 \
typenum-1.12.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
utf8parse-0.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
vte-0.3.3 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
post-patch:
@${REINPLACE_CMD} -e 's,libdir.join("pkgconfig,prefix.join("libdata/pkgconfig,' \
${WRKSRC}/src/install.rs
.include <bsd.port.mk>
Index: head/devel/cargo-generate/Makefile
===================================================================
--- head/devel/cargo-generate/Makefile (revision 552220)
+++ head/devel/cargo-generate/Makefile (revision 552221)
@@ -1,215 +1,216 @@
# $FreeBSD$
PORTNAME= cargo-generate
DISTVERSIONPREFIX= v
DISTVERSION= 0.5.1
+PORTREVISION= 1
CATEGORIES= devel
MAINTAINER= vulcan@wired.sh
COMMENT= Quickly generate Rust project templates from existing git repositories
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
LIB_DEPENDS= libcurl.so:ftp/curl \
libgit2.so:devel/libgit2 \
libssh2.so:security/libssh2
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= ashleygwilliams
CARGO_CRATES= adler-0.2.3 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
anymap-0.12.1 \
assert_cmd-1.0.1 \
atty-0.2.14 \
autocfg-1.0.1 \
bitflags-1.2.1 \
bitmaps-2.1.0 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
bstr-0.2.13 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytesize-1.0.1 \
cargo-0.46.1 \
cargo-platform-0.1.1 \
cc-1.0.59 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clap-2.33.3 \
commoncrypto-0.2.0 \
commoncrypto-sys-0.2.0 \
console-0.11.3 \
console-0.12.0 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crates-io-0.31.1 \
crc32fast-1.2.0 \
crossbeam-utils-0.7.2 \
crypto-hash-0.3.4 \
curl-0.4.33 \
curl-sys-0.4.36+curl-7.71.1 \
dialoguer-0.6.2 \
difference-2.0.0 \
digest-0.8.1 \
doc-comment-0.3.3 \
either-1.6.0 \
encode_unicode-0.3.6 \
env_logger-0.7.1 \
fake-simd-0.1.2 \
filetime-0.2.12 \
flate2-1.0.17 \
float-cmp-0.8.0 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fwdansi-1.1.0 \
generic-array-0.12.3 \
getrandom-0.1.15 \
git2-0.13.11 \
git2-curl-0.14.1 \
glob-0.3.0 \
globset-0.4.5 \
heck-0.3.1 \
hermit-abi-0.1.15 \
hex-0.3.2 \
hex-0.4.2 \
home-0.5.3 \
humantime-1.3.0 \
humantime-2.0.1 \
idna-0.2.0 \
ignore-0.4.16 \
im-rc-15.0.0 \
indicatif-0.15.0 \
itertools-0.9.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
kstring-1.0.0 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
libc-0.2.77 \
libgit2-sys-0.12.13+1.0.1 \
libnghttp2-sys-0.1.4+1.41.0 \
libssh2-sys-0.2.19 \
libz-sys-1.1.2 \
liquid-0.21.4 \
liquid-core-0.21.2 \
liquid-derive-0.21.0 \
liquid-lib-0.21.1 \
log-0.4.11 \
maplit-1.0.2 \
matches-0.1.8 \
memchr-2.3.3 \
miniz_oxide-0.4.2 \
miow-0.3.5 \
normalize-line-endings-0.3.0 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
number_prefix-0.3.0 \
once_cell-1.4.1 \
opaque-debug-0.2.3 \
opener-0.4.1 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro-hack-0.5.18 \
proc-macro2-1.0.21 \
proc-quote-0.3.2 \
proc-quote-impl-0.3.2 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_xoshiro-0.4.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rustc-workspace-hack-1.0.0 \
rustfix-0.5.1 \
ryu-1.0.5 \
same-file-1.0.6 \
schannel-0.1.19 \
semver-0.10.0 \
semver-parser-0.7.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_ignored-0.1.2 \
serde_json-1.0.57 \
sha-1-0.8.2 \
shell-escape-0.1.5 \
sized-chunks-0.6.2 \
socket2-0.3.15 \
strip-ansi-escapes-0.1.0 \
strsim-0.8.0 \
structopt-0.3.17 \
structopt-derive-0.4.10 \
syn-1.0.41 \
tar-0.4.30 \
tempfile-3.1.0 \
termcolor-1.1.0 \
terminal_size-0.1.13 \
termios-0.3.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.44 \
tinyvec-0.3.4 \
toml-0.5.6 \
treeline-0.1.0 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
utf8parse-0.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
vte-0.3.3 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/${PORTNAME}
PORTDOCS= CHANGELOG.md CODE_OF_CONDUCT.md CONTRIBUTING.md README.md \
TEMPLATES.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/devel/desed/Makefile
===================================================================
--- head/devel/desed/Makefile (revision 552220)
+++ head/devel/desed/Makefile (revision 552221)
@@ -1,80 +1,80 @@
# $FreeBSD$
PORTNAME= desed
DISTVERSION= 1.2.0
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= devel
MASTER_SITES= CRATESIO
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= yuri@FreeBSD.org
COMMENT= Demystify and debug sed scripts from the comfort of your terminal
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENSE
RUN_DEPENDS= gsed:textproc/gsed
USES= cargo
PLIST_FILES= bin/${PORTNAME}
CARGO_CRATES= ansi_term-0.11.0 \
anyhow-1.0.32 \
arc-swap-0.4.7 \
atty-0.2.14 \
bitflags-1.2.1 \
bytes-0.5.6 \
cassowary-0.3.0 \
cfg-if-0.1.10 \
clap-2.33.3 \
cloudabi-0.0.3 \
crossterm-0.17.6 \
crossterm_winapi-0.6.1 \
either-1.6.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-core-0.3.5 \
hermit-abi-0.1.15 \
inotify-0.8.3 \
inotify-sys-0.1.3 \
iovec-0.1.4 \
itertools-0.9.0 \
kernel32-sys-0.2.2 \
kqueue-1.0.2 \
kqueue-sys-1.0.1 \
lazy_static-1.4.0 \
libc-0.2.78 \
lock_api-0.3.4 \
log-0.4.8 \
mio-0.6.22 \
mio-0.7.0 \
miow-0.2.1 \
miow-0.3.5 \
net2-0.2.35 \
ntapi-0.3.4 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
pin-project-lite-0.1.10 \
redox_syscall-0.1.57 \
scopeguard-1.1.0 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
slab-0.4.2 \
smallvec-1.4.1 \
socket2-0.3.12 \
strsim-0.8.0 \
textwrap-0.11.0 \
tokio-0.2.22 \
tui-0.9.5 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
vec_map-0.8.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1
.include <bsd.port.mk>
Index: head/devel/dtool/Makefile
===================================================================
--- head/devel/dtool/Makefile (revision 552220)
+++ head/devel/dtool/Makefile (revision 552221)
@@ -1,188 +1,189 @@
# $FreeBSD$
PORTNAME= dtool
DISTVERSIONPREFIX= v
DISTVERSION= 0.10.1
+PORTREVISION= 1
CATEGORIES= devel
MAINTAINER= vulcan@wired.sh
COMMENT= Command-line tool collection to assist development
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= guoxbin
CARGO_CRATES= aho-corasick-0.7.6 \
ansi_term-0.11.0 \
anyhow-1.0.26 \
arrayref-0.3.5 \
arrayvec-0.4.12 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-buffer-0.9.0 \
block-padding-0.1.5 \
bs58-0.3.0 \
bstr-0.2.9 \
build_const-0.2.1 \
bumpalo-3.2.0 \
byte-tools-0.3.1 \
byteorder-1.3.2 \
cc-1.0.41 \
cfg-if-0.1.10 \
chrono-0.4.10 \
clap-2.33.0 \
clear_on_drop-0.2.3 \
cloudabi-0.0.3 \
const-oid-0.1.0 \
constant_time_eq-0.1.5 \
cpuid-bool-0.1.2 \
crc-1.8.1 \
crossbeam-utils-0.6.6 \
csv-1.1.2 \
csv-core-0.1.6 \
curve25519-dalek-1.2.3 \
digest-0.8.1 \
digest-0.9.0 \
dirs-1.0.5 \
dtoa-0.4.4 \
ecdsa-0.7.2 \
elliptic-curve-0.5.0 \
encode_unicode-0.3.6 \
entities-1.0.1 \
escaper-0.1.0 \
failure-0.1.6 \
fake-simd-0.1.2 \
fuchsia-cprng-0.1.1 \
gcc-0.3.55 \
generic-array-0.12.3 \
generic-array-0.14.4 \
getrandom-0.1.14 \
heck-0.3.1 \
hermit-abi-0.1.6 \
hex-0.4.0 \
indexmap-1.3.1 \
itoa-0.4.4 \
js-sys-0.3.35 \
k256-0.4.2 \
keccak-0.1.0 \
lazy_static-1.4.0 \
libc-0.2.66 \
linked-hash-map-0.5.3 \
log-0.4.8 \
madato-0.5.3 \
md5-0.7.0 \
memchr-2.3.0 \
merlin-1.3.0 \
nodrop-0.1.14 \
nom-4.2.3 \
num-integer-0.1.42 \
num-traits-0.2.11 \
opaque-debug-0.2.3 \
opaque-debug-0.3.0 \
parity-codec-3.5.4 \
prettytable-rs-0.8.0 \
proc-macro2-1.0.8 \
quote-1.0.2 \
rand-0.3.23 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand_chacha-0.1.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.3 \
regex-1.3.3 \
regex-automata-0.1.8 \
regex-syntax-0.6.13 \
ring-fork-dtool-0.16.13 \
ripemd160-0.8.0 \
rust-argon2-0.6.1 \
rust-crypto-0.2.36 \
rustc-serialize-0.3.24 \
ryu-1.0.2 \
schnorrkel-0.8.5 \
secp256k1-0.15.5 \
secp256k1-0.17.2 \
secp256k1-sys-0.1.2 \
serde-1.0.104 \
serde_derive-1.0.104 \
serde_test-1.0.104 \
serde_yaml-0.7.5 \
sha2-0.8.1 \
sha2-0.9.1 \
sha3-0.8.2 \
signatory-0.21.0 \
signatory-secp256k1-0.21.0 \
signature-1.2.2 \
signature_derive-1.0.0-pre.2 \
sourcefile-0.1.4 \
spin-0.5.2 \
strsim-0.8.0 \
subtle-2.2.2 \
subtle-encoding-0.5.1 \
syn-1.0.14 \
synstructure-0.12.3 \
term-0.5.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.42 \
twox-hash-1.5.0 \
typenum-1.12.0 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
untrusted-0.7.0 \
urlencoding-1.0.0 \
vec_map-0.8.1 \
version_check-0.1.5 \
version_check-0.9.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.58 \
wasm-bindgen-backend-0.2.58 \
wasm-bindgen-macro-0.2.58 \
wasm-bindgen-macro-support-0.2.58 \
wasm-bindgen-shared-0.2.58 \
wasm-bindgen-webidl-0.2.58 \
web-sys-0.3.35 \
weedle-0.10.0 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
yaml-rust-0.4.3 \
yogcrypt-0.0.0 \
zeroize-0.9.3 \
zeroize-1.1.0 \
zeroize_derive-1.0.0
PLIST_FILES= bin/${PORTNAME}
_DOCS= docs/Usage.md README.md
PORTDOCS= ${DOCS:T}
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC} && ${INSTALL_DATA} ${_DOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/devel/gbump/Makefile
===================================================================
--- head/devel/gbump/Makefile (revision 552220)
+++ head/devel/gbump/Makefile (revision 552221)
@@ -1,70 +1,70 @@
# $FreeBSD$
PORTNAME= gbump
PORTVERSION= 1.0.1
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= devel
MASTER_SITES= CRATESIO
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= nbari@tequila.io
COMMENT= Git tag semantic version bumper
LICENSE= BSD3CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libgit2.so:devel/libgit2 \
libssh2.so:security/libssh2
USES= cargo ssl
CARGO_CRATES= aho-corasick-0.7.6 \
ansi_term-0.11.0 \
atty-0.2.13 \
autocfg-0.1.7 \
bitflags-1.2.1 \
cc-1.0.47 \
cfg-if-0.1.10 \
clap-2.33.0 \
getrandom-0.1.13 \
git2-0.10.1 \
hermit-abi-0.1.3 \
idna-0.2.0 \
jobserver-0.1.17 \
lazy_static-1.4.0 \
libc-0.2.65 \
libgit2-sys-0.9.1 \
libssh2-sys-0.2.13 \
libz-sys-1.0.25 \
log-0.4.8 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.2.1 \
num_cpus-1.11.1 \
openssl-probe-0.1.2 \
openssl-sys-0.9.52 \
percent-encoding-2.1.0 \
pkg-config-0.3.17 \
regex-1.3.1 \
regex-syntax-0.6.12 \
smallvec-0.6.13 \
strsim-0.8.0 \
textwrap-0.11.0 \
thread_local-0.3.6 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.9 \
unicode-width-0.1.6 \
url-2.1.0 \
vcpkg-0.2.7 \
vec_map-0.8.1 \
wasi-0.7.0 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/gbump
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/gbump
.include <bsd.port.mk>
Index: head/devel/git-absorb/Makefile
===================================================================
--- head/devel/git-absorb/Makefile (revision 552220)
+++ head/devel/git-absorb/Makefile (revision 552221)
@@ -1,88 +1,88 @@
# $FreeBSD$
PORTNAME= git-absorb
DISTVERSION= 0.5.0
-PORTREVISION= 15
+PORTREVISION= 16
CATEGORIES= devel
MAINTAINER= greg@unrelenting.technology
COMMENT= Git command for automating fixup/autosquash commits
LICENSE= BSD3CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE.md
LIB_DEPENDS= libgit2.so:devel/libgit2
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= tummychow
CARGO_CRATES= ansi_term-0.10.2 \
atty-0.2.12 \
autocfg-0.1.4 \
backtrace-0.3.32 \
backtrace-sys-0.1.30 \
bitflags-1.1.0 \
cc-1.0.37 \
cfg-if-0.1.9 \
chrono-0.4.7 \
clap-2.30.0 \
failure-0.1.5 \
failure_derive-0.1.5 \
fuchsia-cprng-0.1.1 \
git2-0.9.1 \
idna-0.1.5 \
isatty-0.1.9 \
kernel32-sys-0.2.2 \
lazy_static-1.3.0 \
libc-0.2.59 \
libgit2-sys-0.8.1 \
libz-sys-1.0.25 \
log-0.4.7 \
matches-0.1.8 \
memchr-2.0.2 \
num-integer-0.1.41 \
num-traits-0.2.8 \
percent-encoding-1.0.1 \
pkg-config-0.3.14 \
proc-macro2-0.4.30 \
quote-0.6.13 \
rand-0.4.6 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
remove_dir_all-0.5.2 \
rustc-demangle-0.1.15 \
slog-2.1.1 \
slog-async-2.2.0 \
slog-term-2.3.0 \
smallvec-0.6.10 \
strsim-0.7.0 \
syn-0.15.39 \
synstructure-0.10.2 \
take_mut-0.2.2 \
tempdir-0.3.7 \
term-0.4.6 \
textwrap-0.9.0 \
thread_local-0.3.6 \
time-0.1.42 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
url-1.7.2 \
vcpkg-0.2.7 \
vec_map-0.8.1 \
winapi-0.2.8 \
winapi-0.3.7 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/git-absorb
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/git-absorb
.include <bsd.port.mk>
Index: head/devel/git-delta/Makefile
===================================================================
--- head/devel/git-delta/Makefile (revision 552220)
+++ head/devel/git-delta/Makefile (revision 552221)
@@ -1,141 +1,141 @@
# $FreeBSD$
PORTNAME= delta
DISTVERSION= 0.0.16
-PORTREVISION= 8
+PORTREVISION= 9
CATEGORIES= devel
PKGNAMEPREFIX= git-
MAINTAINER= greg@unrelenting.technology
COMMENT= Syntax-highlighting pager for git/diff
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= dandavison
LIB_DEPENDS+= libonig.so:devel/oniguruma
CARGO_CRATES= adler32-1.0.3 \
aho-corasick-0.7.6 \
ansi_term-0.11.0 \
arrayref-0.3.5 \
arrayvec-0.5.1 \
atty-0.2.13 \
autocfg-0.1.5 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
base64-0.10.1 \
bincode-1.1.4 \
bitflags-1.1.0 \
blake2b_simd-0.5.9 \
box_drawing-0.1.2 \
byteorder-1.3.2 \
cc-1.0.38 \
cfg-if-0.1.9 \
clap-2.33.0 \
clicolors-control-1.0.0 \
cloudabi-0.0.3 \
console-0.7.7 \
constant_time_eq-0.1.4 \
crc32fast-1.2.0 \
crossbeam-utils-0.6.6 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
either-1.5.2 \
encode_unicode-0.3.5 \
error-chain-0.12.1 \
failure-0.1.6 \
failure_derive-0.1.6 \
flate2-1.0.9 \
fnv-1.0.6 \
fuchsia-cprng-0.1.1 \
heck-0.3.1 \
humantime-1.2.0 \
itertools-0.8.0 \
itoa-0.4.4 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.60 \
line-wrap-0.1.1 \
linked-hash-map-0.5.2 \
lock_api-0.3.1 \
memchr-2.2.1 \
miniz-sys-0.1.12 \
miniz_oxide-0.2.2 \
miniz_oxide_c_api-0.2.2 \
onig-4.3.2 \
onig_sys-69.1.0 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
pkg-config-0.3.15 \
plist-0.4.2 \
proc-macro2-0.4.30 \
proc-macro2-1.0.6 \
quick-error-1.2.2 \
quote-0.6.13 \
quote-1.0.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_os-0.1.3 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
regex-1.2.1 \
regex-syntax-0.6.11 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
ryu-1.0.0 \
safemem-0.3.1 \
same-file-1.0.5 \
scopeguard-1.0.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.98 \
serde_derive-1.0.98 \
serde_json-1.0.40 \
shell-words-0.1.0 \
smallvec-0.6.10 \
strsim-0.8.0 \
structopt-0.2.18 \
structopt-derive-0.2.18 \
syn-0.15.43 \
syn-1.0.11 \
synstructure-0.12.3 \
syntect-3.2.0 \
termios-0.3.1 \
textwrap-0.11.0 \
thread_local-0.3.6 \
unicode-segmentation-1.3.0 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
version_check-0.1.5 \
walkdir-2.2.9 \
winapi-0.3.7 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xml-rs-0.8.0 \
yaml-rust-0.4.3
CONFLICTS_INSTALL= devel/delta
PLIST_FILES= bin/delta
OPTIONS_DEFINE= BASH
OPTIONS_DEFAULT= BASH
BASH_PLIST_FILES= etc/bash_completion.d/_delta.bash
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/delta
do-install-BASH-on:
${MKDIR} ${STAGEDIR}${LOCALBASE}/etc/bash_completion.d
${INSTALL_DATA} ${WRKSRC}/completion/bash/completion.sh \
${STAGEDIR}${LOCALBASE}/etc/bash_completion.d/_delta.bash
.include <bsd.port.mk>
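The BASH option above installs the upstream completion script under etc/bash_completion.d; BASH_PLIST_FILES adds the file to the packing list only when the option is on, so both flavours of the package stay consistent. A sketch of the same idea for a hypothetical program prog whose script ships as completion/prog.bash (paths are illustrative; ${PREFIX} and ${LOCALBASE} point at the same directory in the default configuration, which is why the staged path above works either way):

OPTIONS_DEFINE=		BASH
OPTIONS_DEFAULT=	BASH
BASH_PLIST_FILES=	etc/bash_completion.d/prog.bash
post-install-BASH-on:
	${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d
	${INSTALL_DATA} ${WRKSRC}/completion/prog.bash \
		${STAGEDIR}${PREFIX}/etc/bash_completion.d/prog.bash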
Index: head/devel/gitui/Makefile
===================================================================
--- head/devel/gitui/Makefile (revision 552220)
+++ head/devel/gitui/Makefile (revision 552221)
@@ -1,171 +1,172 @@
# $FreeBSD$
PORTNAME= gitui
DISTVERSIONPREFIX= v
DISTVERSION= 0.10.1
+PORTREVISION= 1
CATEGORIES= devel
MAINTAINER= yuri@FreeBSD.org
COMMENT= Terminal UI for git
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE.md
LIB_DEPENDS= libgit2.so:devel/libgit2
USES= cargo pkgconfig localbase:ldflags python:build xorg
USE_GITHUB= yes
GH_ACCOUNT= extrawurst
USE_XORG= xcb
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.2 \
ahash-0.3.8 \
anyhow-1.0.32 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.4.12 \
arrayvec-0.5.1 \
autocfg-1.0.0 \
backtrace-0.3.50 \
base64-0.11.0 \
base64-0.12.3 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-0.1.6 \
bytemuck-1.2.0 \
bytesize-1.0.1 \
cassowary-0.3.0 \
cc-1.0.58 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clap-2.33.3 \
clipboard-0.5.0 \
clipboard-win-2.2.0 \
cloudabi-0.0.3 \
cloudabi-0.1.0 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
constant_time_eq-0.1.5 \
cpp_demangle-0.3.0 \
crossbeam-channel-0.4.3 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
debugid-0.7.2 \
dirs-3.0.1 \
dirs-sys-0.3.5 \
either-1.5.3 \
failure-0.1.8 \
failure_derive-0.1.8 \
getrandom-0.1.14 \
gimli-0.22.0 \
git2-0.13.10 \
glob-0.3.0 \
hermit-abi-0.1.15 \
idna-0.2.0 \
indexmap-1.4.0 \
inferno-0.10.0 \
instant-0.1.6 \
itertools-0.9.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
libc-0.2.72 \
libgit2-sys-0.12.12+1.0.1 \
libz-sys-1.1.0 \
lock_api-0.3.4 \
lock_api-0.4.1 \
log-0.4.11 \
malloc_buf-0.0.6 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.5 \
miniz_oxide-0.4.0 \
mio-0.7.0 \
miow-0.3.5 \
msvc-demangler-0.8.0 \
nix-0.17.0 \
nodrop-0.1.14 \
ntapi-0.3.4 \
num-format-0.4.0 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
object-0.20.0 \
parking_lot-0.10.2 \
parking_lot-0.11.0 \
parking_lot_core-0.7.2 \
parking_lot_core-0.8.0 \
percent-encoding-2.1.0 \
pkg-config-0.3.17 \
pprof-0.3.18 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.5.16 \
proc-macro2-1.0.18 \
quick-xml-0.18.1 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-core-1.8.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
remove_dir_all-0.5.3 \
rgb-0.8.20 \
ron-0.6.1 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
scopeguard-1.1.0 \
serde-1.0.115 \
serde_derive-1.0.115 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
simplelog-0.8.0 \
smallvec-1.4.1 \
socket2-0.3.12 \
stable_deref_trait-1.1.1 \
str_stack-0.1.0 \
symbolic-common-7.4.0 \
symbolic-demangle-7.4.0 \
syn-1.0.33 \
synstructure-0.12.4 \
tempfile-3.1.0 \
textwrap-0.11.0 \
textwrap-0.12.1 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
time-0.1.43 \
tinyvec-0.3.3 \
tui-0.9.5 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
uuid-0.8.1 \
vcpkg-0.2.10 \
void-1.0.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
x11-clipboard-0.3.3 \
xcb-0.8.2
PLIST_FILES= bin/${PORTNAME}
BINARY_ALIAS= python3=${PYTHON_CMD} # xcb requires python3
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
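The BINARY_ALIAS line deserves a note: the xcb crate runs a python3 code generator at build time, but the ports framework installs Python under a versioned name, so a build-time-only alias is created and nothing extra is packaged. Approximately what the framework arranges for that setting, shown as a sketch (directory and Python version are illustrative):

# BINARY_ALIAS=python3=${PYTHON_CMD}, roughly:
mkdir -p ${WRKDIR}/.bin
ln -sf ${LOCALBASE}/bin/python3.7 ${WRKDIR}/.bin/python3
# the build then runs with PATH=${WRKDIR}/.bin:${PATH}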
Index: head/devel/interactive_rebase_tool/Makefile
===================================================================
--- head/devel/interactive_rebase_tool/Makefile (revision 552220)
+++ head/devel/interactive_rebase_tool/Makefile (revision 552221)
@@ -1,69 +1,69 @@
# $FreeBSD$
PORTNAME= interactive_rebase_tool
DISTVERSION= 1.2.1
-PORTREVISION= 10
+PORTREVISION= 11
CATEGORIES= devel
MAINTAINER= petteri.valkonen@iki.fi
COMMENT= Improved sequence editor for Git
LICENSE= GPLv3+
LIB_DEPENDS= libgit2.so:devel/libgit2
USES= cargo ncurses
USE_GITHUB= yes
GH_ACCOUNT= MitMaro
GH_PROJECT= git-interactive-rebase-tool
CARGO_CRATES= ansi_term-0.11.0 \
atty-0.2.11 \
bitflags-1.0.4 \
cc-1.0.29 \
cfg-if-0.1.6 \
chrono-0.4.6 \
clap-2.32.0 \
git2-0.7.5 \
idna-0.1.5 \
libc-0.2.49 \
libgit2-sys-0.7.11 \
libz-sys-1.0.25 \
log-0.4.6 \
matches-0.1.8 \
ncurses-5.98.0 \
num-integer-0.1.39 \
num-traits-0.2.6 \
pancurses-0.16.1 \
pdcurses-sys-0.7.0 \
percent-encoding-1.0.1 \
pkg-config-0.3.14 \
redox_syscall-0.1.51 \
redox_termios-0.1.1 \
smallvec-0.6.8 \
strsim-0.7.0 \
termion-1.5.1 \
textwrap-0.10.0 \
time-0.1.42 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-segmentation-1.2.1 \
unicode-width-0.1.5 \
unreachable-1.0.0 \
url-1.7.2 \
vcpkg-0.2.6 \
vec_map-0.8.1 \
void-1.0.2 \
winapi-0.3.6 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.5.1
PLIST_FILES= bin/interactive-rebase-tool \
man/man1/interactive-rebase-tool.1.gz
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/interactive-rebase-tool
${INSTALL_MAN} ${WRKSRC}/src/interactive-rebase-tool.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
.include <bsd.port.mk>
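One detail worth noting: the man page is installed uncompressed from the source tree, yet PLIST_FILES names interactive-rebase-tool.1.gz. The ports framework compresses man pages in the stage directory automatically, so the packing list must always use the .gz name. A sketch of the pattern for a hypothetical page shipped in the source as doc/prog.1:

PLIST_FILES=	bin/prog \
		man/man1/prog.1.gz
post-install:
	${INSTALL_MAN} ${WRKSRC}/doc/prog.1 \
		${STAGEDIR}${MAN1PREFIX}/man/man1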
Index: head/devel/pijul/Makefile
===================================================================
--- head/devel/pijul/Makefile (revision 552220)
+++ head/devel/pijul/Makefile (revision 552221)
@@ -1,300 +1,300 @@
# Created by: Carlo Strub
# $FreeBSD$
PORTNAME= pijul
PORTVERSION= 0.12.0
-PORTREVISION= 19
+PORTREVISION= 20
CATEGORIES= devel
MASTER_SITES= https://pijul.org/releases/
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= cs@FreeBSD.org
COMMENT= Distributed version control system
LICENSE= GPLv2
BUILD_DEPENDS= llvm${LLVM_DEFAULT}>0:devel/llvm${LLVM_DEFAULT}
LIB_DEPENDS= libgmp.so:math/gmp \
libnettle.so:security/nettle \
libsodium.so:security/libsodium
USES= cargo ssl
CARGO_CRATES= MacTypes-sys-2.1.0 \
adler32-1.0.3 \
advapi32-sys-0.2.0 \
aho-corasick-0.6.10 \
ansi_term-0.11.0 \
argon2rs-0.2.5 \
arrayvec-0.4.10 \
ascii-canvas-1.0.0 \
atty-0.2.11 \
autocfg-0.1.2 \
backtrace-0.3.14 \
backtrace-sys-0.1.28 \
base64-0.10.1 \
base64-0.9.3 \
bincode-1.1.2 \
bindgen-0.43.2 \
bit-set-0.5.1 \
bit-vec-0.4.4 \
bit-vec-0.5.1 \
bitflags-1.0.4 \
blake2-rfc-0.2.18 \
block-buffer-0.7.0 \
block-padding-0.1.3 \
bs58-0.2.2 \
buffered-reader-0.3.0 \
build_const-0.2.1 \
byte-tools-0.3.1 \
byteorder-1.3.1 \
bytes-0.4.12 \
bzip2-0.3.3 \
bzip2-sys-0.1.7 \
cc-1.0.31 \
cexpr-0.3.5 \
cfg-if-0.1.7 \
chrono-0.4.6 \
clang-sys-0.26.4 \
clap-2.32.0 \
cloudabi-0.0.3 \
constant_time_eq-0.1.3 \
core-foundation-0.5.1 \
core-foundation-sys-0.5.1 \
crc-1.8.1 \
crc32fast-1.2.0 \
crossbeam-channel-0.3.8 \
crossbeam-deque-0.7.1 \
crossbeam-epoch-0.7.1 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.5 \
cryptovec-0.4.6 \
diff-0.1.11 \
diffs-0.3.0 \
digest-0.8.0 \
dirs-1.0.5 \
docopt-1.0.2 \
dtoa-0.4.3 \
either-1.5.1 \
ena-0.11.0 \
encoding_rs-0.8.17 \
env_logger-0.6.1 \
failure-0.1.5 \
failure_derive-0.1.5 \
fake-simd-0.1.2 \
filetime-0.2.4 \
fixedbitset-0.1.9 \
flate2-1.0.7 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fs2-0.4.3 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.25 \
futures-cpupool-0.1.8 \
generic-array-0.12.0 \
getch-0.2.1 \
glob-0.2.11 \
globset-0.4.2 \
h2-0.1.17 \
hex-0.3.2 \
http-0.1.16 \
httparse-1.3.3 \
humantime-1.2.0 \
hyper-0.12.25 \
hyper-tls-0.3.2 \
idna-0.1.5 \
ignore-0.4.6 \
indexmap-1.0.2 \
iovec-0.1.2 \
itertools-0.8.0 \
itoa-0.4.3 \
kernel32-sys-0.2.2 \
lalrpop-0.16.3 \
lalrpop-util-0.16.3 \
lazy_static-1.3.0 \
lazycell-1.2.1 \
libc-0.2.50 \
libloading-0.5.0 \
libpijul-0.12.0 \
line-0.1.15 \
lock_api-0.1.5 \
log-0.4.6 \
mach_o_sys-0.1.1 \
matches-0.1.8 \
memchr-2.2.0 \
memmap-0.7.0 \
memoffset-0.2.1 \
memsec-0.5.4 \
mime-0.3.13 \
mime_guess-2.0.0-alpha.6 \
miniz-sys-0.1.11 \
miniz_oxide-0.2.1 \
miniz_oxide_c_api-0.2.1 \
mio-0.6.16 \
mio-uds-0.6.7 \
miow-0.2.1 \
native-tls-0.2.2 \
net2-0.2.33 \
nettle-4.0.0 \
nettle-sys-1.0.1 \
new_debug_unreachable-1.0.3 \
nodrop-0.1.13 \
nom-4.2.3 \
num-0.1.42 \
num-bigint-0.1.44 \
num-complex-0.1.43 \
num-integer-0.1.39 \
num-iter-0.1.37 \
num-rational-0.1.42 \
num-traits-0.2.6 \
num_cpus-1.10.0 \
opaque-debug-0.2.2 \
openssl-0.10.20 \
openssl-probe-0.1.2 \
openssl-sys-0.9.43 \
ordermap-0.3.5 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot_core-0.4.0 \
pathdiff-0.1.0 \
peeking_take_while-0.1.2 \
percent-encoding-1.0.1 \
petgraph-0.4.13 \
phf-0.7.24 \
phf_codegen-0.7.24 \
phf_generator-0.7.24 \
phf_shared-0.7.24 \
pkg-config-0.3.14 \
precomputed-hash-0.1.1 \
proc-macro2-0.3.5 \
proc-macro2-0.4.27 \
progrs-0.1.1 \
quick-error-1.2.2 \
quickcheck-0.8.2 \
quote-0.5.2 \
quote-0.6.11 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand_chacha-0.1.1 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rand_hc-0.1.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.3 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.51 \
redox_termios-0.1.1 \
redox_users-0.3.0 \
regex-1.1.2 \
regex-syntax-0.6.5 \
remove_dir_all-0.5.1 \
reqwest-0.9.12 \
rpassword-2.1.0 \
rustc-demangle-0.1.13 \
rustc-serialize-0.3.24 \
rustc_version-0.2.3 \
ryu-0.2.7 \
safemem-0.3.0 \
same-file-1.0.4 \
sanakirja-0.10.2 \
schannel-0.1.15 \
scoped_threadpool-0.1.9 \
scopeguard-0.3.3 \
security-framework-0.2.2 \
security-framework-sys-0.2.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
sequoia-openpgp-0.4.1 \
serde-1.0.89 \
serde_derive-1.0.89 \
serde_json-1.0.39 \
serde_urlencoded-0.5.4 \
sha2-0.8.0 \
shell-escape-0.1.4 \
siphasher-0.2.3 \
slab-0.4.2 \
smallvec-0.6.9 \
stable_deref_trait-1.1.1 \
string-0.1.3 \
string_cache-0.7.3 \
string_cache_codegen-0.4.2 \
string_cache_shared-0.3.0 \
strsim-0.7.0 \
syn-0.15.29 \
synstructure-0.10.1 \
tar-0.4.22 \
tempdir-0.3.7 \
tempfile-3.0.7 \
term-0.4.6 \
term-0.5.1 \
termcolor-1.0.4 \
termion-1.5.1 \
termios-0.2.2 \
textwrap-0.10.0 \
thread_local-0.3.6 \
thrussh-0.20.7 \
thrussh-config-0.1.2 \
thrussh-keys-0.11.9 \
thrussh-libsodium-0.1.3 \
time-0.1.42 \
tokio-0.1.18 \
tokio-codec-0.1.1 \
tokio-current-thread-0.1.6 \
tokio-executor-0.1.7 \
tokio-fs-0.1.6 \
tokio-io-0.1.12 \
tokio-reactor-0.1.9 \
tokio-sync-0.1.4 \
tokio-tcp-0.1.3 \
tokio-threadpool-0.1.13 \
tokio-timer-0.2.10 \
tokio-trace-core-0.1.0 \
tokio-udp-0.1.3 \
tokio-uds-0.2.5 \
toml-0.4.10 \
try-lock-0.2.2 \
typenum-1.10.0 \
ucd-util-0.1.3 \
unicase-1.4.2 \
unicase-2.3.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
url-1.7.2 \
username-0.2.0 \
utf8-ranges-1.0.2 \
utf8parse-0.1.1 \
uuid-0.7.3 \
vcpkg-0.2.6 \
vec_map-0.8.1 \
version_check-0.1.5 \
walkdir-2.2.7 \
want-0.0.6 \
which-1.0.5 \
winapi-0.2.8 \
winapi-0.3.6 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.1 \
ws2_32-sys-0.2.1 \
xattr-0.2.2 \
yasna-0.1.3
CARGO_INSTALL_PATH= pijul
PLIST_FILES= bin/pijul
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/pijul
.include <bsd.port.mk>
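pijul's binary crate does not live at the top of the distribution (note libpijul-0.12.0 among the vendored crates), so the port has to say which directory to install from; CARGO_INSTALL_PATH=pijul does that. Roughly, the framework's install step then amounts to the command below, shown only as a sketch (the real invocation adds further flags and the cargo environment from the cargo framework):

cd ${WRKSRC} && cargo install --path pijul --root ${STAGEDIR}${PREFIX}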
Index: head/devel/racer/Makefile
===================================================================
--- head/devel/racer/Makefile (revision 552220)
+++ head/devel/racer/Makefile (revision 552221)
@@ -1,159 +1,157 @@
# Created by: Timothy Beyer <beyert@cs.ucr.edu>
# $FreeBSD$
PORTNAME= racer
-DISTVERSION= 2.1.35
-PORTREVISION= 3
+DISTVERSION= 2.1.36
CATEGORIES= devel
MASTER_SITES= CRATESIO
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= rust@FreeBSD.org
COMMENT= Rust code completion helper
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE-MIT
USES= cargo
CARGO_CRATES= aho-corasick-0.7.10 \
- annotate-snippets-0.6.1 \
annotate-snippets-0.8.0 \
ansi_term-0.11.0 \
atty-0.2.14 \
autocfg-1.0.0 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
cc-1.0.53 \
cfg-if-0.1.10 \
clap-2.33.0 \
cloudabi-0.0.3 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.2 \
derive_more-0.99.5 \
digest-0.8.1 \
either-1.5.3 \
ena-0.14.0 \
env_logger-0.7.1 \
fake-simd-0.1.2 \
generic-array-0.12.3 \
getopts-0.2.21 \
getrandom-0.1.14 \
hermit-abi-0.1.10 \
humantime-1.3.0 \
humantime-2.0.0 \
indexmap-1.3.2 \
itoa-0.4.5 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.68 \
lock_api-0.3.4 \
log-0.4.8 \
maybe-uninit-2.0.0 \
md-5-0.8.0 \
measureme-0.7.1 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.4 \
num_cpus-1.12.0 \
once_cell-1.4.0 \
opaque-debug-0.2.3 \
parking_lot-0.9.0 \
parking_lot-0.10.2 \
parking_lot_core-0.6.2 \
parking_lot_core-0.7.2 \
ppv-lite86-0.2.6 \
proc-macro2-1.0.10 \
psm-0.1.8 \
quick-error-1.2.3 \
quote-1.0.3 \
- racer-2.1.34 \
+ racer-2.1.35 \
racer-cargo-metadata-0.1.1 \
racer-interner-0.1.0 \
racer-testutils-0.1.0 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
regex-1.3.6 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
rls-span-0.5.2 \
- rustc-ap-arena-659.0.0 \
- rustc-ap-graphviz-659.0.0 \
rustc-ap-rustc_arena-664.0.0 \
- rustc-ap-rustc_ast-659.0.0 \
+ rustc-ap-rustc_arena-669.0.0 \
rustc-ap-rustc_ast-664.0.0 \
- rustc-ap-rustc_ast_pretty-659.0.0 \
+ rustc-ap-rustc_ast-669.0.0 \
rustc-ap-rustc_ast_pretty-664.0.0 \
- rustc-ap-rustc_data_structures-659.0.0 \
+ rustc-ap-rustc_ast_pretty-669.0.0 \
rustc-ap-rustc_data_structures-664.0.0 \
- rustc-ap-rustc_errors-659.0.0 \
+ rustc-ap-rustc_data_structures-669.0.0 \
rustc-ap-rustc_errors-664.0.0 \
- rustc-ap-rustc_feature-659.0.0 \
+ rustc-ap-rustc_errors-669.0.0 \
rustc-ap-rustc_feature-664.0.0 \
- rustc-ap-rustc_fs_util-659.0.0 \
+ rustc-ap-rustc_feature-669.0.0 \
rustc-ap-rustc_fs_util-664.0.0 \
+ rustc-ap-rustc_fs_util-669.0.0 \
rustc-ap-rustc_graphviz-664.0.0 \
- rustc-ap-rustc_index-659.0.0 \
+ rustc-ap-rustc_graphviz-669.0.0 \
rustc-ap-rustc_index-664.0.0 \
- rustc-ap-rustc_lexer-659.0.0 \
+ rustc-ap-rustc_index-669.0.0 \
rustc-ap-rustc_lexer-664.0.0 \
- rustc-ap-rustc_macros-659.0.0 \
+ rustc-ap-rustc_lexer-669.0.0 \
rustc-ap-rustc_macros-664.0.0 \
- rustc-ap-rustc_parse-659.0.0 \
+ rustc-ap-rustc_macros-669.0.0 \
rustc-ap-rustc_parse-664.0.0 \
+ rustc-ap-rustc_parse-669.0.0 \
rustc-ap-rustc_serialize-664.0.0 \
- rustc-ap-rustc_session-659.0.0 \
+ rustc-ap-rustc_serialize-669.0.0 \
rustc-ap-rustc_session-664.0.0 \
- rustc-ap-rustc_span-659.0.0 \
+ rustc-ap-rustc_session-669.0.0 \
rustc-ap-rustc_span-664.0.0 \
- rustc-ap-rustc_target-659.0.0 \
+ rustc-ap-rustc_span-669.0.0 \
rustc-ap-rustc_target-664.0.0 \
- rustc-ap-serialize-659.0.0 \
+ rustc-ap-rustc_target-669.0.0 \
rustc-hash-1.1.0 \
rustc-rayon-0.3.0 \
rustc-rayon-core-0.3.0 \
rustc_version-0.2.3 \
ryu-1.0.3 \
scoped-tls-1.0.0 \
scopeguard-1.1.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.105 \
serde_derive-1.0.105 \
serde_json-1.0.50 \
sha-1-0.8.2 \
smallvec-0.6.13 \
smallvec-1.2.0 \
stable_deref_trait-1.1.1 \
stacker-0.1.9 \
strsim-0.8.0 \
syn-1.0.17 \
synstructure-0.12.3 \
tempfile-3.1.0 \
termcolor-1.1.0 \
termize-0.1.1 \
textwrap-0.11.0 \
thread_local-1.0.1 \
typenum-1.12.0 \
unicode-normalization-0.1.12 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.4 \
winapi-x86_64-pc-windows-gnu-0.4.0
MAKE_ENV= RUSTC_BOOTSTRAP=1
PLIST_FILES= bin/racer
.include <bsd.port.mk>
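racer builds against the rustc-ap-* crates, automatically published snapshots of rustc's internal libraries that rely on unstable features, which a stable compiler normally refuses. MAKE_ENV= RUSTC_BOOTSTRAP=1 is the escape hatch: it lets the stable lang/rust toolchain accept nightly-only feature gates for this build. A sketch of the effect outside the ports framework:

# Without the variable, stable rustc stops with something like:
#   error[E0554]: `#![feature]` may not be used on the stable release channel
# With it, the same toolchain builds the crate:
env RUSTC_BOOTSTRAP=1 cargo build --release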
Index: head/devel/racer/distinfo
===================================================================
--- head/devel/racer/distinfo (revision 552220)
+++ head/devel/racer/distinfo (revision 552221)
@@ -1,279 +1,277 @@
-TIMESTAMP = 1594701867
-SHA256 (rust/crates/racer-2.1.35.tar.gz) = 421174f19211ba9e5fda34aa0cbc292188aae8e0cfbff4aebbae23f1a416bfb3
-SIZE (rust/crates/racer-2.1.35.tar.gz) = 186355
+TIMESTAMP = 1602245176
+SHA256 (rust/crates/racer-2.1.36.tar.gz) = 09ba6cca9fcd8ae086b842b1bd9e3f19f104a4c30e0e8927b2befc06d375e7e0
+SIZE (rust/crates/racer-2.1.36.tar.gz) = 186258
SHA256 (rust/crates/aho-corasick-0.7.10.tar.gz) = 8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada
SIZE (rust/crates/aho-corasick-0.7.10.tar.gz) = 111039
-SHA256 (rust/crates/annotate-snippets-0.6.1.tar.gz) = c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7
-SIZE (rust/crates/annotate-snippets-0.6.1.tar.gz) = 21894
SHA256 (rust/crates/annotate-snippets-0.8.0.tar.gz) = d78ea013094e5ea606b1c05fe35f1dd7ea1eb1ea259908d040b25bd5ec677ee5
SIZE (rust/crates/annotate-snippets-0.8.0.tar.gz) = 28829
SHA256 (rust/crates/ansi_term-0.11.0.tar.gz) = ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b
SIZE (rust/crates/ansi_term-0.11.0.tar.gz) = 17087
SHA256 (rust/crates/atty-0.2.14.tar.gz) = d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8
SIZE (rust/crates/atty-0.2.14.tar.gz) = 5470
SHA256 (rust/crates/autocfg-1.0.0.tar.gz) = f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d
SIZE (rust/crates/autocfg-1.0.0.tar.gz) = 12870
SHA256 (rust/crates/bitflags-1.2.1.tar.gz) = cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693
SIZE (rust/crates/bitflags-1.2.1.tar.gz) = 16745
SHA256 (rust/crates/block-buffer-0.7.3.tar.gz) = c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b
SIZE (rust/crates/block-buffer-0.7.3.tar.gz) = 7179
SHA256 (rust/crates/block-padding-0.1.5.tar.gz) = fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5
SIZE (rust/crates/block-padding-0.1.5.tar.gz) = 7342
SHA256 (rust/crates/byte-tools-0.3.1.tar.gz) = e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7
SIZE (rust/crates/byte-tools-0.3.1.tar.gz) = 5526
SHA256 (rust/crates/byteorder-1.3.4.tar.gz) = 08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de
SIZE (rust/crates/byteorder-1.3.4.tar.gz) = 21943
SHA256 (rust/crates/cc-1.0.53.tar.gz) = 404b1fe4f65288577753b17e3b36a04596ee784493ec249bf81c7f2d2acd751c
SIZE (rust/crates/cc-1.0.53.tar.gz) = 51740
SHA256 (rust/crates/cfg-if-0.1.10.tar.gz) = 4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822
SIZE (rust/crates/cfg-if-0.1.10.tar.gz) = 7933
SHA256 (rust/crates/clap-2.33.0.tar.gz) = 5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9
SIZE (rust/crates/clap-2.33.0.tar.gz) = 196458
SHA256 (rust/crates/cloudabi-0.0.3.tar.gz) = ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f
SIZE (rust/crates/cloudabi-0.0.3.tar.gz) = 22156
SHA256 (rust/crates/crossbeam-deque-0.7.3.tar.gz) = 9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285
SIZE (rust/crates/crossbeam-deque-0.7.3.tar.gz) = 19888
SHA256 (rust/crates/crossbeam-epoch-0.8.2.tar.gz) = 058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace
SIZE (rust/crates/crossbeam-epoch-0.8.2.tar.gz) = 39283
SHA256 (rust/crates/crossbeam-queue-0.1.2.tar.gz) = 7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b
SIZE (rust/crates/crossbeam-queue-0.1.2.tar.gz) = 14104
SHA256 (rust/crates/crossbeam-utils-0.6.6.tar.gz) = 04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6
SIZE (rust/crates/crossbeam-utils-0.6.6.tar.gz) = 32836
SHA256 (rust/crates/crossbeam-utils-0.7.2.tar.gz) = c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8
SIZE (rust/crates/crossbeam-utils-0.7.2.tar.gz) = 34338
SHA256 (rust/crates/derive_more-0.99.5.tar.gz) = e2323f3f47db9a0e77ce7a300605d8d2098597fc451ed1a97bb1f6411bb550a7
SIZE (rust/crates/derive_more-0.99.5.tar.gz) = 51045
SHA256 (rust/crates/digest-0.8.1.tar.gz) = f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5
SIZE (rust/crates/digest-0.8.1.tar.gz) = 9449
SHA256 (rust/crates/either-1.5.3.tar.gz) = bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3
SIZE (rust/crates/either-1.5.3.tar.gz) = 12278
SHA256 (rust/crates/ena-0.14.0.tar.gz) = d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3
SIZE (rust/crates/ena-0.14.0.tar.gz) = 23052
SHA256 (rust/crates/env_logger-0.7.1.tar.gz) = 44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36
SIZE (rust/crates/env_logger-0.7.1.tar.gz) = 32281
SHA256 (rust/crates/fake-simd-0.1.2.tar.gz) = e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed
SIZE (rust/crates/fake-simd-0.1.2.tar.gz) = 5398
SHA256 (rust/crates/generic-array-0.12.3.tar.gz) = c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec
SIZE (rust/crates/generic-array-0.12.3.tar.gz) = 18017
SHA256 (rust/crates/getopts-0.2.21.tar.gz) = 14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5
SIZE (rust/crates/getopts-0.2.21.tar.gz) = 18457
SHA256 (rust/crates/getrandom-0.1.14.tar.gz) = 7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb
SIZE (rust/crates/getrandom-0.1.14.tar.gz) = 24698
SHA256 (rust/crates/hermit-abi-0.1.10.tar.gz) = 725cf19794cf90aa94e65050cb4191ff5d8fa87a498383774c47b332e3af952e
SIZE (rust/crates/hermit-abi-0.1.10.tar.gz) = 9127
SHA256 (rust/crates/humantime-1.3.0.tar.gz) = df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f
SIZE (rust/crates/humantime-1.3.0.tar.gz) = 17020
SHA256 (rust/crates/humantime-2.0.0.tar.gz) = b9b6c53306532d3c8e8087b44e6580e10db51a023cf9b433cea2ac38066b92da
SIZE (rust/crates/humantime-2.0.0.tar.gz) = 17374
SHA256 (rust/crates/indexmap-1.3.2.tar.gz) = 076f042c5b7b98f31d205f1249267e12a6518c1481e9dae9764af19b707d2292
SIZE (rust/crates/indexmap-1.3.2.tar.gz) = 47144
SHA256 (rust/crates/itoa-0.4.5.tar.gz) = b8b7a7c0c47db5545ed3fef7468ee7bb5b74691498139e4b3f6a20685dc6dd8e
SIZE (rust/crates/itoa-0.4.5.tar.gz) = 11194
SHA256 (rust/crates/jobserver-0.1.21.tar.gz) = 5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2
SIZE (rust/crates/jobserver-0.1.21.tar.gz) = 21228
SHA256 (rust/crates/lazy_static-1.4.0.tar.gz) = e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646
SIZE (rust/crates/lazy_static-1.4.0.tar.gz) = 10443
SHA256 (rust/crates/lazycell-1.2.1.tar.gz) = b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f
SIZE (rust/crates/lazycell-1.2.1.tar.gz) = 11691
SHA256 (rust/crates/libc-0.2.68.tar.gz) = dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0
SIZE (rust/crates/libc-0.2.68.tar.gz) = 468882
SHA256 (rust/crates/lock_api-0.3.4.tar.gz) = c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75
SIZE (rust/crates/lock_api-0.3.4.tar.gz) = 18750
SHA256 (rust/crates/log-0.4.8.tar.gz) = 14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7
SIZE (rust/crates/log-0.4.8.tar.gz) = 31297
SHA256 (rust/crates/maybe-uninit-2.0.0.tar.gz) = 60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00
SIZE (rust/crates/maybe-uninit-2.0.0.tar.gz) = 11809
SHA256 (rust/crates/md-5-0.8.0.tar.gz) = a18af3dcaf2b0219366cdb4e2af65a6101457b415c3d1a5c71dd9c2b7c77b9c8
SIZE (rust/crates/md-5-0.8.0.tar.gz) = 13122
SHA256 (rust/crates/measureme-0.7.1.tar.gz) = fef709d3257013bba7cff14fc504e07e80631d3fe0f6d38ce63b8f6510ccb932
SIZE (rust/crates/measureme-0.7.1.tar.gz) = 12194
SHA256 (rust/crates/memchr-2.3.3.tar.gz) = 3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400
SIZE (rust/crates/memchr-2.3.3.tar.gz) = 22566
SHA256 (rust/crates/memmap-0.7.0.tar.gz) = 6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b
SIZE (rust/crates/memmap-0.7.0.tar.gz) = 15214
SHA256 (rust/crates/memoffset-0.5.4.tar.gz) = b4fc2c02a7e374099d4ee95a193111f72d2110197fe200272371758f6c3643d8
SIZE (rust/crates/memoffset-0.5.4.tar.gz) = 6927
SHA256 (rust/crates/num_cpus-1.12.0.tar.gz) = 46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6
SIZE (rust/crates/num_cpus-1.12.0.tar.gz) = 11867
SHA256 (rust/crates/once_cell-1.4.0.tar.gz) = 0b631f7e854af39a1739f401cf34a8a013dfe09eac4fa4dba91e9768bd28168d
SIZE (rust/crates/once_cell-1.4.0.tar.gz) = 24625
SHA256 (rust/crates/opaque-debug-0.2.3.tar.gz) = 2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c
SIZE (rust/crates/opaque-debug-0.2.3.tar.gz) = 5643
SHA256 (rust/crates/parking_lot-0.9.0.tar.gz) = f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252
SIZE (rust/crates/parking_lot-0.9.0.tar.gz) = 35170
SHA256 (rust/crates/parking_lot-0.10.2.tar.gz) = d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e
SIZE (rust/crates/parking_lot-0.10.2.tar.gz) = 39536
SHA256 (rust/crates/parking_lot_core-0.6.2.tar.gz) = b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b
SIZE (rust/crates/parking_lot_core-0.6.2.tar.gz) = 30598
SHA256 (rust/crates/parking_lot_core-0.7.2.tar.gz) = d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3
SIZE (rust/crates/parking_lot_core-0.7.2.tar.gz) = 33875
SHA256 (rust/crates/ppv-lite86-0.2.6.tar.gz) = 74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b
SIZE (rust/crates/ppv-lite86-0.2.6.tar.gz) = 20522
SHA256 (rust/crates/proc-macro2-1.0.10.tar.gz) = df246d292ff63439fea9bc8c0a270bed0e390d5ebd4db4ba15aba81111b5abe3
SIZE (rust/crates/proc-macro2-1.0.10.tar.gz) = 35622
SHA256 (rust/crates/psm-0.1.8.tar.gz) = 659ecfea2142a458893bb7673134bad50b752fea932349c213d6a23874ce3aa7
SIZE (rust/crates/psm-0.1.8.tar.gz) = 20826
SHA256 (rust/crates/quick-error-1.2.3.tar.gz) = a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0
SIZE (rust/crates/quick-error-1.2.3.tar.gz) = 15066
SHA256 (rust/crates/quote-1.0.3.tar.gz) = 2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f
SIZE (rust/crates/quote-1.0.3.tar.gz) = 22939
-SHA256 (rust/crates/racer-2.1.34.tar.gz) = cc9caecf1286a3ed28d3ae35207a178ba12e58de95540781e5c6cba05e0f0833
-SIZE (rust/crates/racer-2.1.34.tar.gz) = 186222
+SHA256 (rust/crates/racer-2.1.35.tar.gz) = 421174f19211ba9e5fda34aa0cbc292188aae8e0cfbff4aebbae23f1a416bfb3
+SIZE (rust/crates/racer-2.1.35.tar.gz) = 186355
SHA256 (rust/crates/racer-cargo-metadata-0.1.1.tar.gz) = 2b60cd72291a641dbaa649e9e328df552186dda1fea834c55cf28594a25b7c6f
SIZE (rust/crates/racer-cargo-metadata-0.1.1.tar.gz) = 10768
SHA256 (rust/crates/racer-interner-0.1.0.tar.gz) = 206a244afd319767bdf97cf4e94c0d5d3b1de9cb23fd25434e7992cca4d4fa4c
SIZE (rust/crates/racer-interner-0.1.0.tar.gz) = 1833
SHA256 (rust/crates/racer-testutils-0.1.0.tar.gz) = adb261af243928ffa20752c84d0f8b3debfd7a486e8953e2a0669b2ad48621e1
SIZE (rust/crates/racer-testutils-0.1.0.tar.gz) = 2790
SHA256 (rust/crates/rand-0.7.3.tar.gz) = 6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03
SIZE (rust/crates/rand-0.7.3.tar.gz) = 112246
SHA256 (rust/crates/rand_chacha-0.2.2.tar.gz) = f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402
SIZE (rust/crates/rand_chacha-0.2.2.tar.gz) = 13267
SHA256 (rust/crates/rand_core-0.5.1.tar.gz) = 90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19
SIZE (rust/crates/rand_core-0.5.1.tar.gz) = 21116
SHA256 (rust/crates/rand_hc-0.2.0.tar.gz) = ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c
SIZE (rust/crates/rand_hc-0.2.0.tar.gz) = 11670
SHA256 (rust/crates/redox_syscall-0.1.56.tar.gz) = 2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84
SIZE (rust/crates/redox_syscall-0.1.56.tar.gz) = 17117
SHA256 (rust/crates/regex-1.3.6.tar.gz) = 7f6946991529684867e47d86474e3a6d0c0ab9b82d5821e314b1ede31fa3a4b3
SIZE (rust/crates/regex-1.3.6.tar.gz) = 235465
SHA256 (rust/crates/regex-syntax-0.6.17.tar.gz) = 7fe5bd57d1d7414c6b5ed48563a2c855d995ff777729dcd91c369ec7fea395ae
SIZE (rust/crates/regex-syntax-0.6.17.tar.gz) = 294004
SHA256 (rust/crates/remove_dir_all-0.5.2.tar.gz) = 4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e
SIZE (rust/crates/remove_dir_all-0.5.2.tar.gz) = 8907
SHA256 (rust/crates/rls-span-0.5.2.tar.gz) = f2e9bed56f6272bd85d9d06d1aaeef80c5fddc78a82199eb36dceb5f94e7d934
SIZE (rust/crates/rls-span-0.5.2.tar.gz) = 9628
-SHA256 (rust/crates/rustc-ap-arena-659.0.0.tar.gz) = fdaf0295fc40b10ec1091aad1a1760b4bb3b4e7c4f77d543d1a2e9d50a01e6b1
-SIZE (rust/crates/rustc-ap-arena-659.0.0.tar.gz) = 7492
-SHA256 (rust/crates/rustc-ap-graphviz-659.0.0.tar.gz) = 8028e8cdb4eb71810d0c22a5a5e1e3106c81123be63ce7f044b6d4ac100d8941
-SIZE (rust/crates/rustc-ap-graphviz-659.0.0.tar.gz) = 9952
SHA256 (rust/crates/rustc-ap-rustc_arena-664.0.0.tar.gz) = 0c6683b49209f8b132bec33dc6b6c8f9958c8c94eb3586d4cb495e092b61c1da
SIZE (rust/crates/rustc-ap-rustc_arena-664.0.0.tar.gz) = 7465
-SHA256 (rust/crates/rustc-ap-rustc_ast-659.0.0.tar.gz) = 16e9e502bb3a5568433db1cf2fb1f1e1074934636069cf744ad7c77b58e1428e
-SIZE (rust/crates/rustc-ap-rustc_ast-659.0.0.tar.gz) = 68426
+SHA256 (rust/crates/rustc-ap-rustc_arena-669.0.0.tar.gz) = c9cdd301e9dcb15ead384fc07196c850fd22829fae81d296b2ed6b4b10bf3278
+SIZE (rust/crates/rustc-ap-rustc_arena-669.0.0.tar.gz) = 7827
SHA256 (rust/crates/rustc-ap-rustc_ast-664.0.0.tar.gz) = 5b21784d92fb2d584800f528866f00fe814f73abda794f406bfd1fbb2f1ca7f7
SIZE (rust/crates/rustc-ap-rustc_ast-664.0.0.tar.gz) = 70480
-SHA256 (rust/crates/rustc-ap-rustc_ast_pretty-659.0.0.tar.gz) = 3684ed43dc552f1e030e3f7a5a300a7a834bdda4e9e00ab80284be4220d8c603
-SIZE (rust/crates/rustc-ap-rustc_ast_pretty-659.0.0.tar.gz) = 26015
+SHA256 (rust/crates/rustc-ap-rustc_ast-669.0.0.tar.gz) = 3f7c0d0537ca69dfe4a49212035295dfb37a235b5df01aa877d50b247f4775b8
+SIZE (rust/crates/rustc-ap-rustc_ast-669.0.0.tar.gz) = 71015
SHA256 (rust/crates/rustc-ap-rustc_ast_pretty-664.0.0.tar.gz) = 013db7dd198fe95962d2cefa5bd0b350cf2028af77c169b17b4baa9c3bbf77d1
SIZE (rust/crates/rustc-ap-rustc_ast_pretty-664.0.0.tar.gz) = 26843
-SHA256 (rust/crates/rustc-ap-rustc_data_structures-659.0.0.tar.gz) = 4b1c6069e5c522657f1c6f5ab33074e097092f48e804cc896d337e319aacbd60
-SIZE (rust/crates/rustc-ap-rustc_data_structures-659.0.0.tar.gz) = 86345
+SHA256 (rust/crates/rustc-ap-rustc_ast_pretty-669.0.0.tar.gz) = 202bd2886d0cfa48baa3711042c14843f1b4852555b7ee7e5376bf66b276cb8d
+SIZE (rust/crates/rustc-ap-rustc_ast_pretty-669.0.0.tar.gz) = 26913
SHA256 (rust/crates/rustc-ap-rustc_data_structures-664.0.0.tar.gz) = b92e4c6cb6c43ee9031a71709dc12853b358253c2b41d12a26379994fab625e0
SIZE (rust/crates/rustc-ap-rustc_data_structures-664.0.0.tar.gz) = 85634
-SHA256 (rust/crates/rustc-ap-rustc_errors-659.0.0.tar.gz) = 0c374e89b3c9714869ef86076942155383804ba6778c26be2169d324563c31f9
-SIZE (rust/crates/rustc-ap-rustc_errors-659.0.0.tar.gz) = 44399
+SHA256 (rust/crates/rustc-ap-rustc_data_structures-669.0.0.tar.gz) = 7a45d43b974d4cb9e32e5a15119c5eb7672c306ef09b064f2125b6a0399f6656
+SIZE (rust/crates/rustc-ap-rustc_data_structures-669.0.0.tar.gz) = 85649
SHA256 (rust/crates/rustc-ap-rustc_errors-664.0.0.tar.gz) = 6b0aa79423260c1b9e2f856e144e040f606b0f5d43644408375becf9d7bcdf86
SIZE (rust/crates/rustc-ap-rustc_errors-664.0.0.tar.gz) = 44296
-SHA256 (rust/crates/rustc-ap-rustc_feature-659.0.0.tar.gz) = c0296fbc29b629d5ae2ebee1bbf0407bb22de04d26d87216c20899b79579ccb3
-SIZE (rust/crates/rustc-ap-rustc_feature-659.0.0.tar.gz) = 18880
+SHA256 (rust/crates/rustc-ap-rustc_errors-669.0.0.tar.gz) = 8cd895d440820aaa04e6dc5486105494920a1e9779b9b051e8dba4ca5c182f94
+SIZE (rust/crates/rustc-ap-rustc_errors-669.0.0.tar.gz) = 44574
SHA256 (rust/crates/rustc-ap-rustc_feature-664.0.0.tar.gz) = 1bbd625705c1db42a0c7503736292813d7b76ada5da20578fb55c63228c80ab5
SIZE (rust/crates/rustc-ap-rustc_feature-664.0.0.tar.gz) = 18982
-SHA256 (rust/crates/rustc-ap-rustc_fs_util-659.0.0.tar.gz) = 34734f6cc681399630acd836a14207c6b5b9671a290cc7cad0354b0a4d71b3c9
-SIZE (rust/crates/rustc-ap-rustc_fs_util-659.0.0.tar.gz) = 2167
+SHA256 (rust/crates/rustc-ap-rustc_feature-669.0.0.tar.gz) = 5473d5106401aa46f881eb91772f0a41fd5f28ae6134cf4b450eb1370ea6af22
+SIZE (rust/crates/rustc-ap-rustc_feature-669.0.0.tar.gz) = 19254
SHA256 (rust/crates/rustc-ap-rustc_fs_util-664.0.0.tar.gz) = 34cca6e2942fa0b059c582437ead666d5bcf20fa7c242599e2bbea9b609f29ae
SIZE (rust/crates/rustc-ap-rustc_fs_util-664.0.0.tar.gz) = 2167
+SHA256 (rust/crates/rustc-ap-rustc_fs_util-669.0.0.tar.gz) = 8da1d57ee7a7ef55f31a97d99c7f919f02fc9a60ab96faa8cf45a7ae3ab1ccbf
+SIZE (rust/crates/rustc-ap-rustc_fs_util-669.0.0.tar.gz) = 2167
SHA256 (rust/crates/rustc-ap-rustc_graphviz-664.0.0.tar.gz) = 13d6a029b81f5e02da85763f82c135507f278a4a0c776432c728520563059529
SIZE (rust/crates/rustc-ap-rustc_graphviz-664.0.0.tar.gz) = 9957
-SHA256 (rust/crates/rustc-ap-rustc_index-659.0.0.tar.gz) = d1e4508753d71d3523209c2ca5086db15a1413e71ebf17ad5412bb7ced5e44c2
-SIZE (rust/crates/rustc-ap-rustc_index-659.0.0.tar.gz) = 15073
+SHA256 (rust/crates/rustc-ap-rustc_graphviz-669.0.0.tar.gz) = e3af62b20460908378cd1d354917acd9553376c5363bbb4e465f949bd82bdef9
+SIZE (rust/crates/rustc-ap-rustc_graphviz-669.0.0.tar.gz) = 9962
SHA256 (rust/crates/rustc-ap-rustc_index-664.0.0.tar.gz) = bae50852d303e230b2781c994513788136dc6c2fe4ebe032959f0b990a425767
SIZE (rust/crates/rustc-ap-rustc_index-664.0.0.tar.gz) = 15120
-SHA256 (rust/crates/rustc-ap-rustc_lexer-659.0.0.tar.gz) = 42b9fcd8407e322908a721262fbc0b35b5f3c35bb173a26dd1e0070bde336e33
-SIZE (rust/crates/rustc-ap-rustc_lexer-659.0.0.tar.gz) = 13069
+SHA256 (rust/crates/rustc-ap-rustc_index-669.0.0.tar.gz) = 3af7d4c456fe7647453d3fcd58335c9d512d1ff9a239a370b7ebdd353d69f66f
+SIZE (rust/crates/rustc-ap-rustc_index-669.0.0.tar.gz) = 15351
SHA256 (rust/crates/rustc-ap-rustc_lexer-664.0.0.tar.gz) = b7186e74aa2d31bf0e2454325fefcdf0a3da77d9344134592144b9e40d45b15d
SIZE (rust/crates/rustc-ap-rustc_lexer-664.0.0.tar.gz) = 13083
-SHA256 (rust/crates/rustc-ap-rustc_macros-659.0.0.tar.gz) = 3d104115a689367d2e0bcd99f37e0ebd6b9c8c78bab0d9cbea5bae86323601b5
-SIZE (rust/crates/rustc-ap-rustc_macros-659.0.0.tar.gz) = 7562
+SHA256 (rust/crates/rustc-ap-rustc_lexer-669.0.0.tar.gz) = 456af5f09c006cf6c22c1a433ee0232c4bb74bdc6c647a010166a47c94ed2a63
+SIZE (rust/crates/rustc-ap-rustc_lexer-669.0.0.tar.gz) = 13122
SHA256 (rust/crates/rustc-ap-rustc_macros-664.0.0.tar.gz) = 4fc1add04e9d2301164118660ee0bc3266e9a7b1973fc2303fdbe002a12e5401
SIZE (rust/crates/rustc-ap-rustc_macros-664.0.0.tar.gz) = 7556
-SHA256 (rust/crates/rustc-ap-rustc_parse-659.0.0.tar.gz) = afaaab91853fc5a3916785ccae727a4433359d9787c260d42b96a2265fe5b287
-SIZE (rust/crates/rustc-ap-rustc_parse-659.0.0.tar.gz) = 113515
+SHA256 (rust/crates/rustc-ap-rustc_macros-669.0.0.tar.gz) = 64f6acd192f313047759a346b892998b626466b93fe04f415da5f38906bb3b4c
+SIZE (rust/crates/rustc-ap-rustc_macros-669.0.0.tar.gz) = 7542
SHA256 (rust/crates/rustc-ap-rustc_parse-664.0.0.tar.gz) = 9cd7fc4968bd60084f2fa4f280fa450b0cf98660a7983d6b93a7ae41b6d1d322
SIZE (rust/crates/rustc-ap-rustc_parse-664.0.0.tar.gz) = 114922
+SHA256 (rust/crates/rustc-ap-rustc_parse-669.0.0.tar.gz) = c006e8117c1c55e42bb56386c86ce6f7e4b47349e0bec7888c1d24784272e61b
+SIZE (rust/crates/rustc-ap-rustc_parse-669.0.0.tar.gz) = 116240
SHA256 (rust/crates/rustc-ap-rustc_serialize-664.0.0.tar.gz) = 00bf4c110271d9a2b7dfd2c6eb82e56fd80606a8bad6c102e158c54e44044046
SIZE (rust/crates/rustc-ap-rustc_serialize-664.0.0.tar.gz) = 32824
-SHA256 (rust/crates/rustc-ap-rustc_session-659.0.0.tar.gz) = 86e756a57ce6ce1b868e35e64a7e10ab28d49ece80d7c661b07aff5afc6e5d2d
-SIZE (rust/crates/rustc-ap-rustc_session-659.0.0.tar.gz) = 62055
+SHA256 (rust/crates/rustc-ap-rustc_serialize-669.0.0.tar.gz) = 306ced69beaeebe4de9552ee751eb54ea25b5f34a73fe80f5f9cbbe15ccebc48
+SIZE (rust/crates/rustc-ap-rustc_serialize-669.0.0.tar.gz) = 32806
SHA256 (rust/crates/rustc-ap-rustc_session-664.0.0.tar.gz) = 431cf962de71d4c03fb877d54f331ec36eca77350b0539017abc40a4410d6501
SIZE (rust/crates/rustc-ap-rustc_session-664.0.0.tar.gz) = 63031
-SHA256 (rust/crates/rustc-ap-rustc_span-659.0.0.tar.gz) = 21031c3396ee452f4c6e994b67513a633055c57c86d00336afd9d63149518f34
-SIZE (rust/crates/rustc-ap-rustc_span-659.0.0.tar.gz) = 53894
+SHA256 (rust/crates/rustc-ap-rustc_session-669.0.0.tar.gz) = dbff48435f5a476365e3ab5f49e07f98715cecb2d8c5bbcafeaf3aec638407be
+SIZE (rust/crates/rustc-ap-rustc_session-669.0.0.tar.gz) = 63778
SHA256 (rust/crates/rustc-ap-rustc_span-664.0.0.tar.gz) = b912039640597624f4bcb75f1e1fcfa5710267d715a7f73a6336baef341b23d1
SIZE (rust/crates/rustc-ap-rustc_span-664.0.0.tar.gz) = 55566
-SHA256 (rust/crates/rustc-ap-rustc_target-659.0.0.tar.gz) = ff21badfbead5b0050391eaad8840f2e4fcb03b6b0fc6006f447443529e9ae6e
-SIZE (rust/crates/rustc-ap-rustc_target-659.0.0.tar.gz) = 81197
+SHA256 (rust/crates/rustc-ap-rustc_span-669.0.0.tar.gz) = ec4273af0abbe78fc4585316ab193445c848c555e9203ddc28af02330918bf30
+SIZE (rust/crates/rustc-ap-rustc_span-669.0.0.tar.gz) = 56951
SHA256 (rust/crates/rustc-ap-rustc_target-664.0.0.tar.gz) = 51347a9dadc5ad0b5916cc12d42624b31955285ad13745dbe72f0140038b84e9
SIZE (rust/crates/rustc-ap-rustc_target-664.0.0.tar.gz) = 93766
-SHA256 (rust/crates/rustc-ap-serialize-659.0.0.tar.gz) = 768b5a305669d934522712bc13502962edfde5128ea63b9e7db4000410be1dc6
-SIZE (rust/crates/rustc-ap-serialize-659.0.0.tar.gz) = 32880
+SHA256 (rust/crates/rustc-ap-rustc_target-669.0.0.tar.gz) = 6f9a2d6004ce6ad492a8eeacc2569b1c008169434b8828996d8dade4e5c6b6ee
+SIZE (rust/crates/rustc-ap-rustc_target-669.0.0.tar.gz) = 95791
SHA256 (rust/crates/rustc-hash-1.1.0.tar.gz) = 08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2
SIZE (rust/crates/rustc-hash-1.1.0.tar.gz) = 9331
SHA256 (rust/crates/rustc-rayon-0.3.0.tar.gz) = f32767f90d938f1b7199a174ef249ae1924f6e5bbdb9d112fea141e016f25b3a
SIZE (rust/crates/rustc-rayon-0.3.0.tar.gz) = 142908
SHA256 (rust/crates/rustc-rayon-core-0.3.0.tar.gz) = ea2427831f0053ea3ea73559c8eabd893133a51b251d142bacee53c62a288cb3
SIZE (rust/crates/rustc-rayon-core-0.3.0.tar.gz) = 65250
SHA256 (rust/crates/rustc_version-0.2.3.tar.gz) = 138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a
SIZE (rust/crates/rustc_version-0.2.3.tar.gz) = 10210
SHA256 (rust/crates/ryu-1.0.3.tar.gz) = 535622e6be132bccd223f4bb2b8ac8d53cda3c7a6394944d3b2b33fb974f9d76
SIZE (rust/crates/ryu-1.0.3.tar.gz) = 41983
SHA256 (rust/crates/scoped-tls-1.0.0.tar.gz) = ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2
SIZE (rust/crates/scoped-tls-1.0.0.tar.gz) = 9146
SHA256 (rust/crates/scopeguard-1.1.0.tar.gz) = d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd
SIZE (rust/crates/scopeguard-1.1.0.tar.gz) = 11470
SHA256 (rust/crates/semver-0.9.0.tar.gz) = 1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403
SIZE (rust/crates/semver-0.9.0.tar.gz) = 17344
SHA256 (rust/crates/semver-parser-0.7.0.tar.gz) = 388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3
SIZE (rust/crates/semver-parser-0.7.0.tar.gz) = 10268
SHA256 (rust/crates/serde-1.0.105.tar.gz) = e707fbbf255b8fc8c3b99abb91e7257a622caeb20a9818cbadbeeede4e0932ff
SIZE (rust/crates/serde-1.0.105.tar.gz) = 73358
SHA256 (rust/crates/serde_derive-1.0.105.tar.gz) = ac5d00fc561ba2724df6758a17de23df5914f20e41cb00f94d5b7ae42fffaff8
SIZE (rust/crates/serde_derive-1.0.105.tar.gz) = 49700
SHA256 (rust/crates/serde_json-1.0.50.tar.gz) = 78a7a12c167809363ec3bd7329fc0a3369056996de43c4b37ef3cd54a6ce4867
SIZE (rust/crates/serde_json-1.0.50.tar.gz) = 72229
SHA256 (rust/crates/sha-1-0.8.2.tar.gz) = f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df
SIZE (rust/crates/sha-1-0.8.2.tar.gz) = 12338
SHA256 (rust/crates/smallvec-0.6.13.tar.gz) = f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6
SIZE (rust/crates/smallvec-0.6.13.tar.gz) = 22760
SHA256 (rust/crates/smallvec-1.2.0.tar.gz) = 5c2fb2ec9bcd216a5b0d0ccf31ab17b5ed1d627960edff65bbe95d3ce221cefc
SIZE (rust/crates/smallvec-1.2.0.tar.gz) = 23658
SHA256 (rust/crates/stable_deref_trait-1.1.1.tar.gz) = dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8
SIZE (rust/crates/stable_deref_trait-1.1.1.tar.gz) = 8007
SHA256 (rust/crates/stacker-0.1.9.tar.gz) = 72dd941b456e1c006d6b9f27c526d5b69281288aeea8cba82c19d3843d8ccdd2
SIZE (rust/crates/stacker-0.1.9.tar.gz) = 13603
SHA256 (rust/crates/strsim-0.8.0.tar.gz) = 8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a
SIZE (rust/crates/strsim-0.8.0.tar.gz) = 9309
SHA256 (rust/crates/syn-1.0.17.tar.gz) = 0df0eb663f387145cab623dea85b09c2c5b4b0aef44e945d928e682fce71bb03
SIZE (rust/crates/syn-1.0.17.tar.gz) = 198158
SHA256 (rust/crates/synstructure-0.12.3.tar.gz) = 67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545
SIZE (rust/crates/synstructure-0.12.3.tar.gz) = 17788
SHA256 (rust/crates/tempfile-3.1.0.tar.gz) = 7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9
SIZE (rust/crates/tempfile-3.1.0.tar.gz) = 25823
SHA256 (rust/crates/termcolor-1.1.0.tar.gz) = bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f
SIZE (rust/crates/termcolor-1.1.0.tar.gz) = 17193
SHA256 (rust/crates/termize-0.1.1.tar.gz) = 1706be6b564323ce7092f5f7e6b118a14c8ef7ed0e69c8c5329c914a9f101295
SIZE (rust/crates/termize-0.1.1.tar.gz) = 10691
SHA256 (rust/crates/textwrap-0.11.0.tar.gz) = d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060
SIZE (rust/crates/textwrap-0.11.0.tar.gz) = 17322
SHA256 (rust/crates/thread_local-1.0.1.tar.gz) = d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14
SIZE (rust/crates/thread_local-1.0.1.tar.gz) = 12609
SHA256 (rust/crates/typenum-1.12.0.tar.gz) = 373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33
SIZE (rust/crates/typenum-1.12.0.tar.gz) = 36037
SHA256 (rust/crates/unicode-normalization-0.1.12.tar.gz) = 5479532badd04e128284890390c1e876ef7a993d0570b3597ae43dfa1d59afa4
SIZE (rust/crates/unicode-normalization-0.1.12.tar.gz) = 91057
SHA256 (rust/crates/unicode-width-0.1.7.tar.gz) = caaa9d531767d1ff2150b9332433f32a24622147e5ebb1f26409d5da67afd479
SIZE (rust/crates/unicode-width-0.1.7.tar.gz) = 16644
SHA256 (rust/crates/unicode-xid-0.2.0.tar.gz) = 826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c
SIZE (rust/crates/unicode-xid-0.2.0.tar.gz) = 14994
SHA256 (rust/crates/vec_map-0.8.1.tar.gz) = 05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a
SIZE (rust/crates/vec_map-0.8.1.tar.gz) = 14959
SHA256 (rust/crates/wasi-0.9.0+wasi-snapshot-preview1.tar.gz) = cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519
SIZE (rust/crates/wasi-0.9.0+wasi-snapshot-preview1.tar.gz) = 31521
SHA256 (rust/crates/winapi-0.3.8.tar.gz) = 8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6
SIZE (rust/crates/winapi-0.3.8.tar.gz) = 1128308
SHA256 (rust/crates/winapi-i686-pc-windows-gnu-0.4.0.tar.gz) = ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6
SIZE (rust/crates/winapi-i686-pc-windows-gnu-0.4.0.tar.gz) = 2918815
SHA256 (rust/crates/winapi-util-0.1.4.tar.gz) = fa515c5163a99cc82bab70fd3bfdd36d827be85de63737b40fcef2ce084a436e
SIZE (rust/crates/winapi-util-0.1.4.tar.gz) = 10089
SHA256 (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.tar.gz) = 712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f
SIZE (rust/crates/winapi-x86_64-pc-windows-gnu-0.4.0.tar.gz) = 2947998
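Every distfile here, including each vendored crate tarball, is pinned by a SHA256 and SIZE pair under a single TIMESTAMP, which is why the racer update replaces the racer-2.1.35 distfile entry with racer-2.1.36, bumps the racer crate dependency from 2.1.34 to 2.1.35, and swaps the rustc-ap-* 659.0.0 entries for 669.0.0 ones. distinfo is regenerated rather than edited by hand; one common sequence after bumping DISTVERSION, sketched here with the usual framework targets (details of the order can vary):

# in the port's directory
make makesum		# fetch the new main distfile and rewrite its checksum
make cargo-crates	# re-derive the CARGO_CRATES list from the new Cargo.lock
make makesum		# fetch the crate tarballs and rewrite distinfo again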
Index: head/devel/rust-analyzer/Makefile
===================================================================
--- head/devel/rust-analyzer/Makefile (revision 552220)
+++ head/devel/rust-analyzer/Makefile (revision 552221)
@@ -1,195 +1,196 @@
# $FreeBSD$
PORTNAME= rust-analyzer
DISTVERSION= 2020-09-28
+PORTREVISION= 1
CATEGORIES= devel
MAINTAINER= rust@FreeBSD.org
COMMENT= Experimental Rust compiler front-end for IDEs
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
USES= cargo
USE_GITHUB= yes
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
ansi_term-0.12.1 \
anyhow-1.0.32 \
anymap-0.12.1 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.1 \
backtrace-0.3.50 \
base64-0.12.3 \
bitflags-1.2.1 \
byteorder-1.3.4 \
cargo_metadata-0.11.3 \
cc-1.0.60 \
cfg-if-0.1.10 \
chalk-derive-0.29.0 \
chalk-ir-0.29.0 \
chalk-recursive-0.29.0 \
chalk-solve-0.29.0 \
chrono-0.4.18 \
cloudabi-0.1.0 \
cmake-0.1.44 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.4 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
difference-2.0.0 \
drop_bomb-0.1.5 \
either-1.6.1 \
ena-0.14.0 \
env_logger-0.7.1 \
expect-test-1.0.1 \
filetime-0.2.12 \
fixedbitset-0.2.0 \
flate2-1.0.17 \
fs-err-2.5.0 \
fsevent-2.0.2 \
fsevent-sys-3.0.2 \
fst-0.4.4 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
gimli-0.22.0 \
goblin-0.2.3 \
hashbrown-0.9.0 \
heck-0.3.1 \
hermit-abi-0.1.16 \
home-0.5.3 \
idna-0.2.0 \
indexmap-1.6.0 \
inotify-0.8.3 \
inotify-sys-0.1.3 \
instant-0.1.7 \
iovec-0.1.4 \
itertools-0.9.0 \
itoa-0.4.6 \
jod-thread-0.1.2 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
libc-0.2.77 \
libloading-0.6.3 \
libmimalloc-sys-0.1.17 \
lock_api-0.4.1 \
log-0.4.11 \
lsp-server-0.3.4 \
lsp-types-0.82.0 \
matchers-0.0.1 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.6 \
mimalloc-0.1.21 \
miniz_oxide-0.4.2 \
mio-0.6.22 \
mio-extras-2.0.6 \
miow-0.2.1 \
net2-0.2.35 \
notify-5.0.0-pre.3 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
object-0.20.0 \
once_cell-1.4.1 \
oorandom-11.1.2 \
parking_lot-0.11.0 \
parking_lot_core-0.8.0 \
percent-encoding-2.1.0 \
perf-event-0.4.5 \
perf-event-open-sys-1.0.1 \
petgraph-0.5.1 \
pico-args-0.3.4 \
plain-0.2.3 \
proc-macro2-1.0.23 \
pulldown-cmark-0.7.2 \
pulldown-cmark-to-cmark-5.0.0 \
quote-1.0.7 \
rayon-1.4.0 \
rayon-core-1.8.1 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
rowan-0.10.0 \
rustc-ap-rustc_lexer-673.0.0 \
rustc-demangle-0.1.16 \
rustc-hash-1.1.0 \
ryu-1.0.5 \
salsa-0.15.2 \
salsa-macros-0.15.2 \
same-file-1.0.6 \
scoped-tls-1.0.0 \
scopeguard-1.1.0 \
scroll-0.10.1 \
scroll_derive-0.10.2 \
semver-0.10.0 \
semver-parser-0.7.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
serde_repr-0.1.6 \
sharded-slab-0.0.9 \
slab-0.4.2 \
smallvec-1.4.2 \
smol_str-0.1.17 \
syn-1.0.42 \
synstructure-0.12.4 \
termcolor-1.1.0 \
text-size-1.0.0 \
thin-dst-1.1.0 \
thread_local-1.0.1 \
threadpool-1.8.1 \
time-0.1.44 \
tinyvec-0.3.4 \
tracing-0.1.19 \
tracing-attributes-0.1.11 \
tracing-core-0.1.16 \
tracing-log-0.1.1 \
tracing-serde-0.1.2 \
tracing-subscriber-0.2.12 \
tracing-tree-0.1.5 \
ungrammar-1.1.4 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-xid-0.2.1 \
url-2.1.1 \
version_check-0.9.2 \
walkdir-2.3.1 \
wasi-0.10.0+wasi-snapshot-preview1 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
write-json-0.1.2 \
ws2_32-sys-0.2.1
CARGO_INSTALL_PATH= crates/rust-analyzer
# attempts to install rustup toolchains
NO_TEST= yes
PLIST_FILES= bin/rust-analyzer
PORTDOCS= *
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/rust-analyzer
post-install-DOCS-on:
@cd ${WRKSRC}/docs && \
${COPYTREE_SHARE} . ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/devel/rust-bindgen/Makefile
===================================================================
--- head/devel/rust-bindgen/Makefile (revision 552220)
+++ head/devel/rust-bindgen/Makefile (revision 552221)
@@ -1,72 +1,72 @@
# $FreeBSD$
PORTNAME= bindgen
DISTVERSION= 0.54.1
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= devel
MASTER_SITES= CRATESIO
PKGNAMEPREFIX= rust-
# XXX Teach USES=cargo to have proper default
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= gecko@FreeBSD.org
COMMENT= Generate Rust bindings from C (and some C++) code
LICENSE= BSD3CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
RUN_DEPENDS= llvm${LLVM_DEFAULT}>0:devel/llvm${LLVM_DEFAULT}
USES= cargo
PLIST_FILES= bin/${PORTNAME}
CARGO_CRATES= aho-corasick-0.7.6 \
ansi_term-0.11.0 \
atty-0.2.13 \
bitflags-1.2.0 \
byteorder-1.3.2 \
cc-1.0.45 \
cexpr-0.4.0 \
cfg-if-0.1.10 \
clang-sys-0.29.0 \
clap-2.33.0 \
diff-0.1.11 \
env_logger-0.7.0 \
glob-0.3.0 \
humantime-1.3.0 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.66 \
libloading-0.5.2 \
log-0.4.8 \
memchr-2.2.1 \
nom-5.1.1 \
peeking_take_while-0.1.2 \
proc-macro2-1.0.4 \
quick-error-1.2.2 \
quote-1.0.2 \
regex-1.3.1 \
regex-syntax-0.6.12 \
rustc-hash-1.0.1 \
shlex-0.1.1 \
strsim-0.8.0 \
termcolor-1.0.5 \
textwrap-0.11.0 \
thread_local-0.3.6 \
unicode-width-0.1.6 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
version_check-0.9.1 \
which-3.0.0 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.2
post-patch:
@${REINPLACE_CMD} -e 's,"llvm-config,&${LLVM_DEFAULT},' \
${WRKSRC}/cargo-crates/clang-sys-*/build/common.rs \
${WRKSRC}/cargo-crates/clang-sys-*/src/support.rs
.include <bsd.port.mk>
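The post-patch rule above exists because bindgen's clang-sys dependency locates libclang by executing llvm-config, while FreeBSD installs that tool with a version suffix (llvm-config${LLVM_DEFAULT}). REINPLACE_CMD, the ports wrapper around in-place sed, appends the suffix inside the vendored clang-sys sources. Roughly what it expands to, assuming LLVM_DEFAULT=90 (the matched source line is illustrative):

sed -i.bak -e 's,"llvm-config,&90,' \
	${WRKSRC}/cargo-crates/clang-sys-*/build/common.rs \
	${WRKSRC}/cargo-crates/clang-sys-*/src/support.rs
# so an occurrence of "llvm-config" in those files becomes "llvm-config90"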
Index: head/devel/rust-cbindgen/Makefile
===================================================================
--- head/devel/rust-cbindgen/Makefile (revision 552220)
+++ head/devel/rust-cbindgen/Makefile (revision 552221)
@@ -1,62 +1,63 @@
# $FreeBSD$
PORTNAME= cbindgen
DISTVERSION= 0.15.0
+PORTREVISION= 1
CATEGORIES= devel
MASTER_SITES= CRATESIO
PKGNAMEPREFIX= rust-
# XXX Teach USES=cargo to have proper default
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= gecko@FreeBSD.org
COMMENT= Generate C bindings from Rust code
LICENSE= MPL20
RUN_DEPENDS= cargo:lang/${RUST_DEFAULT}
USES= cargo
PLIST_FILES= bin/${PORTNAME}
CARGO_CRATES= ansi_term-0.11.0 \
atty-0.2.14 \
autocfg-1.0.1 \
bitflags-1.2.1 \
cfg-if-0.1.10 \
clap-2.33.3 \
getrandom-0.1.15 \
hashbrown-0.9.1 \
heck-0.3.1 \
hermit-abi-0.1.16 \
indexmap-1.6.0 \
itoa-0.4.6 \
libc-0.2.77 \
log-0.4.11 \
ppv-lite86-0.2.9 \
proc-macro2-1.0.21 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.57 \
remove_dir_all-0.5.3 \
ryu-1.0.5 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
strsim-0.8.0 \
syn-1.0.41 \
tempfile-3.1.0 \
textwrap-0.11.0 \
toml-0.5.6 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
vec_map-0.8.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
.include <bsd.port.mk>
Index: head/devel/sccache/Makefile
===================================================================
--- head/devel/sccache/Makefile (revision 552220)
+++ head/devel/sccache/Makefile (revision 552221)
@@ -1,356 +1,356 @@
# $FreeBSD$
PORTNAME= sccache
DISTVERSION= 0.2.13
-PORTREVISION= 6
+PORTREVISION= 7
PORTEPOCH= 1
CATEGORIES= devel
MAINTAINER= pizzamig@FreeBSD.org
COMMENT= Like ccache with cloud storage support
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/LICENSE
ONLY_FOR_ARCHS= amd64 i386
ONLY_FOR_ARCHS_REASON= ring crate not ported to other architectures
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= mozilla
OPTIONS_DEFINE= DOCS
OPTIONS_DEFAULT= DOCS
PLIST_FILES= bin/sccache
PORTDOCS= README.md
CARGO_FEATURES= all
GH_TUPLE= saresend:selenium-rs:0314a2420da78cce7454a980d862995750771722:seleniumrs
CARGO_CRATES= adler32-1.0.4 \
aho-corasick-0.7.6 \
ansi_term-0.11.0 \
ar-0.6.2 \
arc-swap-0.4.4 \
arrayref-0.3.5 \
arrayvec-0.5.1 \
ascii-0.8.7 \
ascii-0.9.3 \
assert_cmd-0.9.1 \
atty-0.2.13 \
autocfg-0.1.7 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
base64-0.9.3 \
base64-0.10.1 \
base64-0.11.0 \
bincode-0.8.0 \
bincode-1.2.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.9 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
buf_redux-0.6.3 \
bufstream-0.1.4 \
byte-tools-0.3.1 \
byteorder-1.3.2 \
bytes-0.4.12 \
c2-chacha-0.2.3 \
case-0.1.0 \
cc-1.0.48 \
cfg-if-0.1.10 \
chrono-0.4.10 \
chunked_transfer-0.3.1 \
clap-2.33.0 \
cloudabi-0.0.3 \
combine-3.8.1 \
conhash-0.4.0 \
constant_time_eq-0.1.4 \
cookie-0.12.0 \
cookie_store-0.7.0 \
core-foundation-0.6.4 \
core-foundation-sys-0.6.2 \
counted-array-0.1.2 \
crc32fast-1.2.0 \
crossbeam-deque-0.7.2 \
crossbeam-epoch-0.8.0 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.5.0 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.0 \
crypto-mac-0.7.0 \
daemonize-0.3.0 \
derive-error-0.0.3 \
difference-2.0.0 \
digest-0.8.1 \
directories-1.0.2 \
dirs-1.0.5 \
dtoa-0.4.4 \
either-1.5.3 \
encoding_rs-0.8.20 \
env_logger-0.5.13 \
error-chain-0.11.0 \
error-chain-0.12.1 \
escargot-0.3.1 \
failure-0.1.6 \
failure_derive-0.1.6 \
fake-simd-0.1.2 \
filetime-0.1.15 \
filetime-0.2.8 \
flate2-1.0.13 \
float-cmp-0.4.0 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-cpupool-0.1.8 \
generic-array-0.12.3 \
getopts-0.2.21 \
getrandom-0.1.13 \
h2-0.1.26 \
hermit-abi-0.1.3 \
hmac-0.7.1 \
http-0.1.21 \
http-body-0.1.0 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.12.35 \
hyper-tls-0.3.2 \
hyperx-0.12.0 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.3.0 \
iovec-0.1.4 \
itertools-0.7.11 \
itoa-0.4.4 \
jobserver-0.1.19 \
jsonwebtoken-6.0.1 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.66 \
libmount-0.1.15 \
libz-sys-1.0.25 \
linked-hash-map-0.2.1 \
local-encoding-0.2.0 \
lock_api-0.3.2 \
log-0.3.9 \
log-0.4.8 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
md-5-0.8.0 \
md5-0.3.8 \
memcached-rs-0.3.0 \
memchr-1.0.2 \
memchr-2.2.1 \
memoffset-0.5.3 \
mime-0.2.6 \
mime-0.3.14 \
mime_guess-1.8.7 \
mime_guess-2.0.1 \
miniz_oxide-0.3.5 \
mio-0.6.21 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.3 \
msdos_time-0.1.6 \
multipart-0.13.6 \
native-tls-0.2.3 \
net2-0.2.33 \
nix-0.11.1 \
nix-0.14.1 \
normalize-line-endings-0.2.2 \
num-integer-0.1.41 \
num-traits-0.1.43 \
num-traits-0.2.10 \
num_cpus-1.11.1 \
number_prefix-0.2.8 \
opaque-debug-0.2.3 \
openssl-0.10.26 \
openssl-probe-0.1.2 \
openssl-sys-0.9.53 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
phf-0.7.24 \
phf_codegen-0.7.24 \
phf_generator-0.7.24 \
phf_shared-0.7.24 \
pkg-config-0.3.17 \
podio-0.1.6 \
ppv-lite86-0.2.6 \
predicates-0.9.1 \
predicates-core-0.9.0 \
predicates-tree-0.9.0 \
proc-macro2-1.0.6 \
publicsuffix-1.5.4 \
pulldown-cmark-0.0.3 \
quick-error-1.2.2 \
quote-0.3.15 \
quote-1.0.2 \
rand-0.3.23 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand-0.7.2 \
rand_chacha-0.1.1 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redis-0.9.1 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
regex-1.3.1 \
regex-syntax-0.6.12 \
remove_dir_all-0.5.2 \
reqwest-0.9.22 \
retry-0.4.0 \
ring-0.14.6 \
rouille-2.2.0 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
ryu-1.0.2 \
safemem-0.2.0 \
safemem-0.3.3 \
same-file-0.1.3 \
schannel-0.1.16 \
scopeguard-1.0.0 \
security-framework-0.3.4 \
security-framework-sys-0.3.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.103 \
serde_derive-1.0.103 \
serde_json-1.0.44 \
serde_urlencoded-0.5.5 \
sha-1-0.8.1 \
sha1-0.6.0 \
sha2-0.8.0 \
signal-hook-0.1.12 \
signal-hook-registry-1.2.0 \
siphasher-0.2.3 \
skeptic-0.4.0 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.0.0 \
socket2-0.3.11 \
spin-0.5.2 \
string-0.2.1 \
strip-ansi-escapes-0.1.0 \
strsim-0.8.0 \
subtle-1.0.0 \
syn-0.11.11 \
syn-1.0.11 \
synom-0.11.3 \
synstructure-0.12.3 \
syslog-4.0.1 \
tar-0.4.26 \
tempdir-0.3.7 \
tempfile-3.1.0 \
term-0.5.2 \
termcolor-1.0.5 \
textwrap-0.11.0 \
thread_local-0.3.6 \
threadpool-1.7.1 \
time-0.1.42 \
tiny_http-0.6.2 \
tokio-0.1.22 \
tokio-buf-0.1.1 \
tokio-codec-0.1.1 \
tokio-current-thread-0.1.6 \
tokio-executor-0.1.9 \
tokio-fs-0.1.6 \
tokio-io-0.1.12 \
tokio-named-pipes-0.1.0 \
tokio-process-0.2.4 \
tokio-reactor-0.1.11 \
tokio-serde-0.1.0 \
tokio-serde-bincode-0.1.1 \
tokio-signal-0.2.7 \
tokio-sync-0.1.7 \
tokio-tcp-0.1.3 \
tokio-threadpool-0.1.17 \
tokio-timer-0.2.12 \
tokio-udp-0.1.5 \
tokio-uds-0.2.5 \
toml-0.4.10 \
tower-0.1.1 \
tower-buffer-0.1.2 \
tower-discover-0.1.0 \
tower-layer-0.1.0 \
tower-limit-0.1.1 \
tower-load-shed-0.1.0 \
tower-retry-0.1.0 \
tower-service-0.2.0 \
tower-timeout-0.1.1 \
tower-util-0.1.0 \
tracing-0.1.10 \
tracing-attributes-0.1.5 \
tracing-core-0.1.7 \
treeline-0.1.0 \
try-lock-0.2.2 \
try_from-0.3.2 \
twoway-0.1.8 \
typenum-1.11.2 \
unicase-1.4.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.11 \
unicode-width-0.1.7 \
unicode-xid-0.0.4 \
unicode-xid-0.2.0 \
unix_socket-0.5.0 \
unreachable-1.0.0 \
untrusted-0.6.2 \
url-1.7.2 \
url-2.1.0 \
utf8parse-0.1.1 \
uuid-0.7.4 \
vcpkg-0.2.8 \
vec_map-0.8.1 \
version-compare-0.0.10 \
version_check-0.1.5 \
version_check-0.9.1 \
void-1.0.2 \
vte-0.3.3 \
walkdir-1.0.7 \
want-0.2.0 \
wasi-0.7.0 \
which-2.0.1 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.2 \
winreg-0.6.2 \
ws2_32-sys-0.2.1 \
xattr-0.2.2 \
zip-0.4.2
CARGO_USE_GITHUB= yes
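# Explanatory note (sketch): CARGO_USE_GITHUB=yes asks Mk/Uses/cargo.mk to
# satisfy the crate's git dependency -- saresend/selenium-rs from GH_TUPLE
# above -- presumably from the pre-fetched GitHub checkout rather than
# letting cargo clone it at build time.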
do-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/sccache
.include <bsd.port.mk>
Index: head/devel/sentry-cli/Makefile
===================================================================
--- head/devel/sentry-cli/Makefile (revision 552220)
+++ head/devel/sentry-cli/Makefile (revision 552221)
@@ -1,315 +1,316 @@
# $FreeBSD$
PORTNAME= sentry-cli
DISTVERSION= 1.58.0
+PORTREVISION= 1
CATEGORIES= devel
MAINTAINER= vulcan@wired.sh
COMMENT= Command line utility to work with Sentry
LICENSE= BSD3CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libcurl.so:ftp/curl \
libgit2.so:devel/libgit2
USES= cargo ssl:build
USE_GITHUB= yes
GH_ACCOUNT= getsentry
CARGO_CRATES= addr2line-0.12.2 \
adler-0.2.2 \
adler32-1.1.0 \
advapi32-sys-0.2.0 \
ahash-0.3.8 \
aho-corasick-0.7.13 \
anylog-0.5.0 \
app_dirs-1.2.1 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
assert-json-diff-1.0.3 \
assert_cmd-1.0.1 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backoff-0.1.6 \
backtrace-0.3.49 \
base64-0.10.1 \
base64-0.11.0 \
bitflags-1.2.1 \
bitmaps-2.1.0 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
brotli-sys-0.3.2 \
brotli2-0.3.2 \
bstr-0.2.13 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
cc-1.0.56 \
cfg-if-0.1.10 \
chardet-0.2.4 \
chrono-0.4.11 \
clap-2.33.1 \
cloudabi-0.0.3 \
colored-1.9.3 \
console-0.11.3 \
constant_time_eq-0.1.5 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
curl-0.4.30 \
curl-sys-0.4.32+curl-7.70.0 \
debugid-0.7.2 \
difference-2.0.0 \
digest-0.8.1 \
dirs-1.0.5 \
dirs-2.0.2 \
dirs-sys-0.3.5 \
dlv-list-0.2.2 \
dmsort-1.0.0 \
doc-comment-0.3.3 \
dotenv-0.15.0 \
dtoa-0.4.6 \
either-1.5.3 \
elementtree-0.5.0 \
encode_unicode-0.3.6 \
encoding-0.2.33 \
encoding-index-japanese-1.20141219.5 \
encoding-index-korean-1.20141219.5 \
encoding-index-simpchinese-1.20141219.5 \
encoding-index-singlebyte-1.20141219.5 \
encoding-index-tradchinese-1.20141219.5 \
encoding_index_tests-0.1.4 \
extend-0.1.2 \
failure-0.1.8 \
failure_derive-0.1.8 \
fake-simd-0.1.2 \
fallible-iterator-0.2.0 \
flate2-1.0.16 \
float-cmp-0.6.0 \
fnv-1.0.7 \
fuchsia-cprng-0.1.1 \
generic-array-0.12.3 \
getrandom-0.1.14 \
gimli-0.21.0 \
git2-0.13.6 \
glob-0.3.0 \
globset-0.4.5 \
goblin-0.2.3 \
hashbrown-0.7.2 \
hermit-abi-0.1.14 \
hostname-0.3.1 \
httparse-1.3.4 \
httpdate-0.3.2 \
idna-0.1.5 \
idna-0.2.0 \
if_chain-0.1.3 \
if_chain-1.0.0 \
ignore-0.4.16 \
im-14.3.0 \
indexmap-1.4.0 \
indicatif-0.14.0 \
insta-0.16.1 \
instant-0.1.5 \
itertools-0.9.0 \
itoa-0.4.6 \
java-properties-1.2.0 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.71 \
libgit2-sys-0.12.7+1.0.0 \
libz-sys-1.0.25 \
line-wrap-0.1.1 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.8 \
mac-process-info-0.2.0 \
maplit-1.0.2 \
match_cfg-0.1.0 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.4 \
might-be-minified-0.3.0 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.0 \
mockito-0.26.0 \
new_debug_unreachable-1.0.4 \
normalize-line-endings-0.3.0 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
number_prefix-0.3.0 \
object-0.20.0 \
ole32-sys-0.2.0 \
opaque-debug-0.2.3 \
open-1.4.0 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
ordered-multimap-0.2.4 \
osascript-0.3.0 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
pdb-0.6.0 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
phf_shared-0.8.0 \
pkg-config-0.3.17 \
plain-0.2.3 \
plist-0.5.5 \
podio-0.1.7 \
ppv-lite86-0.2.8 \
precomputed-hash-0.1.1 \
predicates-1.0.4 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
prettytable-rs-0.8.0 \
proc-macro-error-1.0.3 \
proc-macro-error-attr-1.0.3 \
proc-macro2-0.4.30 \
proc-macro2-1.0.18 \
proguard-4.0.1 \
quote-0.6.13 \
quote-1.0.7 \
r2d2-0.8.8 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rand_xoshiro-0.4.0 \
rayon-1.3.1 \
rayon-core-1.7.1 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
runas-0.2.1 \
rust-argon2-0.7.0 \
rust-ini-0.15.3 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
ryu-1.0.5 \
safemem-0.3.3 \
same-file-1.0.6 \
schannel-0.1.19 \
scheduled-thread-pool-0.2.4 \
scopeguard-1.1.0 \
scroll-0.9.2 \
scroll-0.10.1 \
scroll_derive-0.9.5 \
scroll_derive-0.10.2 \
semver-0.9.0 \
semver-parser-0.7.0 \
sentry-0.18.1 \
sentry-types-0.14.1 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.56 \
serde_urlencoded-0.6.1 \
serde_yaml-0.8.13 \
sha-1-0.8.2 \
sha1-0.6.0 \
shell32-sys-0.1.2 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
siphasher-0.3.3 \
sized-chunks-0.5.3 \
smallvec-1.4.0 \
socket2-0.3.12 \
sourcemap-5.0.0 \
stable_deref_trait-1.1.1 \
string_cache-0.8.0 \
strsim-0.8.0 \
symbolic-7.5.0 \
symbolic-common-7.5.0 \
symbolic-debuginfo-7.5.0 \
syn-0.15.44 \
syn-1.0.33 \
syn-mid-0.5.0 \
synstructure-0.12.4 \
tempfile-3.1.0 \
term-0.5.2 \
term_size-0.3.2 \
terminal_size-0.1.12 \
termios-0.3.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.3 \
treeline-0.1.0 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
uname-0.1.1 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
unix-daemonize-0.1.2 \
url-1.7.2 \
url-2.1.1 \
username-0.2.0 \
uuid-0.8.1 \
vcpkg-0.2.10 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
which-3.1.1 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xdg-2.2.0 \
xml-rs-0.8.3 \
yaml-rust-0.4.4 \
zip-0.5.6
CARGO_FEATURES= --no-default-features
PLIST_FILES= bin/${PORTNAME}
PORTDOCS= CHANGELOG.md README.md
OPTIONS_DEFINE= DOCS REPORTING
OPTIONS_DEFAULT= REPORTING
REPORTING_DESC= Enable crash reporting
REPORTING_VARS= CARGO_FEATURES+=with_crash_reporting
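# Feature-selection sketch: with the default REPORTING option enabled, the
# cargo.mk framework ends up passing roughly the following to cargo
# (illustrative, not the literal command line):
#   cargo build --no-default-features --features "with_crash_reporting" ...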
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/devel/tokei/Makefile
===================================================================
--- head/devel/tokei/Makefile (revision 552220)
+++ head/devel/tokei/Makefile (revision 552221)
@@ -1,175 +1,175 @@
# $FreeBSD$
PORTNAME= tokei
DISTVERSIONPREFIX= v
DISTVERSION= 12.0.4
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= devel
MAINTAINER= ports@FreeBSD.org
COMMENT= Display statistics about your code
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENCE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENCE-MIT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= XAMPPRocky
CARGO_CRATES= ahash-0.3.8 \
aho-corasick-0.7.10 \
ansi_term-0.11.0 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
bstr-0.2.13 \
byte-tools-0.3.1 \
bytecount-0.6.0 \
byteorder-1.3.4 \
cc-1.0.54 \
cfg-if-0.1.10 \
chrono-0.4.11 \
chrono-tz-0.5.2 \
clap-2.33.1 \
cloudabi-0.0.3 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
constant_time_eq-0.1.5 \
crossbeam-channel-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
dashmap-3.11.4 \
deunicode-0.4.3 \
digest-0.8.1 \
dirs-2.0.2 \
dirs-sys-0.3.5 \
dtoa-0.4.6 \
either-1.5.3 \
encoding_rs-0.8.23 \
encoding_rs_io-0.1.7 \
env_logger-0.7.1 \
fake-simd-0.1.2 \
fnv-1.0.7 \
generic-array-0.12.3 \
getrandom-0.1.14 \
git2-0.13.6 \
globset-0.4.5 \
globwalk-0.8.0 \
grep-matcher-0.1.4 \
grep-searcher-0.1.7 \
half-1.6.0 \
hermit-abi-0.1.14 \
hex-0.4.2 \
humansize-1.1.0 \
humantime-1.3.0 \
idna-0.2.0 \
ignore-0.4.16 \
itoa-0.4.6 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
libc-0.2.71 \
libgit2-sys-0.12.7+1.0.0 \
libz-sys-1.0.25 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.8 \
maplit-1.0.2 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.4 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.0 \
opaque-debug-0.2.3 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
parse-zoneinfo-0.3.0 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
pkg-config-0.3.17 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.5.16 \
proc-macro2-1.0.18 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.3.1 \
rayon-core-1.7.1 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rust-argon2-0.7.0 \
ryu-1.0.5 \
same-file-1.0.6 \
scopeguard-1.1.0 \
serde-1.0.114 \
serde_cbor-0.11.1 \
serde_derive-1.0.114 \
serde_json-1.0.55 \
serde_yaml-0.8.13 \
sha-1-0.8.2 \
slug-0.1.4 \
smallvec-1.4.0 \
strsim-0.8.0 \
syn-1.0.33 \
tempfile-3.1.0 \
tera-1.3.1 \
term_size-0.3.2 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.3 \
toml-0.5.6 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unic-char-property-0.9.0 \
unic-char-range-0.9.0 \
unic-common-0.9.0 \
unic-segment-0.9.0 \
unic-ucd-segment-0.9.0 \
unic-ucd-version-0.9.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
url-2.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
yaml-rust-0.4.4
# enable all output serialization formats
CARGO_FEATURES= all
PLIST_FILES= bin/tokei
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/tokei
.include <bsd.port.mk>
Index: head/dns/doh-proxy/Makefile
===================================================================
--- head/dns/doh-proxy/Makefile (revision 552220)
+++ head/dns/doh-proxy/Makefile (revision 552221)
@@ -1,27 +1,27 @@
# Created by: Timothy Beyer <beyert@cs.ucr.edu>
# $FreeBSD$
PORTNAME= doh-proxy
DISTVERSION= 0.3.3
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= dns
MASTER_SITES= CRATESIO
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= mat@FreeBSD.org
COMMENT= DNS-over-HTTP server proxy
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
PLIST_FILES= bin/doh-proxy
# make cargo-crates > Makefile.crates
.include "Makefile.crates"
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/doh-proxy
.include <bsd.port.mk>
Index: head/editors/kak-lsp/Makefile
===================================================================
--- head/editors/kak-lsp/Makefile (revision 552220)
+++ head/editors/kak-lsp/Makefile (revision 552221)
@@ -1,156 +1,156 @@
# $FreeBSD$
PORTNAME= kak-lsp
DISTVERSIONPREFIX= v
DISTVERSION= 8.0.0
-PORTREVISION= 7
+PORTREVISION= 8
CATEGORIES= editors
MAINTAINER= ports@FreeBSD.org
COMMENT= Kakoune Language Server Protocol Client
LICENSE= UNLICENSE
LICENSE_FILE= ${WRKSRC}/UNLICENSE
RUN_DEPENDS= kakoune>=2018.10.27:editors/kakoune
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= ul
CARGO_CRATES= adler32-1.0.4 \
aho-corasick-0.7.6 \
ansi_term-0.11.0 \
arc-swap-0.4.4 \
arrayref-0.3.5 \
arrayvec-0.5.1 \
atty-0.2.13 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
base64-0.10.1 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.9 \
boxfnonce-0.1.1 \
byteorder-1.3.2 \
c2-chacha-0.2.3 \
cc-1.0.48 \
cfg-if-0.1.10 \
chrono-0.4.10 \
clap-2.33.0 \
cloudabi-0.0.3 \
constant_time_eq-0.1.4 \
crc32fast-1.2.0 \
crossbeam-0.2.12 \
crossbeam-channel-0.4.2 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.2 \
daemonize-0.4.1 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
either-1.5.3 \
enum_primitive-0.1.1 \
failure-0.1.6 \
failure_derive-0.1.6 \
fuchsia-cprng-0.1.1 \
futures-0.1.29 \
getrandom-0.1.13 \
glob-0.3.0 \
idna-0.2.0 \
itertools-0.9.0 \
itoa-0.4.4 \
jsonrpc-core-14.1.0 \
lazy_static-1.4.0 \
libc-0.2.69 \
libflate-0.1.27 \
log-0.3.9 \
log-0.4.8 \
lsp-types-0.73.0 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.2.1 \
num-integer-0.1.41 \
num-traits-0.1.43 \
num-traits-0.2.10 \
percent-encoding-2.1.0 \
ppv-lite86-0.2.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.6 \
quote-0.6.13 \
quote-1.0.2 \
rand-0.7.3 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_os-0.1.3 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
regex-1.3.7 \
regex-syntax-0.6.17 \
rle-decode-fast-1.0.1 \
ropey-1.1.0 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
ryu-1.0.2 \
serde-1.0.106 \
serde_derive-1.0.106 \
serde_json-1.0.51 \
serde_repr-0.1.5 \
slog-2.5.2 \
slog-async-2.3.0 \
slog-kvfilter-0.7.0 \
slog-scope-4.3.0 \
slog-stdlog-3.0.5 \
slog-term-2.4.2 \
sloggers-0.3.5 \
smallvec-0.6.13 \
smallvec-1.1.0 \
strsim-0.8.0 \
syn-0.15.44 \
syn-1.0.11 \
synstructure-0.12.3 \
take_mut-0.2.2 \
term-0.6.1 \
textwrap-0.11.0 \
thread_local-0.3.6 \
thread_local-1.0.1 \
time-0.1.42 \
toml-0.5.6 \
trackable-0.2.23 \
trackable_derive-0.1.2 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.11 \
unicode-width-0.1.7 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
url-2.1.1 \
vec_map-0.8.1 \
wasi-0.7.0 \
whoami-0.8.1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
SUB_FILES= pkg-message
PLIST_FILES= bin/kak-lsp \
"@sample ${ETCDIR}/kak-lsp.toml.sample"
PORTDOCS= README.asciidoc
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/kak-lsp
@${MKDIR} ${STAGEDIR}${ETCDIR}
${INSTALL_DATA} ${WRKSRC}/kak-lsp.toml ${STAGEDIR}${ETCDIR}/kak-lsp.toml.sample
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/README.asciidoc ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/editors/kibi/Makefile
===================================================================
--- head/editors/kibi/Makefile (revision 552220)
+++ head/editors/kibi/Makefile (revision 552221)
@@ -1,72 +1,73 @@
# $FreeBSD$
PORTNAME= kibi
DISTVERSIONPREFIX= v
DISTVERSION= 0.2.1
+PORTREVISION= 1
CATEGORIES= editors
MAINTAINER= vulcan@wired.sh
COMMENT= Tiny text editor written in Rust
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/COPYRIGHT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= ilai-deutel
CARGO_CRATES= bitflags-1.2.1 \
cfg-if-0.1.10 \
cloudabi-0.0.3 \
getrandom-0.1.14 \
lazy_static-1.4.0 \
libc-0.2.79 \
lock_api-0.3.4 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
ppv-lite86-0.2.6 \
proc-macro2-1.0.12 \
quote-1.0.4 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
remove_dir_all-0.5.2 \
scopeguard-1.1.0 \
serial_test-0.5.0 \
serial_test_derive-0.5.0 \
smallvec-1.4.0 \
syn-1.0.18 \
tempfile-3.1.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
PORTDOCS= CHANGELOG.md README.md
OPTIONS_DEFINE= DOCS SYNTAX
OPTIONS_DEFAULT= SYNTAX
OPTIONS_SUB= yes
SYNTAX_DESC= Install Syntax-highlighting ini files
post-patch:
@${REINPLACE_CMD} -e "s|%%PREFIX%%|${PREFIX}|g" ${WRKSRC}/src/unix.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
post-install-SYNTAX-on:
@${MKDIR} ${STAGEDIR}${DATADIR}
cd ${WRKSRC} && ${COPYTREE_SHARE} syntax.d ${STAGEDIR}${DATADIR}
.include <bsd.port.mk>
Index: head/editors/parinfer-rust/Makefile
===================================================================
--- head/editors/parinfer-rust/Makefile (revision 552220)
+++ head/editors/parinfer-rust/Makefile (revision 552221)
@@ -1,132 +1,132 @@
# $FreeBSD$
PORTNAME= parinfer-rust
DISTVERSIONPREFIX= v
DISTVERSION= 0.4.3
-PORTREVISION= 7
+PORTREVISION= 8
CATEGORIES= editors
MAINTAINER= ports@FreeBSD.org
COMMENT= Infer parentheses for Clojure, Lisp, and Scheme
LICENSE= ISCL
LICENSE_FILE= ${WRKSRC}/LICENSE.md
BUILD_DEPENDS= llvm${LLVM_DEFAULT}>0:devel/llvm${LLVM_DEFAULT}
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= eraserhd
USE_LDCONFIG= yes
CARGO_CRATES= aho-corasick-0.7.3 \
ansi_term-0.11.0 \
atty-0.2.11 \
autocfg-0.1.4 \
backtrace-0.3.30 \
backtrace-sys-0.1.28 \
base-x-0.2.5 \
bindgen-0.48.1 \
bitflags-1.1.0 \
bumpalo-2.4.3 \
byteorder-1.3.2 \
cc-1.0.37 \
cexpr-0.3.5 \
cfg-if-0.1.9 \
clang-sys-0.26.4 \
clap-2.33.0 \
ctor-0.1.9 \
darling-0.9.0 \
darling_core-0.9.0 \
darling_macro-0.9.0 \
discard-1.0.4 \
emacs-0.11.0 \
emacs-macros-0.11.0 \
emacs_module-0.10.0 \
env_logger-0.6.1 \
failure-0.1.5 \
failure_derive-0.1.5 \
fnv-1.0.6 \
getopts-0.2.19 \
glob-0.2.11 \
hashbrown-0.1.8 \
humantime-1.2.0 \
ident_case-1.0.1 \
itoa-0.4.4 \
lazy_static-1.3.0 \
libc-0.2.58 \
libloading-0.5.1 \
log-0.4.6 \
memchr-2.2.0 \
nom-4.2.3 \
numtoa-0.1.0 \
peeking_take_while-0.1.2 \
proc-macro2-0.4.30 \
quick-error-1.2.2 \
quote-0.6.13 \
redox_syscall-0.1.54 \
redox_termios-0.1.1 \
regex-1.1.7 \
regex-syntax-0.6.7 \
rustc-demangle-0.1.15 \
rustc_version-0.2.3 \
ryu-0.2.8 \
scopeguard-0.3.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.92 \
serde_derive-1.0.92 \
serde_json-1.0.39 \
sha1-0.6.0 \
stdweb-0.4.17 \
stdweb-derive-0.5.1 \
stdweb-internal-macros-0.2.7 \
stdweb-internal-runtime-0.1.4 \
strsim-0.7.0 \
strsim-0.8.0 \
syn-0.15.42 \
synstructure-0.10.2 \
termcolor-1.0.5 \
termion-1.5.2 \
textwrap-0.11.0 \
thread_local-0.3.6 \
ucd-util-0.1.3 \
unicode-segmentation-1.3.0 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
utf8-ranges-1.0.3 \
vec_map-0.8.1 \
version_check-0.1.5 \
wasm-bindgen-0.2.45 \
wasm-bindgen-backend-0.2.45 \
wasm-bindgen-macro-0.2.45 \
wasm-bindgen-macro-support-0.2.45 \
wasm-bindgen-shared-0.2.45 \
which-2.0.1 \
winapi-0.3.7 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.1
PLIST_FILES= bin/parinfer-rust \
lib/libparinfer_rust.so \
share/kak/rc/extra/parinfer.kak \
share/vim/vimfiles/doc/parinfer.txt \
share/vim/vimfiles/plugin/parinfer.vim
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/parinfer-rust
${INSTALL_LIB} ${CARGO_TARGET_DIR}/*/libparinfer_rust.so \
${STAGEDIR}${PREFIX}/lib
@${MKDIR} ${STAGEDIR}${PREFIX}/share/kak/rc/extra \
${STAGEDIR}${PREFIX}/share/vim/vimfiles/doc \
${STAGEDIR}${PREFIX}/share/vim/vimfiles/plugin
${INSTALL_DATA} ${WRKSRC}/rc/parinfer.kak \
${STAGEDIR}${PREFIX}/share/kak/rc/extra
${INSTALL_DATA} ${WRKSRC}/doc/parinfer.txt \
${STAGEDIR}${PREFIX}/share/vim/vimfiles/doc
${INSTALL_DATA} ${WRKSRC}/plugin/parinfer.vim \
${STAGEDIR}${PREFIX}/share/vim/vimfiles/plugin
.include <bsd.port.mk>
Index: head/editors/xi-core/Makefile
===================================================================
--- head/editors/xi-core/Makefile (revision 552220)
+++ head/editors/xi-core/Makefile (revision 552221)
@@ -1,184 +1,184 @@
# $FreeBSD$
PORTNAME= xi-core
DISTVERSIONPREFIX= v
DISTVERSION= 0.3.0
-PORTREVISION= 8
+PORTREVISION= 9
CATEGORIES= editors
MAINTAINER= ed.arrakis@gmail.com
COMMENT= Modern editor backend written in Rust
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/../LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= xi-editor
GH_PROJECT= xi-editor
CARGO_CRATES= adler32-1.0.3 \
aho-corasick-0.7.3 \
argon2rs-0.2.5 \
arrayvec-0.4.10 \
ascii-0.9.1 \
autocfg-0.1.4 \
backtrace-0.3.32 \
backtrace-sys-0.1.29 \
base64-0.10.1 \
bincode-1.1.4 \
bitflags-1.1.0 \
blake2-rfc-0.2.18 \
block-buffer-0.7.3 \
block-padding-0.1.4 \
build_const-0.2.1 \
byte-tools-0.3.1 \
bytecount-0.5.1 \
byteorder-1.3.2 \
cc-1.0.37 \
cfg-if-0.1.9 \
chrono-0.4.7 \
cloudabi-0.0.3 \
combine-3.8.1 \
constant_time_eq-0.1.3 \
crc-1.8.1 \
crc32fast-1.2.0 \
crossbeam-0.7.1 \
crossbeam-channel-0.3.8 \
crossbeam-deque-0.7.1 \
crossbeam-epoch-0.7.1 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.5 \
digest-0.8.0 \
dirs-2.0.1 \
dirs-sys-0.3.3 \
either-1.5.2 \
failure-0.1.5 \
failure_derive-0.1.5 \
fake-simd-0.1.2 \
fern-0.5.8 \
filetime-0.2.5 \
flate2-1.0.9 \
fnv-1.0.6 \
fsevent-0.4.0 \
fsevent-sys-2.0.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
generic-array-0.12.3 \
humantime-1.2.0 \
idna-0.1.5 \
inotify-0.6.1 \
inotify-sys-0.1.3 \
iovec-0.1.2 \
itoa-0.4.4 \
jsonrpc-lite-0.5.0 \
kernel32-sys-0.2.2 \
languageserver-types-0.54.0 \
lazy_static-1.3.0 \
lazycell-1.2.1 \
libc-0.2.58 \
line-wrap-0.1.1 \
linked-hash-map-0.5.2 \
log-0.4.6 \
matches-0.1.8 \
memchr-2.2.0 \
memoffset-0.2.1 \
miniz-sys-0.1.12 \
miniz_oxide-0.2.1 \
miniz_oxide_c_api-0.2.1 \
mio-0.6.19 \
mio-extras-2.0.5 \
miow-0.2.1 \
net2-0.2.33 \
nodrop-0.1.13 \
nom-4.2.3 \
notify-4.0.12 \
num-derive-0.2.5 \
num-integer-0.1.41 \
num-traits-0.2.8 \
onig-4.3.2 \
onig_sys-69.1.0 \
opaque-debug-0.2.2 \
percent-encoding-1.0.1 \
pkg-config-0.3.14 \
plist-0.4.2 \
pom-3.0.2 \
proc-macro2-0.4.30 \
quick-error-1.2.2 \
quote-0.6.12 \
rand-0.4.6 \
rand-0.6.5 \
rand_chacha-0.1.1 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rand_hc-0.1.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.54 \
redox_users-0.3.0 \
regex-1.1.7 \
regex-syntax-0.6.7 \
remove_dir_all-0.5.2 \
rustc-demangle-0.1.15 \
ryu-0.2.8 \
safemem-0.3.0 \
same-file-1.0.4 \
scoped_threadpool-0.1.9 \
scopeguard-0.3.3 \
serde-1.0.94 \
serde_derive-1.0.94 \
serde_json-1.0.39 \
serde_test-1.0.94 \
sha2-0.8.0 \
slab-0.4.2 \
smallvec-0.6.10 \
syn-0.15.39 \
synstructure-0.10.2 \
syntect-3.2.0 \
tempdir-0.3.7 \
thread_local-0.3.6 \
time-0.1.42 \
toml-0.5.1 \
typenum-1.10.0 \
ucd-util-0.1.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-segmentation-1.3.0 \
unicode-xid-0.1.0 \
unreachable-1.0.0 \
url-1.7.2 \
url_serde-0.2.0 \
utf8-ranges-1.0.3 \
version_check-0.1.5 \
void-1.0.2 \
walkdir-2.2.8 \
winapi-0.2.8 \
winapi-0.3.7 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1 \
xml-rs-0.8.0 \
yaml-rust-0.4.3
WRKSRC_SUBDIR= rust
PLIST_FILES= bin/xi-core
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
do-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_MAN} ${WRKSRC}/../README.md ${STAGEDIR}${DOCSDIR}
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/xi-core
.include <bsd.port.mk>
Index: head/editors/xi-term/Makefile
===================================================================
--- head/editors/xi-term/Makefile (revision 552220)
+++ head/editors/xi-term/Makefile (revision 552221)
@@ -1,178 +1,178 @@
# $FreeBSD$
PORTNAME= xi-term
DISTVERSION= g20190328
-PORTREVISION= 16
+PORTREVISION= 17
CATEGORIES= editors
MAINTAINER= ed.arrakis@gmail.com
COMMENT= Terminal frontend for xi-editor
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
RUN_DEPENDS= xi-core:editors/xi-core
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= xi-frontend
GH_TAGNAME= 08dd47e
GH_TUPLE= xi-frontend:xrl:5788adfeca56ef869ed6f4064c87472593a0b334:xrl
CARGO_CRATES= adler32-1.0.3 \
ansi_term-0.11.0 \
antidote-1.0.0 \
arc-swap-0.3.7 \
arrayvec-0.4.10 \
atty-0.2.11 \
autocfg-0.1.2 \
backtrace-0.3.14 \
backtrace-sys-0.1.28 \
base64-0.8.0 \
bitflags-1.0.4 \
build_const-0.2.1 \
byteorder-1.3.1 \
bytes-0.4.12 \
cc-1.0.30 \
cfg-if-0.1.7 \
chrono-0.4.6 \
clap-2.32.0 \
cloudabi-0.0.3 \
crc-1.8.1 \
crc32fast-1.2.0 \
crossbeam-0.3.2 \
crossbeam-deque-0.7.1 \
crossbeam-epoch-0.7.1 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.5 \
dtoa-0.4.3 \
failure-0.1.5 \
failure_derive-0.1.5 \
flate2-1.0.6 \
fnv-1.0.6 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.25 \
humantime-1.2.0 \
indexmap-1.0.2 \
iovec-0.1.2 \
itoa-0.4.3 \
kernel32-sys-0.2.2 \
lazy_static-1.3.0 \
lazycell-1.2.1 \
libc-0.2.50 \
linked-hash-map-0.5.1 \
lock_api-0.1.5 \
log-0.4.6 \
log-mdc-0.1.0 \
log4rs-0.8.1 \
memoffset-0.2.1 \
miniz-sys-0.1.11 \
miniz_oxide-0.2.1 \
miniz_oxide_c_api-0.2.1 \
mio-0.6.16 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.3 \
net2-0.2.33 \
nodrop-0.1.13 \
num-integer-0.1.39 \
num-traits-0.2.6 \
num_cpus-1.10.0 \
ordered-float-1.0.1 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot_core-0.4.0 \
plist-0.2.4 \
proc-macro2-0.4.27 \
quick-error-1.2.2 \
quote-0.6.11 \
rand-0.6.5 \
rand_chacha-0.1.1 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rand_hc-0.1.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.3 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.51 \
redox_termios-0.1.1 \
rustc-demangle-0.1.13 \
rustc_version-0.2.3 \
ryu-0.2.7 \
safemem-0.2.0 \
same-file-1.0.4 \
scopeguard-0.3.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.89 \
serde-value-0.5.3 \
serde_derive-1.0.89 \
serde_json-1.0.39 \
serde_yaml-0.8.8 \
signal-hook-0.1.8 \
slab-0.4.2 \
smallvec-0.6.9 \
socket2-0.3.8 \
stable_deref_trait-1.1.1 \
strsim-0.7.0 \
syn-0.15.27 \
synstructure-0.10.1 \
syntect-2.1.0 \
termion-1.5.1 \
textwrap-0.10.0 \
thread-id-3.3.0 \
time-0.1.42 \
tokio-0.1.16 \
tokio-codec-0.1.1 \
tokio-current-thread-0.1.5 \
tokio-executor-0.1.6 \
tokio-fs-0.1.6 \
tokio-io-0.1.12 \
tokio-process-0.2.3 \
tokio-reactor-0.1.9 \
tokio-signal-0.2.7 \
tokio-sync-0.1.3 \
tokio-tcp-0.1.3 \
tokio-threadpool-0.1.12 \
tokio-timer-0.2.10 \
tokio-udp-0.1.3 \
tokio-uds-0.2.5 \
traitobject-0.1.0 \
typemap-0.3.3 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
unsafe-any-0.4.2 \
vec_map-0.8.1 \
walkdir-2.2.7 \
winapi-0.2.8 \
winapi-0.3.6 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1 \
xdg-2.2.0 \
xml-rs-0.7.0 \
yaml-rust-0.4.3
CARGO_USE_GITHUB= yes
PLIST_FILES= bin/xi-term
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
do-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_MAN} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/xi-term
.include <bsd.port.mk>
Index: head/games/abstreet/Makefile
===================================================================
--- head/games/abstreet/Makefile (revision 552220)
+++ head/games/abstreet/Makefile (revision 552221)
@@ -1,482 +1,483 @@
# $FreeBSD$
PORTNAME= abstreet
DISTVERSIONPREFIX= v
DISTVERSION= 0.2.9-49
+PORTREVISION= 1
DISTVERSIONSUFFIX= -g74aca40c0
CATEGORIES= games
MASTER_SITES= LOCAL/yuri:data
DISTFILES= ${PORTNAME}-data-${DISTVERSION}.tgz:data # updated by the update-data target
MAINTAINER= yuri@FreeBSD.org
COMMENT= Game exploring how changes to city affect drivers/cyclists/pedestrians
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo localbase:ldflags python:build xorg
USE_GITHUB= yes
GH_ACCOUNT= dabreegster
USE_XORG= xcb
MAKE_ENV= ABST_DATA_DIR=${DATADIR} ABST_PLAYER_HOME_DIR=1
GH_TUPLE= dabreegster:earcutr:e029a11b66fd27f2ca2fd457dd37c403485ebba1:earcutr \
dabreegster:seattle_traffic_signals:c2b33cc7fc4b922c8d25d88a8b07fb824d235b9d:seattle_traffic_signals \
easbar:fast_paths:192ae1997f9857791826ac5ed16892b2f692920c:fast_paths \
michaelkirk:winit:a87ae2661263ff241e8868cbf3ce65aab205863a:michaelkirk_winit
CARGO_CRATES= RustyXML-0.1.1 \
aabb-quadtree-0.1.0 \
adler32-1.0.4 \
ahash-0.2.18 \
aho-corasick-0.7.10 \
andrew-0.2.1 \
android_glue-0.2.3 \
android_log-sys-0.1.2 \
ansi_term-0.11.0 \
approx-0.3.2 \
arc-swap-0.4.7 \
arrayvec-0.5.1 \
ascii-canvas-1.0.0 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.2.3 \
backtrace-0.3.46 \
backtrace-sys-0.1.35 \
base-x-0.2.6 \
base64-0.11.0 \
base64-0.12.1 \
bincode-1.2.1 \
bit-set-0.5.1 \
bit-vec-0.5.1 \
bitflags-1.2.1 \
block-0.1.6 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
bstr-0.2.12 \
built-0.4.2 \
bumpalo-3.2.1 \
byte-tools-0.3.1 \
bytemuck-1.2.0 \
byteorder-1.3.4 \
bytes-0.5.4 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
calloop-0.4.4 \
cargo-lock-4.0.1 \
cbindgen-0.9.1 \
cc-1.0.50 \
cfg-if-0.1.10 \
cgl-0.3.2 \
chrono-0.4.11 \
clap-2.33.0 \
cloudabi-0.0.3 \
crossbeam-channel-0.4.3 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
cocoa-0.20.1 \
colorous-1.0.1 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
contour-0.2.0 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
core-graphics-0.19.0 \
core-video-sys-0.1.4 \
cpuprofiler-0.0.3 \
crc32fast-1.2.0 \
csv-1.1.3 \
csv-core-0.1.10 \
darling-0.10.2 \
darling_core-0.10.2 \
darling_macro-0.10.2 \
data-url-0.1.0 \
dbghelp-sys-0.2.0 \
debug_unreachable-0.1.1 \
deflate-0.8.4 \
derivative-2.1.1 \
diff-0.1.12 \
digest-0.8.1 \
discard-1.0.4 \
dispatch-0.2.0 \
dlib-0.4.1 \
docopt-1.1.0 \
downcast-rs-1.1.1 \
dtoa-0.4.5 \
either-1.5.3 \
ena-0.11.0 \
encoding-0.2.33 \
encoding-index-japanese-1.20141219.5 \
encoding-index-korean-1.20141219.5 \
encoding-index-simpchinese-1.20141219.5 \
encoding-index-singlebyte-1.20141219.5 \
encoding-index-tradchinese-1.20141219.5 \
encoding_index_tests-0.1.4 \
encoding_rs-0.8.22 \
enumset-1.0.0 \
enumset_derive-0.5.0 \
error-chain-0.5.0 \
euclid-0.20.10 \
failure-0.1.7 \
failure_derive-0.1.7 \
fake-simd-0.1.2 \
fixedbitset-0.1.9 \
fixedbitset-0.2.0 \
flate2-1.0.14 \
float-cmp-0.5.3 \
float_next_after-0.1.5 \
fnv-1.0.6 \
fontdb-0.1.0 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futf-0.1.4 \
futures-channel-0.3.5 \
futures-channel-preview-0.3.0-alpha.19 \
futures-core-0.3.5 \
futures-core-preview-0.3.0-alpha.19 \
futures-executor-preview-0.3.0-alpha.19 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
futures-util-preview-0.3.0-alpha.19 \
gdal-0.6.0 \
gdal-sys-0.2.0 \
generic-array-0.12.3 \
geo-0.13.0 \
geo-booleanop-0.3.0 \
geo-types-0.4.3 \
geo-types-0.5.0 \
geojson-0.19.0 \
getrandom-0.1.14 \
gl_generator-0.13.1 \
gl_generator-0.14.0 \
glob-0.3.0 \
glow-0.5.0 \
glutin-0.24.1 \
glutin_egl_sys-0.1.4 \
glutin_emscripten_sys-0.1.1 \
glutin_gles2_sys-0.1.4 \
glutin_glx_sys-0.1.6 \
glutin_wgl_sys-0.1.4 \
h2-0.2.5 \
hashbrown-0.6.3 \
heck-0.3.1 \
hermit-abi-0.1.10 \
hex-0.4.2 \
histogram-0.6.9 \
html2runes-1.0.1 \
html5ever-0.13.1 \
html5ever-atoms-0.2.2 \
htmlescape-0.3.1 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
hyper-0.13.7 \
hyper-rustls-0.20.0 \
ident_case-1.0.1 \
idna-0.2.0 \
image-0.23.4 \
include_dir-0.5.0 \
include_dir_impl-0.5.0 \
indexmap-1.3.2 \
inflate-0.4.5 \
instant-0.1.2 \
iovec-0.1.4 \
itertools-0.8.2 \
itertools-0.9.0 \
itoa-0.4.5 \
jni-sys-0.3.0 \
jobserver-0.1.21 \
js-sys-0.3.39 \
kernel32-sys-0.2.2 \
khronos_api-3.1.0 \
kurbo-0.6.1 \
lalrpop-0.16.3 \
lalrpop-util-0.16.3 \
lazy_static-0.2.11 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.68 \
libflate-0.1.27 \
libloading-0.5.2 \
libm-0.1.4 \
libm-0.2.1 \
line_drawing-0.7.0 \
lock_api-0.3.4 \
log-0.4.8 \
lru-0.4.3 \
lttb-0.2.0 \
lyon-0.15.8 \
lyon_algorithms-0.15.0 \
lyon_geom-0.15.3 \
lyon_path-0.15.2 \
lyon_tessellation-0.15.8 \
mac-0.1.1 \
malloc_buf-0.0.6 \
maplit-1.0.2 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
md5-0.7.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memmap2-0.1.0 \
memoffset-0.5.5 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.3.6 \
mio-0.6.21 \
mio-extras-2.0.6 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.5 \
nbez-0.1.0 \
ndk-0.1.0 \
ndk-glue-0.1.0 \
ndk-sys-0.1.0 \
net2-0.2.33 \
new_debug_unreachable-1.0.4 \
nix-0.14.1 \
num-integer-0.1.42 \
num-iter-0.1.40 \
num-rational-0.2.4 \
num-traits-0.1.43 \
num-traits-0.2.11 \
num_cpus-1.12.0 \
num_enum-0.4.3 \
num_enum_derive-0.4.3 \
numtoa-0.1.0 \
objc-0.2.7 \
once_cell-1.3.1 \
opaque-debug-0.2.3 \
ordered-float-1.0.2 \
ordermap-0.3.5 \
osmesa-sys-0.1.2 \
parking_lot-0.10.2 \
parking_lot_core-0.7.1 \
pdqselect-0.1.0 \
percent-encoding-2.1.0 \
petgraph-0.4.13 \
petgraph-0.5.0 \
petname-1.0.12 \
phf-0.7.24 \
phf_codegen-0.7.24 \
phf_generator-0.7.24 \
phf_shared-0.7.24 \
pico-args-0.3.3 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.5 \
pin-utils-0.1.0 \
pkg-config-0.3.17 \
png-0.16.3 \
podio-0.1.6 \
polylabel-2.2.0 \
ppv-lite86-0.2.6 \
precomputed-hash-0.1.1 \
priority-queue-1.0.0 \
proc-macro-crate-0.1.4 \
proc-macro-hack-0.5.15 \
proc-macro-nested-0.1.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.10 \
procfs-0.7.8 \
quick-xml-0.18.1 \
quote-0.3.15 \
quote-0.6.13 \
quote-1.0.3 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_distr-0.2.2 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rand_xorshift-0.2.0 \
raw-window-handle-0.3.3 \
rayon-1.4.0 \
rayon-core-1.8.0 \
rctree-0.3.3 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_termios-0.1.1 \
regex-1.3.6 \
regex-automata-0.1.9 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
reqwest-0.10.6 \
ring-0.16.12 \
rle-decode-fast-1.0.1 \
robust-0.1.2 \
roxmltree-0.13.0 \
rstar-0.7.1 \
rustc-demangle-0.1.16 \
rustc-hash-1.1.0 \
rustc-serialize-0.3.24 \
rustc_version-0.2.3 \
rustls-0.17.0 \
rusttype-0.7.9 \
rusttype-0.8.3 \
rustybuzz-0.1.1 \
ryu-1.0.3 \
same-file-1.0.6 \
scoped_threadpool-0.1.9 \
scopeguard-1.1.0 \
sct-0.6.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-0.9.15 \
serde-1.0.110 \
serde_derive-1.0.110 \
serde_json-1.0.51 \
serde_urlencoded-0.6.1 \
sha1-0.6.0 \
sha2-0.8.1 \
shared_library-0.1.9 \
sid-0.6.1 \
signal-hook-registry-1.2.0 \
simplecss-0.2.0 \
siphasher-0.2.3 \
slab-0.4.2 \
slotmap-0.4.0 \
smallvec-1.3.0 \
smithay-client-toolkit-0.6.6 \
socket2-0.3.12 \
spin-0.5.2 \
stb_truetype-0.3.1 \
stdweb-0.4.20 \
stdweb-derive-0.5.3 \
stdweb-internal-macros-0.2.9 \
stdweb-internal-runtime-0.1.5 \
stretch-0.3.2 \
string_cache-0.4.0 \
string_cache-0.7.5 \
string_cache_codegen-0.3.1 \
string_cache_codegen-0.4.4 \
string_cache_shared-0.3.0 \
strsim-0.8.0 \
strsim-0.9.3 \
svg_face-0.1.2 \
svgtypes-0.5.0 \
syn-0.11.11 \
syn-1.0.17 \
synom-0.11.3 \
synstructure-0.12.3 \
take_mut-0.2.2 \
tempfile-3.1.0 \
tendril-0.2.4 \
term-0.4.6 \
termion-1.5.5 \
textwrap-0.11.0 \
thiserror-1.0.14 \
thiserror-impl-1.0.14 \
thread_local-1.0.1 \
time-0.1.42 \
tokio-0.2.21 \
tokio-macros-0.2.5 \
tokio-rustls-0.13.1 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.18 \
tracing-core-0.1.13 \
try-lock-0.2.2 \
ttf-parser-0.6.2 \
typenum-1.11.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.12 \
unicode-script-0.5.0 \
unicode-segmentation-1.6.0 \
unicode-vo-0.1.0 \
unicode-width-0.1.7 \
unicode-xid-0.0.4 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
unreachable-0.1.1 \
untrusted-0.7.1 \
url-2.1.1 \
usvg-0.11.0 \
utf-8-0.6.0 \
uuid-0.8.1 \
vec_map-0.8.1 \
version_check-0.9.1 \
void-1.0.2 \
walkdir-2.3.1 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.62 \
wasm-bindgen-backend-0.2.62 \
wasm-bindgen-futures-0.4.12 \
wasm-bindgen-macro-0.2.62 \
wasm-bindgen-macro-support-0.2.62 \
wasm-bindgen-shared-0.2.62 \
wayland-client-0.23.6 \
wayland-commons-0.23.6 \
wayland-protocols-0.23.6 \
wayland-scanner-0.23.6 \
wayland-sys-0.23.6 \
web-sys-0.3.39 \
webbrowser-0.5.2 \
webgl_generator-0.2.0 \
webgl_stdweb-0.3.0 \
webidl-0.8.0 \
webpki-0.21.2 \
webpki-roots-0.19.0 \
widestring-0.4.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.4 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1 \
x11-dl-2.18.5 \
xdg-2.2.0 \
xml-rs-0.8.2 \
xmlparser-0.13.2 \
xmltree-0.10.0 \
xmlwriter-0.1.0 \
zip-0.5.5
CARGO_USE_GITHUB= yes
BINARY_ALIAS= python3=${PYTHON_CMD}
# To update this port:
# 1. Update CARGO_CRATES based on 'make cargo-crates'
# 2. Run 'make update-data'
# 3. Upload the abstreet-data distfile
# 4. Run 'make makesum clean'
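# For illustration, assuming a standard ports checkout, the steps above boil
# down to something like:
#   cd /usr/ports/games/abstreet && make cargo-crates	# step 1, paste into Makefile
#   make update-data && make makesum clean		# steps 2 and 4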
update-data: configure
# see https://github.com/dabreegster/abstreet/blob/master/docs/dev.md#getting-started for the list of supported cities
# huge_seattle is excluded as per this comment: https://github.com/dabreegster/abstreet/issues/192#issuecomment-657938969
@(${ECHO} "runtime: seattle,krakow,berlin" && \
${ECHO} "input: seattle,krakow,berlin") > ${WRKSRC}/data/config
@cd ${WRKSRC} && \
${SETENV} ${MAKE_ENV} ${CARGO_CARGO_RUN} run --bin updater && \
${TAR} czf ${DISTDIR}/${PORTNAME}-data-${DISTVERSION}.tgz data/system/cities data/system/maps data/system/prebaked_results data/system/scenarios data/config && \
${ECHO} "Please upload the file ${DISTDIR}/${PORTNAME}-data-${DISTVERSION}.tgz"
do-install:
${INSTALL_PROGRAM} ${WRKDIR}/target/release/game ${STAGEDIR}${PREFIX}/bin/${PORTNAME}-game
cd ${WRKSRC}/data && ${COPYTREE_SHARE} . ${STAGEDIR}${DATADIR}
cd ${WRKDIR}/data && ${COPYTREE_SHARE} . ${STAGEDIR}${DATADIR}
.include <bsd.port.mk>
Index: head/games/dose-response/Makefile
===================================================================
--- head/games/dose-response/Makefile (revision 552220)
+++ head/games/dose-response/Makefile (revision 552221)
@@ -1,230 +1,230 @@
# $FreeBSD$
PORTNAME= dose-response
DISTVERSIONPREFIX= v
DISTVERSION= 1.0.0
-PORTREVISION= 9
+PORTREVISION= 10
CATEGORIES= games
MAINTAINER= greg@unrelenting.technology
COMMENT= Open-world roguelike game where you play an addict
LICENSE= GPLv3+
LICENSE_FILE= ${WRKSRC}/COPYING.txt
LIB_DEPENDS+= libSDL2.so:devel/sdl20
USES= cargo gnome
USE_GITHUB= yes
GH_ACCOUNT= tryjumping
GH_TAGNAME= 179c326
CARGO_FEATURES+= prod sdl2/use-pkgconfig
CARGO_CRATES= adler32-1.0.3 \
aho-corasick-0.7.6 \
andrew-0.2.1 \
android_glue-0.2.3 \
ansi_term-0.11.0 \
approx-0.3.0 \
arrayvec-0.4.7 \
ascii-0.9.1 \
atty-0.2.11 \
backtrace-0.3.11 \
backtrace-sys-0.1.28 \
bincode-1.0.1 \
bitflags-1.0.4 \
block-0.1.6 \
byteorder-1.2.7 \
cc-1.0.25 \
cfg-if-0.1.6 \
cgl-0.2.3 \
chrono-0.4.9 \
clap-2.32.0 \
cloudabi-0.0.3 \
cmake-0.1.35 \
cocoa-0.18.4 \
color_quant-1.0.1 \
combine-3.8.1 \
core-foundation-0.6.3 \
core-foundation-sys-0.6.2 \
core-graphics-0.17.3 \
crc32fast-1.2.0 \
crossbeam-deque-0.2.0 \
crossbeam-epoch-0.3.1 \
crossbeam-utils-0.2.2 \
deflate-0.7.19 \
derivative-1.0.2 \
dlib-0.4.1 \
downcast-rs-1.0.3 \
either-1.5.0 \
encoding_rs-0.8.20 \
error-chain-0.10.0 \
filetime-0.2.7 \
flate2-1.0.12 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
gif-0.10.1 \
gl-0.10.0 \
gl_generator-0.9.0 \
gl_generator-0.10.0 \
gl_generator-0.11.0 \
gleam-0.6.7 \
glutin-0.21.0 \
glutin_egl_sys-0.1.3 \
glutin_emscripten_sys-0.1.0 \
glutin_gles2_sys-0.1.3 \
glutin_glx_sys-0.1.5 \
glutin_wgl_sys-0.1.3 \
image-0.20.1 \
inflate-0.4.3 \
itoa-0.4.3 \
jpeg-decoder-0.1.15 \
khronos_api-2.2.0 \
khronos_api-3.1.0 \
lazy_static-1.3.0 \
libc-0.2.65 \
libloading-0.5.0 \
line_drawing-0.7.0 \
linked-hash-map-0.5.2 \
lock_api-0.1.5 \
lock_api-0.3.1 \
log-0.3.9 \
log-0.4.6 \
log-panics-2.0.0 \
lzw-0.10.0 \
malloc_buf-0.0.6 \
memchr-2.2.1 \
memmap-0.7.0 \
memoffset-0.2.1 \
metadeps-1.1.2 \
miniz_oxide-0.3.3 \
nix-0.11.0 \
nix-0.13.0 \
nodrop-0.1.13 \
num-0.1.42 \
num-derive-0.2.3 \
num-integer-0.1.39 \
num-iter-0.1.37 \
num-rational-0.2.1 \
num-traits-0.2.6 \
num_cpus-1.8.0 \
objc-0.2.6 \
oorandom-11.0.1 \
ordered-float-1.0.1 \
osmesa-sys-0.1.2 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot-0.9.0 \
parking_lot_core-0.4.0 \
parking_lot_core-0.6.2 \
percent-encoding-2.1.0 \
pkg-config-0.3.14 \
png-0.12.0 \
proc-macro2-0.4.24 \
quote-0.6.10 \
rand-0.6.0 \
rand_chacha-0.1.0 \
rand_core-0.3.0 \
rand_hc-0.1.0 \
rand_isaac-0.1.0 \
rand_pcg-0.1.1 \
rand_xorshift-0.1.0 \
raw-window-handle-0.3.1 \
rayon-1.0.3 \
rayon-core-1.4.1 \
redox_syscall-0.1.42 \
redox_termios-0.1.1 \
regex-1.3.1 \
regex-syntax-0.6.12 \
rustc-demangle-0.1.13 \
rustc_version-0.2.3 \
rusttype-0.7.3 \
ryu-0.2.7 \
same-file-1.0.4 \
scoped_threadpool-0.1.9 \
scopeguard-0.3.3 \
scopeguard-1.0.0 \
sdl2-0.32.2 \
sdl2-sys-0.32.6 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.80 \
serde_derive-1.0.80 \
serde_json-1.0.33 \
shared_library-0.1.9 \
simplelog-0.5.3 \
smallvec-0.6.6 \
smithay-client-toolkit-0.4.5 \
stable_deref_trait-1.1.1 \
stb_truetype-0.2.4 \
strsim-0.7.0 \
syn-0.15.21 \
tar-0.4.26 \
termion-1.5.1 \
textwrap-0.10.0 \
thread_local-0.3.6 \
tiff-0.2.1 \
time-0.1.40 \
toml-0.2.1 \
toml-0.4.10 \
toml_edit-0.1.5 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
unidiff-0.3.1 \
unreachable-1.0.0 \
vec_map-0.8.1 \
void-1.0.2 \
walkdir-2.2.7 \
wayland-client-0.21.4 \
wayland-commons-0.21.4 \
wayland-protocols-0.21.4 \
wayland-scanner-0.21.4 \
wayland-sys-0.21.4 \
winapi-0.3.6 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.1 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winit-0.19.4 \
winres-0.1.8 \
x11-dl-2.18.3 \
xattr-0.2.2 \
xdg-2.2.0 \
xml-rs-0.7.0 \
xml-rs-0.8.0 \
zmq-0.8.2 \
zmq-sys-0.8.2
PLIST_FILES= bin/${PORTNAME}
.for size in 16 32 48 64 256
PLIST_FILES+= share/icons/hicolor/${size}x${size}/apps/${PORTNAME}.png
.endfor
PORTDOCS= CHANGELOG.md DEVELOPER-NOTES.md README.md
INSTALLS_ICONS= yes
DESKTOP_ENTRIES= "Dose Response" \
"${COMMENT}" \
"${PORTNAME}" \
"${PORTNAME}" \
"Games;" \
false
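# (DESKTOP_ENTRIES fields above, in order: Name, Comment, Icon, Exec,
# Categories, StartupNotify.)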
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.for size in 16 32 48 64 256
@${MKDIR} ${STAGEDIR}${LOCALBASE}/share/icons/hicolor/${size}x${size}/apps
${INSTALL_DATA} ${WRKSRC}/assets/icon_${size}x${size}.png \
${STAGEDIR}${LOCALBASE}/share/icons/hicolor/${size}x${size}/apps/${PORTNAME}.png
.endfor
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
.for f in ${PORTDOCS}
${INSTALL_DATA} ${WRKSRC}/${f} ${STAGEDIR}${DOCSDIR}
.endfor
.include <bsd.port.mk>
Index: head/games/genact/Makefile
===================================================================
--- head/games/genact/Makefile (revision 552220)
+++ head/games/genact/Makefile (revision 552221)
@@ -1,147 +1,148 @@
# $FreeBSD$
PORTNAME= genact
DISTVERSIONPREFIX= v
DISTVERSION= 0.10.0
+PORTREVISION= 1
CATEGORIES= games
MAINTAINER= 0mp@FreeBSD.org
COMMENT= Nonsense activity generator
LICENSE= APACHE20 BSD3CLAUSE ISCL MIT UNLICENSE
LICENSE_COMB= multi
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= svenstaro
PLIST_FILES= bin/genact
CARGO_CRATES= aho-corasick-0.7.13 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
async-attributes-1.1.1 \
async-channel-1.4.0 \
async-executor-0.1.2 \
async-io-0.1.11 \
async-mutex-1.1.5 \
async-std-1.6.3 \
async-task-3.0.0 \
atomic-waker-1.0.0 \
atty-0.2.14 \
autocfg-1.0.0 \
bitflags-1.2.1 \
blocking-0.5.1 \
bumpalo-3.4.0 \
cache-padded-1.1.1 \
cc-1.0.58 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clap-2.33.3 \
concurrent-queue-1.2.2 \
console_error_panic_hook-0.1.6 \
crossbeam-utils-0.7.2 \
ctrlc-3.1.6 \
event-listener-2.3.3 \
fake-2.2.3 \
fastrand-1.3.4 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-lite-0.1.11 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
getrandom-0.1.14 \
heck-0.3.1 \
hermit-abi-0.1.15 \
humansize-1.1.0 \
humantime-2.0.1 \
idna-0.2.0 \
instant-0.1.6 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
kv-log-macro-1.0.7 \
lazy_static-1.4.0 \
libc-0.2.74 \
libm-0.2.1 \
log-0.4.11 \
matches-0.1.8 \
memchr-2.3.3 \
multitask-0.2.0 \
nix-0.17.0 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.0 \
parking-1.0.6 \
parking-2.0.0 \
percent-encoding-2.1.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
polling-0.1.4 \
ppv-lite86-0.2.8 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-1.0.19 \
progress_string-0.1.1 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_distr-0.3.0 \
rand_hc-0.2.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
scoped-tls-1.0.0 \
slab-0.4.2 \
socket2-0.3.12 \
strsim-0.8.0 \
structopt-0.3.17 \
structopt-derive-0.4.10 \
syn-1.0.38 \
term_size-1.0.0-beta1 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
vec-arena-0.5.0 \
vec_map-0.8.2 \
version_check-0.9.2 \
void-1.0.2 \
waker-fn-1.0.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.4.17 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
wepoll-sys-stjepang-1.0.6 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
yansi-0.5.0
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/genact
.include <bsd.port.mk>
Index: head/games/jaggedalliance2/Makefile
===================================================================
--- head/games/jaggedalliance2/Makefile (revision 552220)
+++ head/games/jaggedalliance2/Makefile (revision 552221)
@@ -1,140 +1,140 @@
# Created by: Christoph Mallon <christoph.mallon@gmx.de>
# $FreeBSD$
PORTNAME= ja2
DISTVERSIONPREFIX= v
DISTVERSION= 0.17.0
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= games
MAINTAINER= ports@FreeBSD.org
COMMENT= Port of "Jagged Alliance 2" using SDL
LICENSE= PD SFI-SCLA
LICENSE_COMB= multi
LICENSE_NAME_SFI-SCLA= Strategy First Inc. Source Code License Agreement
LICENSE_FILE_SFI-SCLA= ${WRKSRC}/SFI\ Source\ Code\ license\ agreement.txt
LICENSE_PERMS_SFI-SCLA= dist-mirror no-dist-sell pkg-mirror no-pkg-sell \
auto-accept
# Ignore this if you feel adventurous
ONLY_FOR_ARCHS= aarch64 amd64 arm armv6 armv7 i386 powerpc64le
ONLY_FOR_ARCHS_REASON= Needs a little endian environment
BUILD_DEPENDS= string_theory>=3.3:devel/string_theory
LIB_DEPENDS= libfltk.so:x11-toolkits/fltk
USES= cargo cmake compiler:c++11-lang pkgconfig sdl
USE_GITHUB= yes
GH_ACCOUNT= ja2-stracciatella
GH_PROJECT= ja2-stracciatella
USE_LDCONFIG= yes
USE_SDL= sdl2
CMAKE_ARGS= -DEXTRA_DATA_DIR:PATH="${DATADIR}"
CMAKE_OFF= LOCAL_STRING_THEORY_LIB WITH_UNITTESTS
CARGO_CRATES= aho-corasick-0.7.10 \
ansi_term-0.11.0 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
caseless-0.2.1 \
cbindgen-0.13.2 \
cfg-if-0.1.10 \
chrono-0.4.11 \
clap-2.33.0 \
constant_time_eq-0.1.5 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.1 \
crossbeam-utils-0.7.2 \
digest-0.8.1 \
dirs-1.0.5 \
dunce-1.0.0 \
either-1.5.3 \
generic-array-0.12.3 \
getopts-0.2.21 \
getrandom-0.1.14 \
hermit-abi-0.1.10 \
hex-0.3.2 \
indexmap-1.3.2 \
itoa-0.4.5 \
json_comments-0.2.0 \
lazy_static-1.4.0 \
libc-0.2.68 \
log-0.4.8 \
maybe-uninit-2.0.0 \
md-5-0.8.0 \
memchr-2.3.3 \
memoffset-0.5.4 \
num-integer-0.1.42 \
num-traits-0.2.11 \
num_cpus-1.12.0 \
opaque-debug-0.2.3 \
ppv-lite86-0.2.6 \
proc-macro2-1.0.10 \
quote-1.0.3 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.3.0 \
rayon-core-1.7.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.6 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
rust-argon2-0.7.0 \
ryu-1.0.3 \
scopeguard-1.1.0 \
serde-1.0.105 \
serde_derive-1.0.105 \
serde_json-1.0.50 \
simplelog-0.6.0 \
smallvec-1.2.0 \
strsim-0.8.0 \
syn-1.0.17 \
tempfile-3.1.0 \
term-0.5.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.42 \
toml-0.5.6 \
typenum-1.11.2 \
unicode-normalization-0.1.12 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
CARGO_CARGOLOCK= ${WRKSRC}/rust/Cargo.lock
CARGO_CARGOTOML= ${WRKSRC}/rust/Cargo.toml
CARGO_BUILD= no
CARGO_INSTALL= no
CARGO_TEST= no
MAKE_ENV= ${CARGO_ENV:NCARGO_TARGET_DIR=*}
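# Explanatory note: the :NCARGO_TARGET_DIR=* modifier copies CARGO_ENV into
# MAKE_ENV minus the CARGO_TARGET_DIR=... entry, presumably so the CMake-driven
# cargo invocations keep their own target directory.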
LDFLAGS+= -Wl,--as-needed
post-patch:
@${REINPLACE_CMD} -e 's|/some/place/where/the/data/is|${DATADIR}|' \
${WRKSRC}/rust/stracciatella/src/stracciatella.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/ja2 \
${STAGEDIR}${PREFIX}/bin/ja2-launcher \
${STAGEDIR}${PREFIX}/bin/ja2-resource-pack
.include <bsd.port.mk>
Index: head/games/veloren/Makefile
===================================================================
--- head/games/veloren/Makefile (revision 552220)
+++ head/games/veloren/Makefile (revision 552221)
@@ -1,572 +1,572 @@
# $FreeBSD$
PORTNAME= veloren
DISTVERSIONPREFIX= v
DISTVERSION= 0.7.0
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= games
MASTER_SITES= https://veloren.net/icons/favicon/:icon \
LOCAL/jbeich:assets
DISTFILES= android-icon-192x192.png:icon \
${PORTNAME}-assets-${DISTVERSIONFULL}.tar.xz:assets
EXTRACT_ONLY= ${DISTFILES:N*\:icon:C/:.*//}
MAINTAINER= jbeich@FreeBSD.org
COMMENT= Multiplayer voxel RPG written in Rust
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libasound.so:audio/alsa-lib
RUN_DEPENDS= ${LOCALBASE}/lib/alsa-lib/libasound_module_pcm_oss.so:audio/alsa-plugins
USES= cargo gnome python:3.4+,build xorg
USE_GNOME= gtk30
USE_XORG= xcb
USE_GITHUB= nodefault
GH_TUPLE= zesterer:euc:c9a7c17a03d45fce00caeeca09afa1e1558cd183:euc \
Imberflur:guillotiere:42c298f5bcf0f95f1a004360d05e25ca3711e9ed:guillotiere \
bekker:msgbox-rs:68fe39a60019b38a1569ae4e9ed796a0f0542673:msgbox \
xMAC94x:portpicker-rs:9d6df36c53c94684080a64a7212dd6bfc3617ee4:portpicker \
amethyst:specs:7a2e348ab2223818bad487695c66c43db88050a5:specs \
Imberflur:winit:e98133adf2abbfc4368f6c069d0beb2b8b688b42:winit
USE_GITLAB= yes
GL_COMMIT= 8f8b20c9139fbe1ddfb1937e3264e592d42f4fd0
GL_TUPLE= veloren:auth:b943c85e4a38f5ec60cd18c34c73097640162bfe:authcommon \
veloren:conrod:1ab6eccf94b16a8977a3274b31d4dbfef9cf9a30:conrod_core \
veloren:specs-idvs:fcb0b2306b571f62f9f85d89e79e087454d95efd:specsidvs
INSTALLS_ICONS= yes
PLIST_FILES= bin/${PORTNAME}-chat-cli \
bin/${PORTNAME}-server-cli \
bin/${PORTNAME}-voxygen \
share/icons/hicolor/192x192/apps/${PORTNAME}.png
PORTDATA= *
.if exists(/usr/bin/ld.lld) && ${/usr/bin/ld:L:tA} != /usr/bin/ld.lld
# veloren_voxygen*rcgu* has 71718 section headers, breaking old GNU ld.bfd in base
LDFLAGS+= -fuse-ld=lld
.endif
DESKTOP_ENTRIES="Veloren (client)" \
"" \
"${PORTNAME}" \
"${PORTNAME}-voxygen" \
"Game;RolePlaying;" \
""
CARGO_CRATES= addr2line-0.12.1 \
adler32-1.0.4 \
ahash-0.3.8 \
aho-corasick-0.7.10 \
alsa-sys-0.1.2 \
andrew-0.2.1 \
android_glue-0.2.3 \
android_log-sys-0.1.2 \
ansi_term-0.11.0 \
anymap-0.12.1 \
approx-0.3.2 \
arr_macro-0.1.3 \
arr_macro_impl-0.1.3 \
arraygen-0.1.13 \
arrayref-0.3.6 \
arrayvec-0.4.12 \
arrayvec-0.5.1 \
ascii-1.0.0 \
async-std-1.5.0 \
async-task-1.3.1 \
atk-sys-0.6.0 \
atom-0.3.5 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.48 \
base-x-0.2.6 \
base64-0.11.0 \
base64-0.12.1 \
bincode-1.2.1 \
bindgen-0.53.3 \
bitflags-1.2.1 \
bitvec-0.17.4 \
blake2b_simd-0.5.10 \
block-0.1.6 \
broadcaster-1.0.0 \
bstr-0.2.13 \
bumpalo-3.4.0 \
byteorder-0.5.3 \
byteorder-1.3.4 \
bytes-0.4.12 \
c_vec-1.3.3 \
cairo-rs-0.4.1 \
cairo-sys-rs-0.6.0 \
calloop-0.4.4 \
cast-0.2.3 \
cc-1.0.54 \
cexpr-0.4.0 \
cfg-if-0.1.10 \
cgl-0.3.2 \
chrono-0.4.11 \
chunked_transfer-1.2.0 \
clang-sys-0.29.3 \
clap-2.33.1 \
clipboard-win-2.2.0 \
cloudabi-0.0.3 \
cocoa-0.19.1 \
cocoa-0.20.2 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
const-tweaker-0.3.1 \
const-tweaker-attribute-0.5.0 \
constant_time_eq-0.1.5 \
cookie-0.12.0 \
copypasta-0.6.3 \
core-foundation-0.6.4 \
core-foundation-0.7.0 \
core-foundation-sys-0.6.2 \
core-foundation-sys-0.7.0 \
core-graphics-0.17.3 \
core-graphics-0.19.2 \
core-video-sys-0.1.4 \
coreaudio-rs-0.9.1 \
coreaudio-sys-0.2.4 \
cpal-0.11.0 \
crc32fast-1.2.0 \
criterion-0.3.2 \
criterion-plot-0.4.2 \
crossbeam-0.7.2 \
crossbeam-channel-0.3.9 \
crossbeam-channel-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.7.2 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.1.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
ctor-0.1.15 \
daggy-0.5.0 \
darling-0.10.2 \
darling_core-0.10.2 \
darling_macro-0.10.2 \
dashmap-3.11.4 \
data-encoding-2.2.1 \
deflate-0.7.20 \
derivative-2.1.1 \
deunicode-1.1.1 \
diesel-1.4.5 \
diesel_derives-1.4.1 \
diesel_migrations-1.4.0 \
directories-next-1.0.1 \
dirs-sys-next-0.1.0 \
discard-1.0.4 \
dispatch-0.1.4 \
dispatch-0.2.0 \
dlib-0.4.2 \
dot_vox-4.1.0 \
dotenv-0.15.0 \
downcast-rs-1.1.1 \
draw_state-0.8.0 \
either-1.5.3 \
enum-iterator-0.6.0 \
enum-iterator-derive-0.6.0 \
error-chain-0.12.2 \
euclid-0.19.9 \
euclid_macros-0.1.0 \
failure-0.1.8 \
failure_derive-0.1.8 \
filetime-0.2.10 \
find_folder-0.3.0 \
fixedbitset-0.1.9 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fsevent-2.0.2 \
fsevent-sys-3.0.2 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-cpupool-0.1.8 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-timer-2.0.2 \
futures-util-0.3.5 \
fxhash-0.2.1 \
gdk-0.8.0 \
gdk-pixbuf-0.4.0 \
gdk-pixbuf-sys-0.6.0 \
gdk-sys-0.6.0 \
getrandom-0.1.14 \
gfx-0.18.2 \
gfx_core-0.9.2 \
gfx_device_gl-0.16.2 \
gfx_gl-0.6.1 \
gilrs-0.7.4 \
gilrs-core-0.2.6 \
gimli-0.21.0 \
gio-0.4.1 \
gio-sys-0.6.0 \
git2-0.13.6 \
gl_generator-0.13.1 \
gl_generator-0.14.0 \
glib-0.5.0 \
glib-sys-0.6.0 \
glob-0.3.0 \
glsl-include-0.3.1 \
glutin-0.24.1 \
glutin_egl_sys-0.1.4 \
glutin_emscripten_sys-0.1.1 \
glutin_gles2_sys-0.1.4 \
glutin_glx_sys-0.1.6 \
glutin_wgl_sys-0.1.4 \
gobject-sys-0.6.0 \
gtk-0.4.1 \
gtk-sys-0.6.0 \
h2-0.1.26 \
hashbrown-0.7.2 \
heck-0.3.1 \
hermit-abi-0.1.13 \
hex-0.3.2 \
hibitset-0.6.3 \
horrorshow-0.8.3 \
hound-3.4.0 \
http-0.1.21 \
http-body-0.1.0 \
http-service-0.4.0 \
http-service-hyper-0.4.1 \
httparse-1.3.4 \
hyper-0.12.35 \
ident_case-1.0.1 \
idna-0.1.5 \
idna-0.2.0 \
image-0.22.5 \
indexmap-1.4.0 \
inflate-0.4.5 \
inotify-0.8.3 \
inotify-sys-0.1.3 \
instant-0.1.4 \
io-kit-sys-0.1.0 \
iovec-0.1.4 \
itertools-0.9.0 \
itoa-0.4.5 \
jni-sys-0.3.0 \
jobserver-0.1.21 \
js-sys-0.3.40 \
kernel32-sys-0.2.2 \
khronos_api-3.1.0 \
kv-log-macro-1.0.6 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
lewton-0.10.1 \
libc-0.2.71 \
libgit2-sys-0.12.7+1.0.0 \
libloading-0.5.2 \
libloading-0.6.2 \
libsqlite3-sys-0.18.0 \
libssh2-sys-0.2.17 \
libudev-sys-0.1.4 \
libz-sys-1.0.25 \
line_drawing-0.7.0 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.8 \
lz4-compress-0.1.1 \
mach-0.2.3 \
malloc_buf-0.0.6 \
matchers-0.0.1 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.4 \
migrations_internals-1.4.1 \
migrations_macros-1.4.2 \
mime-0.3.16 \
minifb-0.14.0 \
mio-0.6.22 \
mio-extras-2.0.6 \
mio-uds-0.6.8 \
miow-0.2.1 \
mopa-0.2.2 \
ndk-0.1.0 \
ndk-glue-0.1.0 \
ndk-sys-0.1.0 \
net2-0.2.34 \
nix-0.14.1 \
nix-0.15.0 \
nodrop-0.1.14 \
noise-0.6.0 \
nom-4.2.3 \
nom-5.1.1 \
notify-5.0.0-pre.3 \
num-0.1.42 \
num-0.2.1 \
num-bigint-0.2.6 \
num-complex-0.2.4 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.2.4 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
num_enum-0.4.3 \
num_enum_derive-0.4.3 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
object-0.19.0 \
ogg-0.7.0 \
old_school_gfx_glutin_ext-0.24.0 \
once_cell-1.4.0 \
oorandom-11.1.1 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
orbclient-0.3.27 \
ordered-float-1.0.2 \
osmesa-sys-0.1.2 \
packed_simd-0.3.3 \
pango-0.4.0 \
pango-sys-0.6.0 \
parking_lot-0.9.0 \
parking_lot-0.10.2 \
parking_lot_core-0.6.2 \
parking_lot_core-0.7.2 \
peeking_take_while-0.1.2 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
petgraph-0.4.13 \
pin-project-0.4.20 \
pin-project-internal-0.4.20 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
piston-float-0.3.0 \
piston-viewport-0.5.0 \
pistoncore-input-0.24.0 \
pkg-config-0.3.17 \
plotters-0.2.15 \
png-0.15.3 \
ppv-lite86-0.2.8 \
proc-macro-crate-0.1.5 \
proc-macro-error-0.4.12 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-0.4.12 \
proc-macro-error-attr-1.0.4 \
proc-macro-hack-0.5.16 \
proc-macro-nested-0.1.5 \
proc-macro2-0.4.30 \
proc-macro2-1.0.18 \
prometheus-0.9.0 \
qstring-0.7.2 \
quick-error-1.2.3 \
quote-0.6.13 \
quote-1.0.7 \
radium-0.3.0 \
rand-0.5.6 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_pcg-0.2.1 \
rand_xorshift-0.1.1 \
raw-window-handle-0.3.3 \
rayon-1.3.0 \
rayon-core-1.7.0 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
ring-0.16.14 \
rodio-0.11.0 \
ron-0.6.0 \
roots-0.0.5 \
route-recognizer-0.1.13 \
rust-argon2-0.7.0 \
rust-argon2-0.8.2 \
rustc-demangle-0.1.16 \
rustc-hash-1.1.0 \
rustc_version-0.2.3 \
rustls-0.17.0 \
rusttype-0.7.9 \
rusttype-0.8.3 \
rusty-xinput-1.2.0 \
ryu-1.0.5 \
same-file-1.0.6 \
scan_fmt-0.2.5 \
scopeguard-1.1.0 \
sct-0.6.0 \
sdl2-0.32.2 \
sdl2-sys-0.32.6 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.55 \
serde_qs-0.5.2 \
sha1-0.6.0 \
sharded-slab-0.0.9 \
shared_library-0.1.9 \
shlex-0.1.1 \
shred-0.10.2 \
shred-derive-0.6.2 \
shrev-1.1.1 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.4.0 \
smithay-client-toolkit-0.6.6 \
smithay-clipboard-0.4.0 \
spin-0.5.2 \
static_assertions-1.1.0 \
stb_truetype-0.3.1 \
stdweb-0.1.3 \
stdweb-0.4.20 \
stdweb-derive-0.5.3 \
stdweb-internal-macros-0.2.9 \
stdweb-internal-runtime-0.1.5 \
string-0.2.1 \
strsim-0.8.0 \
strsim-0.9.3 \
structopt-0.3.16 \
structopt-derive-0.4.9 \
sum_type-0.2.0 \
svg_fmt-0.2.1 \
syn-0.15.44 \
syn-1.0.33 \
syn-mid-0.5.0 \
synstructure-0.12.4 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
tide-0.6.0 \
time-0.1.43 \
tiny_http-0.7.0 \
tinytemplate-1.1.0 \
tinyvec-0.3.3 \
tokio-0.1.22 \
tokio-buf-0.1.1 \
tokio-current-thread-0.1.7 \
tokio-executor-0.1.10 \
tokio-io-0.1.13 \
tokio-reactor-0.1.12 \
tokio-sync-0.1.8 \
tokio-tcp-0.1.4 \
tokio-threadpool-0.1.18 \
tokio-timer-0.2.13 \
toml-0.5.6 \
tracing-0.1.15 \
tracing-appender-0.1.0 \
tracing-attributes-0.1.8 \
tracing-core-0.1.10 \
tracing-futures-0.2.4 \
tracing-log-0.1.1 \
tracing-serde-0.1.1 \
tracing-subscriber-0.2.6 \
treeculler-0.1.0 \
try-lock-0.2.2 \
tuple_utils-0.3.0 \
tynm-0.1.4 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.12 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
untrusted-0.7.1 \
ureq-1.3.0 \
url-1.7.2 \
url-2.1.1 \
uuid-0.8.1 \
uvth-3.1.1 \
uvth-4.0.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
vek-0.11.2 \
version_check-0.1.5 \
version_check-0.9.2 \
void-1.0.2 \
walkdir-2.3.1 \
want-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.63 \
wasm-bindgen-backend-0.2.63 \
wasm-bindgen-macro-0.2.63 \
wasm-bindgen-macro-support-0.2.63 \
wasm-bindgen-shared-0.2.63 \
wayland-client-0.23.6 \
wayland-commons-0.23.6 \
wayland-protocols-0.23.6 \
wayland-scanner-0.23.6 \
wayland-sys-0.23.6 \
web-sys-0.3.40 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winres-0.1.11 \
ws2_32-sys-0.2.1 \
x11-clipboard-0.5.1 \
x11-dl-2.18.5 \
xcb-0.9.0 \
xdg-2.2.0 \
xml-rs-0.8.3
CARGO_USE_GITHUB= yes
CARGO_USE_GITLAB= yes
CARGO_GIT_SUBDIR= authcommon:authc:authc \
conrod_core:conrod_core:conrod_core \
conrod_core:conrod_winit:backends/conrod_winit
# https://gitlab.com/veloren/veloren/issues/264
CARGO_ENV= RUSTC_BOOTSTRAP=1
# XXX https://github.com/rust-lang/cargo/issues/4101
CARGO_INSTALL_PATH= chat-cli server-cli voxygen
post-extract:
# XXX https://gitlab.com/gitlab-org/gitlab/issues/15079
# Replace LFS placeholders with data from a LOCAL snapshot
@${RM} -r ${WRKSRC}/assets
@${MV} ${WRKDIR}/assets ${WRKSRC}
post-patch:
# .git/ directory is missing, so don't abort if git binary is also missing
@${REINPLACE_CMD} -e 's/"git"/"${TRUE}"/' \
${WRKSRC}/common/build.rs
# Extract (snapshot) version from the port instead of an empty file
@${REINPLACE_CMD} -e '/GIT_HASH/s/=.*/= "${DISTVERSIONFULL}";/' \
-e "/GIT_DATE/s/=.*/= \"$$(date -r $$(${AWK} '/TIMESTAMP/ { print $$3 }' \
${DISTINFO_FILE}) +'%Y-%m-%d-%H:%M')\";/" \
${WRKSRC}/common/src/util/mod.rs
# Respect PREFIX != /usr/local for system assets
@${REINPLACE_CMD} -e 's,/usr/share,${DATADIR:H},' \
${WRKSRC}/common/src/assets/mod.rs
# lang/python3 cannot be used as a dependency
@${REINPLACE_CMD} -e 's/"python3"/"${PYTHON_CMD:T}"/' \
${WRKSRC}/cargo-crates/xcb-*/build.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/*
(cd ${WRKSRC} && ${COPYTREE_SHARE} assets ${STAGEDIR}${DATADIR})
${MKDIR} ${STAGEDIR}${PREFIX}/share/icons/hicolor/192x192/apps
${INSTALL_DATA} ${DISTDIR}/android-icon-192x192.png \
${STAGEDIR}${PREFIX}/share/icons/hicolor/192x192/apps/${PORTNAME}.png
.include <bsd.port.mk>
Index: head/graphics/dssim/Makefile
===================================================================
--- head/graphics/dssim/Makefile (revision 552220)
+++ head/graphics/dssim/Makefile (revision 552221)
@@ -1,74 +1,75 @@
# $FreeBSD$
PORTNAME= dssim
DISTVERSION= 2.11.4
+PORTREVISION= 1
CATEGORIES= graphics
MASTER_SITES= CRATESIO
# XXX Teach USES=cargo to have proper default
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= jbeich@FreeBSD.org
COMMENT= Image similarity comparison simulating human perception
LICENSE= AGPLv3+
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
PLIST_FILES= bin/${PORTNAME}
CARGO_CRATES= adler-0.2.3 \
arrayvec-0.5.1 \
autocfg-1.0.1 \
bytemuck-1.4.1 \
cc-1.0.61 \
cfg-if-0.1.10 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.4 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
dssim-core-2.11.4 \
dunce-1.0.1 \
either-1.6.1 \
flate2-1.0.18 \
foreign-types-0.5.0 \
foreign-types-macros-0.2.1 \
foreign-types-shared-0.3.0 \
getopts-0.2.21 \
hermit-abi-0.1.17 \
imgref-1.7.0 \
itertools-0.9.0 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lcms2-5.4.0 \
lcms2-sys-3.1.5 \
libc-0.2.79 \
load_image-2.12.1 \
lodepng-3.2.2 \
maybe-uninit-2.0.0 \
memoffset-0.5.6 \
miniz_oxide-0.4.3 \
mozjpeg-0.8.20 \
mozjpeg-sys-0.10.11 \
nasm-rs-0.2.0 \
num-0.2.1 \
num-bigint-0.2.6 \
num-complex-0.2.4 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.2.4 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
pkg-config-0.3.18 \
proc-macro2-1.0.24 \
quote-1.0.7 \
rayon-1.4.1 \
rayon-core-1.8.1 \
rexif-0.5.0 \
rgb-0.8.25 \
scopeguard-1.1.0 \
syn-1.0.44 \
unicode-width-0.1.8 \
unicode-xid-0.2.1
.include <bsd.port.mk>
Index: head/graphics/ikona/Makefile
===================================================================
--- head/graphics/ikona/Makefile (revision 552220)
+++ head/graphics/ikona/Makefile (revision 552221)
@@ -1,31 +1,31 @@
# $FreeBSD$
PORTNAME= ikona
DISTVERSION= 1.0
-PORTREVISION= 6
+PORTREVISION= 7
CATEGORIES= graphics kde
MASTER_SITES= KDE/stable/${PORTNAME}/${DISTVERSION}/
DISTFILES= ${DISTNAME}${EXTRACT_SUFX} \
${DISTNAME}.cargo.vendor${EXTRACT_SUFX}
EXTRACT_ONLY= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= kde@FreeBSD.org
COMMENT= Icon editing companion
LICENSE= GPLv2
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS= rustc:lang/${RUST_DEFAULT}
USES= cmake:noninja compiler:c++11-lang desktop-file-utils \
gmake gnome iconv kde:5 pkgconfig qt:5 tar:xz
USE_GNOME= cairo gdkpixbuf2 glib20 libxml2 pango
USE_KDE= auth codecs config configwidgets coreaddons i18n \
kirigami2 widgetsaddons
USE_QT= core declarative gui network widgets xml \
buildtools_build qmake_build
pre-configure:
${CP} ${DISTDIR}/${DISTNAME}.cargo.vendor${EXTRACT_SUFX} ${WRKSRC}/ikona.cargo.vendor.tar.xz
.include <bsd.port.mk>
Index: head/graphics/librsvg2-rust/Makefile
===================================================================
--- head/graphics/librsvg2-rust/Makefile (revision 552220)
+++ head/graphics/librsvg2-rust/Makefile (revision 552221)
@@ -1,50 +1,51 @@
# Created by: Ade Lovett <ade@FreeBSD.org>
# $FreeBSD$
PORTNAME= librsvg
PORTVERSION= 2.50.0
+PORTREVISION= 1
CATEGORIES= graphics gnome
MASTER_SITES= GNOME
PKGNAMESUFFIX= 2-rust
DIST_SUBDIR= gnome2
MAINTAINER= desktop@FreeBSD.org
COMMENT= Library for parsing and rendering SVG vector-graphic files
LICENSE= LGPL20
BUILD_DEPENDS= ${RUST_DEFAULT}>=1.39.0:lang/${RUST_DEFAULT} \
valac:lang/vala
LIB_DEPENDS= libfontconfig.so:x11-fonts/fontconfig \
libfreetype.so:print/freetype2 \
libharfbuzz.so:print/harfbuzz \
libpng.so:graphics/png
USES= compiler:c11 gettext gmake gnome libtool pkgconfig tar:xz
USE_GNOME= cairo gdkpixbuf2 gnomeprefix libxml2 pango introspection:build
USE_LDCONFIG= yes
GNU_CONFIGURE= yes
CONFIGURE_ARGS= --disable-Bsymbolic \
--disable-dependency-tracking \
--disable-static \
--enable-vala
# Make sure it uses the Rust toolchain from ports.
CONFIGURE_ENV= CARGO=${LOCALBASE}/bin/cargo \
RUSTC=${LOCALBASE}/bin/rustc
MAKE_ENV= CARGO_BUILD_JOBS=${MAKE_JOBS_NUMBER} \
RUSTC=${LOCALBASE}/bin/rustc
INSTALL_TARGET= install-strip
TEST_TARGET= check
CONFLICTS_INSTALL= librsvg2
post-patch:
# Disable vendor checksums
@${REINPLACE_CMD} -e 's/"files":{[^}]*}/"files":{}/' \
${WRKSRC}/vendor/*/.cargo-checksum.json
pre-configure:
@cd ${WRKSRC} && ${SETENV} ${CONFIGURE_ENV} ${LOCALBASE}/bin/cargo update
.include <bsd.port.mk>
Index: head/graphics/pastel/Makefile
===================================================================
--- head/graphics/pastel/Makefile (revision 552220)
+++ head/graphics/pastel/Makefile (revision 552221)
@@ -1,132 +1,132 @@
# $FreeBSD$
PORTNAME= pastel
DISTVERSIONPREFIX= v
DISTVERSION= 0.8.0
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= graphics
MAINTAINER= vulcan@wired.sh
COMMENT= Command-line tool to generate, analyze, convert and manipulate colors
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
USES= cargo shebangfix
USE_GITHUB= yes
GH_ACCOUNT= sharkdp
SHEBANG_FILES= doc/demo-scripts/gradient.sh
CARGO_CRATES= aho-corasick-0.7.10 \
ansi_term-0.11.0 \
approx-0.3.2 \
arrayvec-0.4.12 \
assert_cmd-0.12.2 \
atty-0.2.14 \
autocfg-1.0.0 \
bitflags-1.2.1 \
bstr-0.2.13 \
bumpalo-3.3.0 \
byteorder-1.3.4 \
cast-0.2.3 \
cfg-if-0.1.10 \
clap-2.33.1 \
criterion-0.3.2 \
criterion-plot-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.1 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
difference-2.0.0 \
doc-comment-0.3.3 \
either-1.5.3 \
escargot-0.5.0 \
getrandom-0.1.14 \
hermit-abi-0.1.13 \
itertools-0.9.0 \
itoa-0.4.5 \
js-sys-0.3.39 \
lazy_static-1.4.0 \
lexical-core-0.6.2 \
libc-0.2.70 \
log-0.4.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.4 \
nodrop-0.1.14 \
nom-5.1.1 \
num-traits-0.2.11 \
num_cpus-1.13.0 \
oorandom-11.1.1 \
output_vt100-0.1.2 \
plotters-0.2.14 \
ppv-lite86-0.2.8 \
predicates-1.0.4 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro2-1.0.17 \
quote-1.0.6 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_xoshiro-0.4.0 \
rayon-1.3.0 \
rayon-core-1.7.0 \
regex-1.3.7 \
regex-automata-0.1.9 \
regex-syntax-0.6.17 \
rustc_version-0.2.3 \
ryu-1.0.4 \
same-file-1.0.6 \
scopeguard-1.1.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.110 \
serde_derive-1.0.110 \
serde_json-1.0.53 \
static_assertions-0.3.4 \
strsim-0.8.0 \
syn-1.0.24 \
term_size-0.3.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
tinytemplate-1.0.4 \
treeline-0.1.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.2 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.62 \
wasm-bindgen-backend-0.2.62 \
wasm-bindgen-macro-0.2.62 \
wasm-bindgen-macro-support-0.2.62 \
wasm-bindgen-shared-0.2.62 \
web-sys-0.3.39 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/${PORTNAME}
DOCS= doc/colorcheck.md doc/colorcheck.png \
doc/demo-scripts/gradient.sh doc/pastel.gif README.md
PORTDOCS= ${DOCS:T}
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC} && ${INSTALL_DATA} ${DOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/graphics/rx/Makefile
===================================================================
--- head/graphics/rx/Makefile (revision 552220)
+++ head/graphics/rx/Makefile (revision 552221)
@@ -1,179 +1,179 @@
# $FreeBSD$
PORTNAME= rx
DISTVERSIONPREFIX= v
DISTVERSION= 0.4.0
-PORTREVISION= 8
+PORTREVISION= 9
CATEGORIES= graphics
PKGNAMESUFFIX= -editor
MAINTAINER= yuri@FreeBSD.org
COMMENT= Modern and minimalist pixel editor implemented in Rust
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENSE
ONLY_FOR_ARCHS= amd64
ONLY_FOR_ARCHS_REASON= meowhash crate only supports x86_64, see https://github.com/bodil/meowhash-rs/issues/5
USES= cargo xorg
USE_GITHUB= yes
GH_ACCOUNT= cloudhead
USE_XORG= x11 xcursor xext xi xinerama xrandr
CARGO_CRATES= adler32-1.0.3 \
arrayref-0.3.5 \
arrayvec-0.4.11 \
arrayvec-0.5.1 \
ash-0.29.0 \
atom-0.3.5 \
atty-0.2.13 \
autocfg-0.1.5 \
backtrace-0.3.34 \
backtrace-sys-0.1.31 \
base64-0.10.1 \
bitflags-1.1.0 \
blake2b_simd-0.5.7 \
block-0.1.6 \
bumpalo-2.6.0 \
byteorder-1.3.2 \
c2-chacha-0.2.3 \
cc-1.0.40 \
cfg-if-0.1.9 \
chrono-0.4.10 \
cloudabi-0.0.3 \
cmake-0.1.41 \
cocoa-0.19.1 \
color_quant-1.0.1 \
colorful-0.2.1 \
constant_time_eq-0.1.4 \
copyless-0.1.4 \
core-foundation-0.6.4 \
core-foundation-sys-0.6.2 \
core-graphics-0.17.3 \
crc32fast-1.2.0 \
crossbeam-utils-0.6.6 \
d3d12-0.3.0 \
deflate-0.7.20 \
digest-0.8.1 \
directories-2.0.2 \
dirs-sys-0.3.4 \
failure-0.1.5 \
failure_derive-0.1.5 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fxhash-0.2.1 \
gcc-0.3.55 \
generic-array-0.12.3 \
getrandom-0.1.14 \
gfx-auxil-0.1.0 \
gfx-backend-dx11-0.4.1 \
gfx-backend-dx12-0.4.1 \
gfx-backend-empty-0.4.0 \
gfx-backend-metal-0.4.0 \
gfx-backend-vulkan-0.4.0 \
gfx-hal-0.4.0 \
gif-0.10.3 \
gl-0.14.0 \
gl_generator-0.14.0 \
glfw-0.34.0 \
glfw-sys-3.3.0 \
hibitset-0.6.2 \
inflate-0.4.5 \
js-sys-0.3.33 \
khronos_api-3.1.0 \
lazy_static-1.3.0 \
libc-0.2.65 \
libloading-0.5.2 \
lock_api-0.3.1 \
log-0.4.8 \
luminance-0.38.0 \
luminance-derive-0.5.0 \
lzw-0.10.0 \
malloc_buf-0.0.6 \
meowhash-0.1.2 \
metal-0.17.0 \
nodrop-0.1.13 \
nonempty-0.1.4 \
num-integer-0.1.41 \
num-traits-0.2.8 \
objc-0.2.6 \
objc_exception-0.1.1 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
pico-args-0.3.0 \
pkg-config-0.3.15 \
png-0.15.0 \
ppv-lite86-0.2.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.1 \
quote-0.6.13 \
quote-1.0.2 \
rand-0.7.3 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_os-0.1.3 \
range-alloc-0.1.0 \
raw-window-handle-0.3.3 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
relevant-0.4.2 \
remove_dir_all-0.5.2 \
rendy-descriptor-0.5.0 \
rendy-memory-0.5.0 \
rgx-0.7.1 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
scopeguard-1.0.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.102 \
serde_derive-1.0.102 \
shared_library-0.1.9 \
simple_logger-1.4.0 \
slab-0.4.2 \
smallvec-0.6.10 \
snap-0.2.5 \
spirv_cross-0.16.0 \
storage-map-0.2.0 \
syn-0.15.44 \
syn-1.0.3 \
synstructure-0.10.2 \
tempfile-3.1.0 \
time-0.1.42 \
toml-0.5.5 \
typenum-1.11.2 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.56 \
wasm-bindgen-backend-0.2.56 \
wasm-bindgen-macro-0.2.56 \
wasm-bindgen-macro-support-0.2.56 \
wasm-bindgen-shared-0.2.56 \
wgpu-0.4.0 \
wgpu-native-0.4.1 \
winapi-0.3.7 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wio-0.2.2 \
x11-2.18.1 \
xml-rs-0.8.0 \
zerocopy-0.2.8 \
zerocopy-derive-0.1.4
PLIST_FILES= bin/rx
LDFLAGS+= -L${PREFIX}/lib
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/graphics/svgbob/Makefile
===================================================================
--- head/graphics/svgbob/Makefile (revision 552220)
+++ head/graphics/svgbob/Makefile (revision 552221)
@@ -1,67 +1,67 @@
# $FreeBSD$
PORTNAME= svgbob
DISTVERSION= g20190412
-PORTREVISION= 13
+PORTREVISION= 14
CATEGORIES= graphics
MAINTAINER= ports@FreeBSD.org
COMMENT= Convert ASCII diagrams to SVG
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC:H}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= ivanceras
GH_TAGNAME= c3e84c87094550d8cdc6fc48b40a00712b2951cb
CARGO_CRATES= ansi_term-0.11.0 \
atty-0.2.11 \
bitflags-1.0.4 \
cfg-if-0.1.5 \
clap-2.32.0 \
handlebars-0.21.1 \
libc-0.2.60 \
log-0.3.9 \
log-0.4.5 \
pest-0.3.3 \
pom-1.1.0 \
quick-error-1.2.2 \
redox_syscall-0.1.40 \
redox_termios-0.1.1 \
rustc-serialize-0.3.24 \
strsim-0.7.0 \
svg-0.5.11 \
svgbob-0.4.1 \
termion-1.5.1 \
textwrap-0.10.0 \
unicode-width-0.1.5 \
vec_map-0.8.1 \
winapi-0.3.6 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
# ignore #![deny(warnings)]
RUSTFLAGS+= --cap-lints warn
WRKSRC_SUBDIR= svgbob_cli
PLIST_FILES= bin/svgbob
PORTDOCS= README.md spec.md
PORTEXAMPLES= *
OPTIONS_DEFINE= DOCS EXAMPLES
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/svgbob
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC:H} && ${INSTALL_MAN} ${PORTDOCS} ${STAGEDIR}${DOCSDIR}
post-install-EXAMPLES-on:
@${MKDIR} ${STAGEDIR}${EXAMPLESDIR}
cd ${WRKSRC}/examples && ${COPYTREE_SHARE} . ${STAGEDIR}${EXAMPLESDIR}
.include <bsd.port.mk>
Index: head/graphics/viu/Makefile
===================================================================
--- head/graphics/viu/Makefile (revision 552220)
+++ head/graphics/viu/Makefile (revision 552221)
@@ -1,110 +1,111 @@
# $FreeBSD$
PORTNAME= viu
DISTVERSIONPREFIX= v
DISTVERSION= 1.1
+PORTREVISION= 1
CATEGORIES= graphics
MAINTAINER= vulcan@wired.sh
COMMENT= Simple terminal image viewer written in Rust
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE-MIT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= atanunq
CARGO_CRATES= adler32-1.2.0 \
ansi_colours-1.0.1 \
ansi_term-0.11.0 \
arc-swap-0.4.7 \
atty-0.2.14 \
autocfg-1.0.1 \
bitflags-1.2.1 \
bytemuck-1.4.1 \
byteorder-1.3.4 \
cc-1.0.60 \
cfg-if-0.1.10 \
clap-2.33.3 \
cloudabi-0.0.3 \
color_quant-1.0.1 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.4 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
ctrlc-3.1.6 \
deflate-0.7.20 \
deflate-0.8.6 \
either-1.6.1 \
gif-0.10.3 \
hermit-abi-0.1.15 \
image-0.22.5 \
image-0.23.9 \
inflate-0.4.5 \
jpeg-decoder-0.1.20 \
lazy_static-1.4.0 \
libc-0.2.77 \
lock_api-0.3.4 \
log-0.4.11 \
lzw-0.10.0 \
maybe-uninit-2.0.0 \
memoffset-0.5.5 \
miniz_oxide-0.3.7 \
mio-0.7.0 \
miow-0.3.5 \
nix-0.17.0 \
ntapi-0.3.4 \
num-derive-0.2.5 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.2.4 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
png-0.15.3 \
png-0.16.7 \
proc-macro2-0.4.30 \
quote-0.6.13 \
rayon-1.4.0 \
rayon-core-1.8.1 \
redox_syscall-0.1.57 \
scoped_threadpool-0.1.9 \
scopeguard-1.1.0 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.1 \
smallvec-1.4.2 \
socket2-0.3.15 \
strsim-0.8.0 \
syn-0.15.44 \
termcolor-1.1.0 \
textwrap-0.11.0 \
tiff-0.3.1 \
tiff-0.5.0 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
vec_map-0.8.2 \
void-1.0.2 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/${PORTNAME}
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/${PORTDOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/java/icedtea-web/Makefile
===================================================================
--- head/java/icedtea-web/Makefile (revision 552220)
+++ head/java/icedtea-web/Makefile (revision 552221)
@@ -1,126 +1,126 @@
# $FreeBSD$
PORTNAME= icedtea-web
PORTVERSION= 1.8.4
DISTVERSIONPREFIX= icedtea-web-
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= java www
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= java@FreeBSD.org
COMMENT= Free Java plug-in and Java Web Start for OpenJDK
LICENSE= GPLv2
BUILD_DEPENDS= zip:archivers/zip \
rustc:lang/${RUST_DEFAULT} \
bash:shells/bash \
gsed:textproc/gsed \
${HAMCREST_JAR}:java/hamcrest \
${JUNIT_JAR}:java/junit
RUN_DEPENDS= bash:shells/bash
MAKE_JOBS_UNSAFE=yes
GNU_CONFIGURE= yes
USE_JAVA= yes
JAVA_VERSION= 8
JAVA_OS= native
JAVA_VENDOR= openjdk
USE_PERL5= build
USE_XORG= x11
USES= autoreconf compiler:c++11-lang desktop-file-utils gmake perl5 \
pkgconfig shebangfix xorg
USE_GITHUB= yes
GH_ACCOUNT= AdoptOpenJDK
GH_PROJECT= IcedTea-Web
SHEBANG_FILES= jrunscript.in shell-launcher/launchers.sh.in
CONFIGURE_ARGS= --with-hamcrest="${HAMCREST_JAR}"
CONFIGURE_ARGS+= --with-jdk-home="${JAVA_HOME}"
CONFIGURE_ARGS+= --with-junit="${JUNIT_JAR}"
CONFIGURE_ARGS+= --with-rhino="${RHINO_JAR}"
CONFIGURE_ARGS+= --with-tagsoup="${TAGSOUP_JAR}"
CONFIGURE_ARGS+= --with-itw-libs="DISTRIBUTION"
CONFIGURE_ARGS+= --enable-shell-launchers
.for opt in asm chrome chromium ecj epiphany firefox jacoco midori opera
CONFIGURE_ARGS+= --without-${opt}
.endfor
CONFIGURE_ENV= BIN_BASH=${bash_CMD}
CONFIGURE_ENV+= bashcompdir=${PREFIX}/etc/bash_completion.d
CONFIGURE_SHELL= ${bash_CMD}
TEST_TARGET= check
# Upstream archive contains files with UTF-8 names
EXTRACT_CMD= ${SETENV} LC_ALL=en_US.UTF-8 /usr/bin/bsdtar
OPTIONS_DEFINE= DOCS PLUGIN RHINO TAGSOUP
OPTIONS_DEFAULT=PLUGIN
OPTIONS_SUB= yes
PLUGIN_DESC= Enable the browser plug-in
RHINO_DESC= Add support for Proxy Auto Config files
TAGSOUP_DESC= Enable cleaning up of malformed JNLP files
DOCS_CONFIGURE_ENABLE= docs
DOCS_PORTDOCS= netx
RHINO_BUILD_DEPENDS= ${JAVASHAREDIR}/rhino/rhino.jar:lang/rhino
RHINO_RUN_DEPENDS= ${RHINO_BUILD_DEPENDS}
RHINO_VARS= RHINO_JAR="${JAVASHAREDIR}/rhino/rhino.jar"
RHINO_VARS_OFF= RHINO_JAR=no
TAGSOUP_BUILD_DEPENDS= ${JAVALIBDIR}/tagsoup.jar:textproc/tagsoup
TAGSOUP_RUN_DEPENDS= ${TAGSOUP_BUILD_DEPENDS}
TAGSOUP_VARS= TAGSOUP_JAR="${JAVALIBDIR}/tagsoup.jar"
TAGSOUP_VARS_OFF= TAGSOUP_JAR=no
PLUGIN_CONFIGURE_ENABLE=native-plugin pluginjar
PLUGIN_CONFIGURE_ENV= MOZILLA_CFLAGS="-I${WRKDIR}/npapi-headers -DXP_UNIX" \
MOZILLA_LIBS=" " # needs to be set and non-empty!
PLUGIN_DISTFILES= libxul-npapi-headers-45.9.0${EXTRACT_SUFX}:npapi
PLUGIN_MASTER_SITES= LOCAL/tobik:npapi
PLUGIN_PORTDOCS= plugin
PLUGIN_TEST_TARGET= plugin-tests
PLUGIN_USE= GNOME=glib20
PLUGIN_USES= webplugin:native gnome
PLUGIN_VARS= WEBPLUGIN_DIR=${PREFIX}/lib \
WEBPLUGIN_FILES=IcedTeaPlugin.so
HAMCREST_JAR= ${JAVALIBDIR}/hamcrest.jar
JUNIT_JAR= ${JAVALIBDIR}/junit.jar
.include <bsd.port.pre.mk>
.if ${JAVA_PORT_VERSION:M*6*}
pre-extract:
@${ECHO_MSG}
@${ECHO_MSG} "IMPORTANT: To build IcedTea-Web ${PORTVERSION}, you have to turn on 'ICEDTEA' option"
@${ECHO_MSG} "for ${JAVA_PORT} (default). Otherwise, it will neither build nor work."
@${ECHO_MSG}
. if !(defined(PACKAGE_BUILDING) || defined(BATCH))
@sleep 5
. endif
.endif
post-patch:
${REINPLACE_CMD} -e 's|^sed\([[:space:]]\)|gsed\1|' \
-e 's|\([[:space:]]\)sed\([[:space:]]\)|\1gsed\2|g' \
${WRKSRC}/Makefile.am ${WRKSRC}/html-gen.sh
${REINPLACE_CMD} -e 's|^Icon=javaws|Icon=itweb-javaws|' \
${WRKSRC}/*.desktop.in
post-install:
${INSTALL_DATA} ${WRKSRC}/*.desktop ${STAGEDIR}${DESKTOPDIR}
.for lang in cs de en pl
.for man in javaws policyeditor
${MV} ${STAGEDIR}${MANPREFIX}/man/${lang:Nen}/man1/${man}.1 \
${STAGEDIR}${MANPREFIX}/man/${lang:Nen}/man1/itweb-${man}.1
.endfor
.endfor
@${ECHO_MSG}
@${ECHO_MSG} "Run \"make test\" to execute regression test."
@${ECHO_MSG}
.include <bsd.port.post.mk>
Index: head/lang/rust/Makefile
===================================================================
--- head/lang/rust/Makefile (revision 552220)
+++ head/lang/rust/Makefile (revision 552221)
@@ -1,229 +1,232 @@
# Created by: Jyun-Yan You <jyyou@cs.nctu.edu.tw>
# $FreeBSD$
PORTNAME= rust
-PORTVERSION?= 1.46.0
+PORTVERSION?= 1.47.0
PORTREVISION?= 0
CATEGORIES= lang
MASTER_SITES= https://static.rust-lang.org/dist/:src \
https://dev-static.rust-lang.org/dist/:src \
+ LOCAL/bdragon/rust:bootstrap \
+ LOCAL/mikael/rust:bootstrap \
LOCAL/tobik/rust:bootstrap \
https://static.rust-lang.org/dist/:bootstrap
DISTNAME?= ${PORTNAME}c-${PORTVERSION}-src
DISTFILES?= ${NIGHTLY_DATE:D${NIGHTLY_DATE}/}${DISTNAME}${EXTRACT_SUFX}:src \
${_RUSTC_BOOTSTRAP}${BOOTSTRAPS_SUFFIX}${EXTRACT_SUFX}:bootstrap \
${_RUST_STD_BOOTSTRAP}${BOOTSTRAPS_SUFFIX}${EXTRACT_SUFX}:bootstrap \
${_CARGO_BOOTSTRAP}${BOOTSTRAPS_SUFFIX}${EXTRACT_SUFX}:bootstrap
DIST_SUBDIR?= rust
MAINTAINER= rust@FreeBSD.org
COMMENT= Language with a focus on memory safety and concurrency
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
IGNORE_FreeBSD_11_powerpc64= is missing a bootstrap for FreeBSD 11.x powerpc64
-ONLY_FOR_ARCHS?= aarch64 amd64 armv6 armv7 i386 powerpc64
+ONLY_FOR_ARCHS?= aarch64 amd64 armv6 armv7 i386 powerpc64 powerpc64le
ONLY_FOR_ARCHS_REASON?= requires prebuilt bootstrap compiler
BUILD_DEPENDS= cmake:devel/cmake \
libgit2>=1.0.0:devel/libgit2 \
ninja:devel/ninja
LIB_DEPENDS= libcurl.so:ftp/curl \
libgit2.so:devel/libgit2 \
libssh2.so:security/libssh2
USES= pkgconfig python:3.3+,build ssl tar:xz
MAKE_ENV= DESTDIR=${STAGEDIR} \
LIBGIT2_SYS_USE_PKG_CONFIG=1 \
LIBSSH2_SYS_USE_PKG_CONFIG=1 \
OPENSSL_DIR="${OPENSSLBASE}" \
RUST_BACKTRACE=1
TEST_ENV= ${MAKE_ENV} \
ALLOW_NONZERO_RLIMIT_CORE=1
CONFLICTS_INSTALL?= rust-nightly
OPTIONS_DEFINE= DOCS GDB SOURCES WASM
OPTIONS_DEFAULT= SOURCES WASM
+OPTIONS_EXCLUDE= DOCS # https://github.com/rust-lang/rust/issues/76526
GDB_DESC= Install ports gdb (necessary for debugging rust programs)
SOURCES_DESC= Install source files
WASM_DESC= Build the WebAssembly target (wasm32-unknown-unknown)
DOCS_VARS= _RUST_BUILD_DOCS=true
DOCS_VARS_OFF= _RUST_BUILD_DOCS=false
GDB_RUN_DEPENDS= ${LOCALBASE}/bin/gdb:devel/gdb
SOURCES_VARS= _RUST_TOOLS+=src
WASM_VARS= _RUST_BUILD_WASM=true \
_RUST_TARGETS+=wasm32-unknown-unknown
WASM_VARS_OFF= _RUST_BUILD_WASM=false
# See WRKSRC/src/stage0.txt for the date and version values.
-BOOTSTRAPS_DATE?= 2020-08-03
-RUST_BOOTSTRAP_VERSION?= 1.45.2
-CARGO_BOOTSTRAP_VERSION?= 0.46.1
+BOOTSTRAPS_DATE?= 2020-08-27
+RUST_BOOTSTRAP_VERSION?= 1.46.0
+CARGO_BOOTSTRAP_VERSION?= 0.47.0
BOOTSTRAPS_SUFFIX?= ${BOOTSTRAPS_SUFFIX_${ARCH}}
BOOTSTRAPS_SUFFIX_powerpc64?= -${PPC_ABI:tl}
CARGO_VENDOR_DIR?= ${WRKSRC}/vendor
# Rust's target arch string might be different from *BSD arch strings
_RUST_ARCH_amd64= x86_64
_RUST_ARCH_i386= i686
_RUST_TARGET= ${_RUST_ARCH_${ARCH}:U${ARCH}}-unknown-${OPSYS:tl}
_RUST_TARGETS= ${_RUST_TARGET}
_RUST_TOOLS= analysis cargo clippy rls rustfmt
_RUSTC_BOOTSTRAP= ${BOOTSTRAPS_DATE_${ARCH}:U${BOOTSTRAPS_DATE}}/rustc-${RUST_BOOTSTRAP_VERSION_${ARCH}:U${RUST_BOOTSTRAP_VERSION}}-${_RUST_TARGET}
_RUST_STD_BOOTSTRAP= ${BOOTSTRAPS_DATE_${ARCH}:U${BOOTSTRAPS_DATE}}/rust-std-${RUST_BOOTSTRAP_VERSION_${ARCH}:U${RUST_BOOTSTRAP_VERSION}}-${_RUST_TARGET}
_CARGO_BOOTSTRAP= ${BOOTSTRAPS_DATE_${ARCH}:U${BOOTSTRAPS_DATE}}/cargo-${CARGO_BOOTSTRAP_VERSION_${ARCH}:U${CARGO_BOOTSTRAP_VERSION}}-${_RUST_TARGET}
.include <bsd.port.pre.mk>
.if exists(${PATCHDIR}/${ARCH}${BOOTSTRAPS_SUFFIX})
EXTRA_PATCHES+= ${PATCHDIR}/${ARCH}${BOOTSTRAPS_SUFFIX}
.endif
.if defined(PPC_ABI) && ${PPC_ABI} == ELFv1
# The bootstrap is hardcoded to use gcc9
# but we can build with a newer or older compiler as provided by USE_GCC=yes
BUILD_DEPENDS+= gcc9:lang/gcc9
USE_GCC= yes
.endif
.if ${ARCH} == aarch64 && ${OSVERSION} < 1200502
IGNORE= fails to run due to a bug in rtld, update to 12-STABLE r342847 or 13-CURRENT r342113
.endif
.ifdef QEMU_EMULATING
IGNORE= fails to build with qemu-user-static
.endif
post-patch:
@${REINPLACE_CMD} 's,gdb,${LOCALBASE}/bin/gdb,' ${WRKSRC}/src/etc/rust-gdb
.if defined(NIGHTLY_DATE)
@${REINPLACE_CMD} '/^rustfmt:/d' ${WRKSRC}/src/stage0.txt
.endif
# Disable vendor checksums
@${REINPLACE_CMD} 's,"files":{[^}]*},"files":{},' \
${CARGO_VENDOR_DIR}/*/.cargo-checksum.json
do-configure:
# Check that the running kernel has COMPAT_FREEBSD11 required by lang/rust post-ino64
@${SETENV} CC="${CC}" OPSYS="${OPSYS}" OSVERSION="${OSVERSION}" WRKDIR="${WRKDIR}" \
${SH} ${SCRIPTSDIR}/rust-compat11-canary.sh
.for _component in cargo rust-std rustc
@cd ${WRKDIR}/${_component}-*-${OPSYS:tl} && \
${SH} install.sh --disable-ldconfig --prefix=${WRKDIR}/bootstrap \
--verbose
.endfor
@${ECHO_CMD} '[build]' > ${WRKSRC}/config.toml
@${ECHO_CMD} 'vendor=true' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'extended=true' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'python="${PYTHON_CMD}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'docs=${_RUST_BUILD_DOCS}' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'verbose=2' >> ${WRKSRC}/config.toml
.if defined(NIGHTLY_DATE)
@${ECHO_CMD} 'profiler=true' >> ${WRKSRC}/config.toml
.endif
@${ECHO_CMD} 'target=[${_RUST_TARGETS:@.target.@"${.target.}"@:ts,}]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cargo="${WRKDIR}/bootstrap/bin/cargo"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'rustc="${WRKDIR}/bootstrap/bin/rustc"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'tools=[${_RUST_TOOLS:@.tool.@"${.tool.}"@:ts,}]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} '[install]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'prefix="${PREFIX}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'sysconfdir="${PREFIX}/etc"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} '[rust]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'channel="${PKGNAMESUFFIX:Ustable:S/^-//}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'default-linker="${CC}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'verbose-tests=true' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'lld=${_RUST_BUILD_WASM}' >> ${WRKSRC}/config.toml
@${ECHO_CMD} '[llvm]' >> ${WRKSRC}/config.toml
.if defined(WITH_CCACHE_BUILD) && !defined(NO_CCACHE)
@${ECHO_CMD} 'ccache="${CCACHE_BIN}"' >> ${WRKSRC}/config.toml
.else
@${ECHO_CMD} 'ccache=false' >> ${WRKSRC}/config.toml
.endif
@${ECHO_CMD} 'ninja=true' >> ${WRKSRC}/config.toml
.if ${ARCH} == armv6
# fails to link with base ld.bfd: rustc_codegen_llvm.e2557spx-cgu.11:(.text._ZN89_$LT$rustc_target..abi..call..CastTarget$u20$as$u20$rustc_codegen_llvm..abi..LlvmType$GT$9llvm_type17h1296210ab461fc57E+0x54): relocation truncated to fit: R_ARM_CALL against symbol `__aeabi_uldivmod' defined in .text.__aeabi_uldivmod section in /tmp/rustcdnGbao/libcompiler_builtins-ee65b414e4115a8f.rlib(compiler_builtins-ee65b414e4115a8f.compiler_builtins.ay8p39ey-cgu.13.rcgu.o)
@${PRINTF} '#!/bin/sh\nexec ${CC} -fuse-ld=lld "$$@"' > ${WRKDIR}/cc-wrapper
@${CHMOD} +x ${WRKDIR}/cc-wrapper
.endif
.for _target in ${_RUST_TARGETS}
@${ECHO_CMD} '[target.${_target}]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'ar="${AR}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cc="${CC}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cxx="${CXX}"' >> ${WRKSRC}/config.toml
.if ${ARCH} == armv6
@${ECHO_CMD} 'linker="${WRKDIR}/cc-wrapper"' >> ${WRKSRC}/config.toml
.else
@${ECHO_CMD} 'linker="${CC}"' >> ${WRKSRC}/config.toml
.endif
.endfor
@${ECHO_CMD} '[dist]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'src-tarball=false' >> ${WRKSRC}/config.toml
.if defined(NIGHTLY_DATE)
# Don't abort if optional tools fail to build
@${ECHO_CMD} 'missing-tools=true' >> ${WRKSRC}/config.toml
.endif
@${REINPLACE_CMD} 's,%CC%,${CC},g' \
${WRKSRC}/src/librustc_llvm/build.rs \
${WRKSRC}/src/bootstrap/native.rs
do-build:
@cd ${WRKSRC} && \
${SETENV} ${MAKE_ENV} ${PYTHON_CMD} x.py build --jobs=${MAKE_JOBS_NUMBER}
do-install:
@cd ${WRKSRC} && \
${SETENV} ${MAKE_ENV} ${PYTHON_CMD} x.py install --jobs=${MAKE_JOBS_NUMBER}
# We autogenerate the plist file. We do that, instead of the
# regular pkg-plist, because several libraries have a computed
# filename based on the absolute path of the source files. As it
# is user-specific, we cannot know their filename in advance.
@${RM} -r ${STAGEDIR}${DOCSDIR}/*.old \
${STAGEDIR}${DOCSDIR}/html/.lock \
${STAGEDIR}${DOCSDIR}/html/.stamp \
${STAGEDIR}${PREFIX}/lib/rustlib/install.log \
${STAGEDIR}${PREFIX}/lib/rustlib/manifest-* \
${STAGEDIR}${PREFIX}/lib/rustlib/uninstall.sh
@${FIND} ${STAGEDIR}${PREFIX}/bin ${STAGEDIR}${PREFIX}/lib -exec ${FILE} -i {} + | \
${AWK} -F: '/executable|sharedlib/ { print $$1 }' | ${XARGS} ${STRIP_CMD}
@${FIND} ${STAGEDIR}${PREFIX} -not -type d | \
${SED} -E -e 's,^${STAGEDIR}${PREFIX}/,,' \
-e 's,(share/man/man[1-9]/.*\.[0-9]),\1.gz,' >> ${TMPPLIST}
post-install-SOURCES-on:
# Silence stage-qa warnings by sanitizing permissions on sources
@${FIND} ${STAGEDIR}${PREFIX}/lib/rustlib/src -type f -exec ${CHMOD} \
${SHAREMODE} {} +
# Note that make test does not work when rust is already installed.
do-test:
@cd ${WRKSRC} && \
${SETENV} ${TEST_ENV} ${PYTHON_CMD} x.py test --jobs=${MAKE_JOBS_NUMBER}
.if !defined(_RUST_MAKESUM_GUARD)
makesum:
${MAKE} -D_RUST_MAKESUM_GUARD makesum ARCH=${ONLY_FOR_ARCHS:O:[1]} DISTINFO_FILE=${DISTINFO_FILE}.tmp
.for arch in ${ONLY_FOR_ARCHS:O:[2..-1]}
${MAKE} -D_RUST_MAKESUM_GUARD makesum PPC_ABI=ELFv1 ARCH=${arch} DISTINFO_FILE=${DISTINFO_FILE}.${arch}
${SED} 1d ${DISTINFO_FILE}.${arch} >> ${DISTINFO_FILE}.tmp
${RM} ${DISTINFO_FILE}.${arch}
.endfor
.if ${ONLY_FOR_ARCHS:Mpowerpc64}
${MAKE} -D_RUST_MAKESUM_GUARD makesum PPC_ABI=ELFv2 ARCH=powerpc64 DISTINFO_FILE=${DISTINFO_FILE}.powerpc64-elfv2
${SED} 1d ${DISTINFO_FILE}.powerpc64-elfv2 >> ${DISTINFO_FILE}.tmp
${RM} ${DISTINFO_FILE}.powerpc64-elfv2
.endif
${AWK} '!seen[$$0]++' ${DISTINFO_FILE}.tmp > ${DISTINFO_FILE}
${RM} ${DISTINFO_FILE}.tmp
.endif
.include <bsd.port.post.mk>
Index: head/lang/rust/distinfo
===================================================================
--- head/lang/rust/distinfo (revision 552220)
+++ head/lang/rust/distinfo (revision 552221)
@@ -1,45 +1,51 @@
-TIMESTAMP = 1598307595
-SHA256 (rust/rustc-1.46.0-src.tar.xz) = 865dae1290a205f16ded8818c6a0254cc32862985fc250a602a70285b7d92b82
-SIZE (rust/rustc-1.46.0-src.tar.xz) = 101868452
-SHA256 (rust/2020-08-03/rustc-1.45.2-aarch64-unknown-freebsd.tar.xz) = 4a9eb073ad13ab260acfa93dc07fe95978fabbfe1180727b342079eb95221215
-SIZE (rust/2020-08-03/rustc-1.45.2-aarch64-unknown-freebsd.tar.xz) = 30800808
-SHA256 (rust/2020-08-03/rust-std-1.45.2-aarch64-unknown-freebsd.tar.xz) = fd16c5d7be637fc0a621e47f7bf89f92e1fa894f3ef32a5727d0aad8c76b2f9f
-SIZE (rust/2020-08-03/rust-std-1.45.2-aarch64-unknown-freebsd.tar.xz) = 12310440
-SHA256 (rust/2020-08-03/cargo-0.46.1-aarch64-unknown-freebsd.tar.xz) = b42d415c69c13afc89eaa6c5cd3ae07bf5825b5226b1e42169c4b67eb705f1d3
-SIZE (rust/2020-08-03/cargo-0.46.1-aarch64-unknown-freebsd.tar.xz) = 3946548
-SHA256 (rust/2020-08-03/rustc-1.45.2-x86_64-unknown-freebsd.tar.xz) = 1e4e9087f16be263d85c020e2c56ee1779b29bfa7717a0c71d4c233daaecf4ea
-SIZE (rust/2020-08-03/rustc-1.45.2-x86_64-unknown-freebsd.tar.xz) = 33083584
-SHA256 (rust/2020-08-03/rust-std-1.45.2-x86_64-unknown-freebsd.tar.xz) = 3e527a49f076074155db48b23cd16a4aedf172f1cb37484c816df71001e63b94
-SIZE (rust/2020-08-03/rust-std-1.45.2-x86_64-unknown-freebsd.tar.xz) = 12938052
-SHA256 (rust/2020-08-03/cargo-0.46.1-x86_64-unknown-freebsd.tar.xz) = 7b6238a71db2937e2aa7105d59e025160028665cd29c5a110384c19c8531504e
-SIZE (rust/2020-08-03/cargo-0.46.1-x86_64-unknown-freebsd.tar.xz) = 4673872
-SHA256 (rust/2020-08-03/rustc-1.45.2-armv6-unknown-freebsd.tar.xz) = 791c165814903e4d6a898e23b0486cc066582e81645eade29e8b6f9e2080bb4c
-SIZE (rust/2020-08-03/rustc-1.45.2-armv6-unknown-freebsd.tar.xz) = 31745892
-SHA256 (rust/2020-08-03/rust-std-1.45.2-armv6-unknown-freebsd.tar.xz) = 95e9134d3c9d22650df62ba5f2e3c0f9460d7c5d517f74d4695f380bf4d0955e
-SIZE (rust/2020-08-03/rust-std-1.45.2-armv6-unknown-freebsd.tar.xz) = 12264212
-SHA256 (rust/2020-08-03/cargo-0.46.1-armv6-unknown-freebsd.tar.xz) = 734e35b8eb69296113c009f20c7af5e3a6580e26940d3878b937405216b329a0
-SIZE (rust/2020-08-03/cargo-0.46.1-armv6-unknown-freebsd.tar.xz) = 4068112
-SHA256 (rust/2020-08-03/rustc-1.45.2-armv7-unknown-freebsd.tar.xz) = 87947861b38af004eef01e36eb37dd7fe8b4e8d2a0e1f93656ec6bf44d7a7ced
-SIZE (rust/2020-08-03/rustc-1.45.2-armv7-unknown-freebsd.tar.xz) = 32189244
-SHA256 (rust/2020-08-03/rust-std-1.45.2-armv7-unknown-freebsd.tar.xz) = 38024abb8331e8cbe1f57d73f995f6c7db2c0289b0d04741bb4b328734cbff7a
-SIZE (rust/2020-08-03/rust-std-1.45.2-armv7-unknown-freebsd.tar.xz) = 12189004
-SHA256 (rust/2020-08-03/cargo-0.46.1-armv7-unknown-freebsd.tar.xz) = 9f87ae190174190d17ce7e8669b1182ab043438d1dd8262cf879a07f5a2d6b7c
-SIZE (rust/2020-08-03/cargo-0.46.1-armv7-unknown-freebsd.tar.xz) = 4054892
-SHA256 (rust/2020-08-03/rustc-1.45.2-i686-unknown-freebsd.tar.xz) = 54db1b27243b152245ba2339127baf0ce0c9da97a8e0bbd5bfb427b1fa7a7b42
-SIZE (rust/2020-08-03/rustc-1.45.2-i686-unknown-freebsd.tar.xz) = 34092656
-SHA256 (rust/2020-08-03/rust-std-1.45.2-i686-unknown-freebsd.tar.xz) = 2e85ad6fb361b48f93b3b18008034dcd2b1f2aa05d72fd7f4cd06ba12dfd5a6d
-SIZE (rust/2020-08-03/rust-std-1.45.2-i686-unknown-freebsd.tar.xz) = 12865768
-SHA256 (rust/2020-08-03/cargo-0.46.1-i686-unknown-freebsd.tar.xz) = 472db73f921a8bd1668e1d7ce338b8c7369215dd6ffe2cfc9d77f65f94db9a29
-SIZE (rust/2020-08-03/cargo-0.46.1-i686-unknown-freebsd.tar.xz) = 4605600
-SHA256 (rust/2020-08-03/rustc-1.45.2-powerpc64-unknown-freebsd-elfv1.tar.xz) = dce4298fcfde04ce35d86acbf97c57334304ce379efb78f6254bdc040e61795c
-SIZE (rust/2020-08-03/rustc-1.45.2-powerpc64-unknown-freebsd-elfv1.tar.xz) = 37171624
-SHA256 (rust/2020-08-03/rust-std-1.45.2-powerpc64-unknown-freebsd-elfv1.tar.xz) = 236d119752df7ee16de2e2a5042a0db06ace2971d3a624f6aaf48dde3d56fb18
-SIZE (rust/2020-08-03/rust-std-1.45.2-powerpc64-unknown-freebsd-elfv1.tar.xz) = 12276312
-SHA256 (rust/2020-08-03/cargo-0.46.1-powerpc64-unknown-freebsd-elfv1.tar.xz) = b859a1320ddcbebab1fd234353290b0c6290e02563eb93346467a4aca0c36712
-SIZE (rust/2020-08-03/cargo-0.46.1-powerpc64-unknown-freebsd-elfv1.tar.xz) = 4523696
-SHA256 (rust/2020-08-03/rustc-1.45.2-powerpc64-unknown-freebsd-elfv2.tar.xz) = a56eaa8acd84e081ee0195126774c8e0c832cc2d0ca6a38e06d964dca0098f41
-SIZE (rust/2020-08-03/rustc-1.45.2-powerpc64-unknown-freebsd-elfv2.tar.xz) = 30953660
-SHA256 (rust/2020-08-03/rust-std-1.45.2-powerpc64-unknown-freebsd-elfv2.tar.xz) = 1f3e44d1cfd260fc9533f1fa46dbc317b2c09251bd47f084402f2d22301418fd
-SIZE (rust/2020-08-03/rust-std-1.45.2-powerpc64-unknown-freebsd-elfv2.tar.xz) = 12223976
-SHA256 (rust/2020-08-03/cargo-0.46.1-powerpc64-unknown-freebsd-elfv2.tar.xz) = afb179eb04eb06f946f875aa243c2f3723ea5bc8a7cd2a027cc33ea97808d87f
-SIZE (rust/2020-08-03/cargo-0.46.1-powerpc64-unknown-freebsd-elfv2.tar.xz) = 4213368
+TIMESTAMP = 1602225332
+SHA256 (rust/rustc-1.47.0-src.tar.xz) = ec2c81d2d34890486094a6407589be96161e4e301c238332d32c6dbae4f38ea2
+SIZE (rust/rustc-1.47.0-src.tar.xz) = 104143736
+SHA256 (rust/2020-08-27/rustc-1.46.0-aarch64-unknown-freebsd.tar.xz) = 77d1cad786ca38ad3fb17bb13eb98ee7adeffcbb6925e8aeb3b084b726fffa1a
+SIZE (rust/2020-08-27/rustc-1.46.0-aarch64-unknown-freebsd.tar.xz) = 46123756
+SHA256 (rust/2020-08-27/rust-std-1.46.0-aarch64-unknown-freebsd.tar.xz) = 10c3896a1b10ec0b99351c5c479f2c053923136165befafa39c18eeeb738973b
+SIZE (rust/2020-08-27/rust-std-1.46.0-aarch64-unknown-freebsd.tar.xz) = 12424080
+SHA256 (rust/2020-08-27/cargo-0.47.0-aarch64-unknown-freebsd.tar.xz) = 463dd5338a8600758cac0f7e5c2a62c0068e20e59a3bc7bcfab4efa6da58e99b
+SIZE (rust/2020-08-27/cargo-0.47.0-aarch64-unknown-freebsd.tar.xz) = 3124348
+SHA256 (rust/2020-08-27/rustc-1.46.0-x86_64-unknown-freebsd.tar.xz) = 05f78c027c8fea8b19927b1af84c96959896d96d9692d432ea64210f2540947d
+SIZE (rust/2020-08-27/rustc-1.46.0-x86_64-unknown-freebsd.tar.xz) = 34583796
+SHA256 (rust/2020-08-27/rust-std-1.46.0-x86_64-unknown-freebsd.tar.xz) = 24d011a0f1b48bdffa5577cbc8298758f7f2ac091f647b93442c8251de31e493
+SIZE (rust/2020-08-27/rust-std-1.46.0-x86_64-unknown-freebsd.tar.xz) = 13078008
+SHA256 (rust/2020-08-27/cargo-0.47.0-x86_64-unknown-freebsd.tar.xz) = 5e5fbda34ea2cc3e2ed57090edfbf1b32948ebd8550b4e25bbc64b96b6ef72c0
+SIZE (rust/2020-08-27/cargo-0.47.0-x86_64-unknown-freebsd.tar.xz) = 4635900
+SHA256 (rust/2020-08-27/rustc-1.46.0-armv6-unknown-freebsd.tar.xz) = 8d5f1c519fde1b47062ed63721db43d7edff2483cc33490b4d84a3dd9c41d94a
+SIZE (rust/2020-08-27/rustc-1.46.0-armv6-unknown-freebsd.tar.xz) = 32934396
+SHA256 (rust/2020-08-27/rust-std-1.46.0-armv6-unknown-freebsd.tar.xz) = 744019b69dd9146d024bcbbf3c75edd64b9e2bd21d5e8164cc43b192ea74fada
+SIZE (rust/2020-08-27/rust-std-1.46.0-armv6-unknown-freebsd.tar.xz) = 12370772
+SHA256 (rust/2020-08-27/cargo-0.47.0-armv6-unknown-freebsd.tar.xz) = 8c42654cbda2efd4ffd156f566a1e289052b9d743389e1c668c73235d91a3442
+SIZE (rust/2020-08-27/cargo-0.47.0-armv6-unknown-freebsd.tar.xz) = 4081880
+SHA256 (rust/2020-08-27/rustc-1.46.0-armv7-unknown-freebsd.tar.xz) = d82bd2e50339ca0749812e8f4078ed466217cba7e3f68886d7d2baaf74257c3c
+SIZE (rust/2020-08-27/rustc-1.46.0-armv7-unknown-freebsd.tar.xz) = 33430540
+SHA256 (rust/2020-08-27/rust-std-1.46.0-armv7-unknown-freebsd.tar.xz) = c1417394efb65c264000842eb5dbbd9a00061cdc02415b5f3a5e416753bb3d5f
+SIZE (rust/2020-08-27/rust-std-1.46.0-armv7-unknown-freebsd.tar.xz) = 12358112
+SHA256 (rust/2020-08-27/cargo-0.47.0-armv7-unknown-freebsd.tar.xz) = 32268a102117cf4c51c260945c0627c7f00769fa6d2cb387b05d0148b4a2e48e
+SIZE (rust/2020-08-27/cargo-0.47.0-armv7-unknown-freebsd.tar.xz) = 4070860
+SHA256 (rust/2020-08-27/rustc-1.46.0-i686-unknown-freebsd.tar.xz) = 24a95b887e2a66980052d39a04a02662593b9b57b4d0dc1339c60549208b389e
+SIZE (rust/2020-08-27/rustc-1.46.0-i686-unknown-freebsd.tar.xz) = 35015008
+SHA256 (rust/2020-08-27/rust-std-1.46.0-i686-unknown-freebsd.tar.xz) = 885a0bb88d044b29b07ba31e5acbf18db175eb0dbb4187c8c0f88c4e7115a615
+SIZE (rust/2020-08-27/rust-std-1.46.0-i686-unknown-freebsd.tar.xz) = 12964928
+SHA256 (rust/2020-08-27/cargo-0.47.0-i686-unknown-freebsd.tar.xz) = d04245ea7183d733c03e4f93ed487e1450bcec3fbad0f2bd12d98471d76966f5
+SIZE (rust/2020-08-27/cargo-0.47.0-i686-unknown-freebsd.tar.xz) = 4651612
+SHA256 (rust/2020-08-27/rustc-1.46.0-powerpc64-unknown-freebsd-elfv1.tar.xz) = 466790a920feae2932f578d70c01735ddd5bafa7ea6236e4ceac0c5146fa520b
+SIZE (rust/2020-08-27/rustc-1.46.0-powerpc64-unknown-freebsd-elfv1.tar.xz) = 38373788
+SHA256 (rust/2020-08-27/rust-std-1.46.0-powerpc64-unknown-freebsd-elfv1.tar.xz) = 45e52a1d3a176ad5a47f87dc031b9c1f8c6e85c22c71ce99b186059ebceaf811
+SIZE (rust/2020-08-27/rust-std-1.46.0-powerpc64-unknown-freebsd-elfv1.tar.xz) = 12300552
+SHA256 (rust/2020-08-27/cargo-0.47.0-powerpc64-unknown-freebsd-elfv1.tar.xz) = a176b017c751316d90247c8779db7624741f706b4868cb670822e24dbdacb3af
+SIZE (rust/2020-08-27/cargo-0.47.0-powerpc64-unknown-freebsd-elfv1.tar.xz) = 4521544
+SHA256 (rust/2020-08-27/rustc-1.46.0-powerpc64-unknown-freebsd-elfv2.tar.xz) = 376c921c989182973c1336ee1d3ecb9b90b75ab479fd6632e83cf0391f038b66
+SIZE (rust/2020-08-27/rustc-1.46.0-powerpc64-unknown-freebsd-elfv2.tar.xz) = 32563056
+SHA256 (rust/2020-08-27/rust-std-1.46.0-powerpc64-unknown-freebsd-elfv2.tar.xz) = 1b67251c2a97ef25e566231baffc38c32689ab7be8d165a6688979ec25858afb
+SIZE (rust/2020-08-27/rust-std-1.46.0-powerpc64-unknown-freebsd-elfv2.tar.xz) = 12255592
+SHA256 (rust/2020-08-27/cargo-0.47.0-powerpc64-unknown-freebsd-elfv2.tar.xz) = 312c8502aaa49aa339fb8252cec94de7f2f29220d237ddc144c6b19de90334ef
+SIZE (rust/2020-08-27/cargo-0.47.0-powerpc64-unknown-freebsd-elfv2.tar.xz) = 4254320
+SHA256 (rust/2020-08-27/cargo-0.47.0-powerpc64le-unknown-freebsd.tar.xz) = 5391388fce390adaa10bb2ab7de9cbdeda8307ab95a64d53d501ced257f99ff2
+SIZE (rust/2020-08-27/cargo-0.47.0-powerpc64le-unknown-freebsd.tar.xz) = 4493584
+SHA256 (rust/2020-08-27/rust-std-1.46.0-powerpc64le-unknown-freebsd.tar.xz) = aa207e73c88f12d512aaa42c704145eb84ce04e48d027dffb0363965e74a8820
+SIZE (rust/2020-08-27/rust-std-1.46.0-powerpc64le-unknown-freebsd.tar.xz) = 12494160
+SHA256 (rust/2020-08-27/rustc-1.46.0-powerpc64le-unknown-freebsd.tar.xz) = 559ee2c55f3614351ca5618166a3bbe8ebcda7bddf0074c5e2c5917734c1d279
+SIZE (rust/2020-08-27/rustc-1.46.0-powerpc64le-unknown-freebsd.tar.xz) = 33672276
Index: head/lang/rust/files/patch-vendor_openssl-sys_build_main.rs
===================================================================
--- head/lang/rust/files/patch-vendor_openssl-sys_build_main.rs (revision 552220)
+++ head/lang/rust/files/patch-vendor_openssl-sys_build_main.rs (nonexistent)
@@ -1,21 +0,0 @@
---- vendor/openssl-sys/build/main.rs.orig 2020-06-01 17:45:25 UTC
-+++ vendor/openssl-sys/build/main.rs
-@@ -204,6 +204,9 @@ See rust-openssl README for more information:
- (3, 0, 0) => ('3', '0', '0'),
- (3, 0, 1) => ('3', '0', '1'),
- (3, 0, _) => ('3', '0', 'x'),
-+ (3, 1, 0) => ('3', '1', '0'),
-+ (3, 1, _) => ('3', '1', 'x'),
-+ (3, 2, 0) => ('3', '2', '0'),
- _ => version_error(),
- };
-
-@@ -244,7 +247,7 @@ fn version_error() -> ! {
- "
-
- This crate is only compatible with OpenSSL 1.0.1 through 1.1.1, or LibreSSL 2.5
--through 3.0.x, but a different version of OpenSSL was found. The build is now aborting
-+through 3.2.0, but a different version of OpenSSL was found. The build is now aborting
- due to this version mismatch.
-
- "
Property changes on: head/lang/rust/files/patch-vendor_openssl-sys_build_main.rs
___________________________________________________________________
Deleted: fbsd:nokeywords
## -1 +0,0 ##
-yes
\ No newline at end of property
Index: head/lang/rust/files/patch-src_bootstrap_native.rs
===================================================================
--- head/lang/rust/files/patch-src_bootstrap_native.rs (revision 552220)
+++ head/lang/rust/files/patch-src_bootstrap_native.rs (revision 552221)
@@ -1,55 +1,43 @@
From 9741fbd202b2b55de95abe1eb7f3d8185e312444 Mon Sep 17 00:00:00 2001
From: Jake Goulding <jake.goulding@gmail.com>
Date: Sat, 11 Jul 2020 09:38:01 -0400
Subject: [PATCH] Don't allow `DESTDIR` to influence LLVM builds
When running a command like `DESTDIR=foo x.py install` in a completely
clean build directory, this will cause LLVM to be installed into
`DESTDIR`, which then causes the build to fail later when it attempts
to *use* those LLVM files.
---
src/bootstrap/native.rs | 5 +++++
1 file changed, 5 insertions(+)
--- src/bootstrap/native.rs.orig 2020-08-24 15:00:49 UTC
+++ src/bootstrap/native.rs
-@@ -347,6 +347,11 @@ fn configure_cmake(
- // LLVM and LLD builds can produce a lot of those and hit CI limits on log size.
- cfg.define("CMAKE_INSTALL_MESSAGE", "LAZY");
-
-+ // Do not allow the user's value of DESTDIR to influence where
-+ // LLVM will install itself. LLVM must always be installed in our
-+ // own build directories.
-+ cfg.env("DESTDIR", "");
-+
- if builder.config.ninja {
- cfg.generator("Ninja");
- }
@@ -517,26 +522,9 @@ impl Step for Lld {
let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
configure_cmake(builder, target, &mut cfg, true);
- // This is an awful, awful hack. Discovered when we migrated to using
- // clang-cl to compile LLVM/LLD it turns out that LLD, when built out of
- // tree, will execute `llvm-config --cmakedir` and then tell CMake about
- // that directory for later processing. Unfortunately if this path has
- // forward slashes in it (which it basically always does on Windows)
- // then CMake will hit a syntax error later on as... something isn't
- // escaped it seems?
- //
- // Instead of attempting to fix this problem in upstream CMake and/or
- // LLVM/LLD we just hack around it here. This thin wrapper will take the
- // output from llvm-config and replace all instances of `\` with `/` to
- // ensure we don't hit the same bugs with escaping. It means that you
- // can't build on a system where your paths require `\` on Windows, but
- // there's probably a lot of reasons you can't do that other than this.
- let llvm_config_shim = env::current_exe().unwrap().with_file_name("llvm-config-wrapper");
-
cfg.out_dir(&out_dir)
.profile("Release")
- .env("LLVM_CONFIG_REAL", &llvm_config)
- .define("LLVM_CONFIG_PATH", llvm_config_shim)
+ .define("LLVM_CONFIG_PATH", &llvm_config)
.define("LLVM_INCLUDE_TESTS", "OFF");
// While we're using this horrible workaround to shim the execution of
Index: head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_mod.rs
===================================================================
--- head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_mod.rs (nonexistent)
+++ head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_mod.rs (revision 552221)
@@ -0,0 +1,10 @@
+--- src/librustc_target/spec/mod.rs.orig 2020-09-30 18:06:16.613040000 -0500
++++ src/librustc_target/spec/mod.rs 2020-09-30 18:06:47.040460000 -0500
+@@ -553,6 +553,7 @@ supported_targets! {
+ ("armv7-unknown-freebsd", armv7_unknown_freebsd),
+ ("i686-unknown-freebsd", i686_unknown_freebsd),
+ ("powerpc64-unknown-freebsd", powerpc64_unknown_freebsd),
++ ("powerpc64le-unknown-freebsd", powerpc64le_unknown_freebsd),
+ ("x86_64-unknown-freebsd", x86_64_unknown_freebsd),
+
+ ("x86_64-unknown-dragonfly", x86_64_unknown_dragonfly),
Property changes on: head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_mod.rs
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_powerpc64le__unknown__freebsd.rs
===================================================================
--- head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_powerpc64le__unknown__freebsd.rs (nonexistent)
+++ head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_powerpc64le__unknown__freebsd.rs (revision 552221)
@@ -0,0 +1,25 @@
+--- /dev/null 2020-09-30 18:09:21.093949000 -0500
++++ src/librustc_target/spec/powerpc64le_unknown_freebsd.rs 2020-09-30 18:08:47.737833000 -0500
+@@ -0,0 +1,22 @@
++use crate::spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
++
++pub fn target() -> TargetResult {
++ let mut base = super::freebsd_base::opts();
++ base.cpu = "ppc64le".to_string();
++ base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
++ base.max_atomic_width = Some(64);
++
++ Ok(Target {
++ llvm_target: "powerpc64le-unknown-freebsd".to_string(),
++ target_endian: "little".to_string(),
++ target_pointer_width: "64".to_string(),
++ target_c_int_width: "32".to_string(),
++ data_layout: "e-m:e-i64:64-n32:64".to_string(),
++ arch: "powerpc64".to_string(),
++ target_os: "freebsd".to_string(),
++ target_env: String::new(),
++ target_vendor: "unknown".to_string(),
++ linker_flavor: LinkerFlavor::Gcc,
++ options: TargetOptions { target_mcount: "_mcount".to_string(), ..base },
++ })
++}
Property changes on: head/lang/rust/files/powerpc64le/patch-src_librustc__target_spec_powerpc64le__unknown__freebsd.rs
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/lang/rust/files/powerpc64le/patch-src_llvm-project_clang_lib_Basic_Targets.cpp
===================================================================
--- head/lang/rust/files/powerpc64le/patch-src_llvm-project_clang_lib_Basic_Targets.cpp (nonexistent)
+++ head/lang/rust/files/powerpc64le/patch-src_llvm-project_clang_lib_Basic_Targets.cpp (revision 552221)
@@ -0,0 +1,11 @@
+--- src/llvm-project/clang/lib/Basic/Targets.cpp.orig 2020-07-07 01:39:17.000000000 -0500
++++ src/llvm-project/clang/lib/Basic/Targets.cpp 2020-09-30 18:13:54.521751000 -0500
+@@ -352,6 +352,8 @@ TargetInfo *AllocateTarget(const llvm::Triple &Triple,
+ switch (os) {
+ case llvm::Triple::Linux:
+ return new LinuxTargetInfo<PPC64TargetInfo>(Triple, Opts);
++ case llvm::Triple::FreeBSD:
++ return new FreeBSDTargetInfo<PPC64TargetInfo>(Triple, Opts);
+ case llvm::Triple::NetBSD:
+ return new NetBSDTargetInfo<PPC64TargetInfo>(Triple, Opts);
+ default:
Property changes on: head/lang/rust/files/powerpc64le/patch-src_llvm-project_clang_lib_Basic_Targets.cpp
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/lang/rust/files/powerpc64le/patch-vendor_openssl-src_src_lib.rs
===================================================================
--- head/lang/rust/files/powerpc64le/patch-vendor_openssl-src_src_lib.rs (nonexistent)
+++ head/lang/rust/files/powerpc64le/patch-vendor_openssl-src_src_lib.rs (revision 552221)
@@ -0,0 +1,10 @@
+--- vendor/openssl-src/src/lib.rs.orig 2020-09-30 18:01:13.855023000 -0500
++++ vendor/openssl-src/src/lib.rs 2020-09-30 18:02:06.365292000 -0500
+@@ -173,6 +173,7 @@ impl Build {
+ "powerpc-unknown-linux-gnu" => "linux-ppc",
+ "powerpc64-unknown-freebsd" => "BSD-generic64",
+ "powerpc64-unknown-linux-gnu" => "linux-ppc64",
++ "powerpc64le-unknown-freebsd" => "BSD-generic64",
+ "powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
+ "riscv64gc-unknown-linux-gnu" => "linux-generic64",
+ "s390x-unknown-linux-gnu" => "linux64-s390x",
Property changes on: head/lang/rust/files/powerpc64le/patch-vendor_openssl-src_src_lib.rs
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/lang/rust-bootstrap/Makefile
===================================================================
--- head/lang/rust-bootstrap/Makefile (revision 552220)
+++ head/lang/rust-bootstrap/Makefile (revision 552221)
@@ -1,229 +1,236 @@
# $FreeBSD$
PORTNAME= rust
PORTVERSION= 1.46.0
CATEGORIES= lang
MASTER_SITES= https://static.rust-lang.org/dist/:rust \
LOCAL/tobik:armbase \
LOCAL/tobik:base \
+ LOCAL/bdragon:ppc64lebase \
https://download.freebsd.org/ftp/${_RUST_FBSD_SUBDIR_${FLAVOR}}/:base \
LOCAL/tobik:powerpc64_gcc
# http://pkg.freebsd.org/FreeBSD:12:powerpc64/quarterly/All/gcc9-9.2.0.txz?dummy=/:powerpc64_gcc
PKGNAMESUFFIX= -bootstrap
DISTNAME= ${PORTNAME}c-${PORTVERSION}-src
DISTFILES= rust/${DISTNAME}${EXTRACT_SUFX}:rust \
${_RUST_FBSD_DIST_${FLAVOR}}
MAINTAINER= rust@FreeBSD.org
COMMENT= Create bootstrap compilers for building lang/rust
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
ONLY_FOR_ARCHS= amd64
ONLY_FOR_ARCHS_REASON= untested on other archs
BUILD_DEPENDS= cmake:devel/cmake \
gmake:devel/gmake \
rust>=${PORTVERSION}:lang/rust
-FLAVORS= aarch64 amd64 armv6 armv7 i386 powerpc64_elfv1 powerpc64_elfv2
+FLAVORS= aarch64 amd64 armv6 armv7 i386 powerpc64_elfv1 powerpc64_elfv2 powerpc64le
FLAVOR?= ${FLAVORS:[1]}
aarch64_PKGNAMEPREFIX= aarch64-
amd64_PKGNAMEPREFIX= amd64-
armv6_PKGNAMEPREFIX= armv6-
armv7_PKGNAMEPREFIX= armv7-
i386_PKGNAMEPREFIX= i386-
powerpc64_elfv1_PKGNAMEPREFIX= powerpc64-elfv1-
powerpc64_elfv1_BUILD_DEPENDS= powerpc64-gcc9>0:devel/freebsd-gcc9@powerpc64
powerpc64_elfv2_PKGNAMEPREFIX= powerpc64-elfv2-
+powerpc64le_PKGNAMEPREFIX= powerpc64le-
USES= perl5 python:3.3+,build tar:xz
.if ${FLAVOR} == powerpc64_elfv1
USE_GCC= 9
.endif
# for openssl-src crate
USE_PERL5= build
PATCHDIR= ${.CURDIR}/../rust/files
# Resulting packages are not specific to amd64
NO_ARCH= yes
_RUST_FBSD_DIST_aarch64= FreeBSD-${_RUST_FBSD_VER}-arm64${EXTRACT_SUFX}:base
_RUST_FBSD_DIST_amd64= FreeBSD-${_RUST_FBSD_VER}-amd64${EXTRACT_SUFX}:base
# base.txz for armv* created from WANDBOARD images as there seem
# to be no generic base.txz for it.
_RUST_FBSD_DIST_i386= FreeBSD-${_RUST_FBSD_VER}-i386${EXTRACT_SUFX}:base
_RUST_FBSD_DIST_i386= FreeBSD-${_RUST_FBSD_VER}-i386${EXTRACT_SUFX}:base
_RUST_FBSD_DIST_powerpc64_elfv1= FreeBSD-${_RUST_FBSD_VER}-powerpc64-elfv1${EXTRACT_SUFX}:base \
FreeBSD-${_RUST_FBSD_VER}-powerpc64-elfv1-gcc9-9.2.0${EXTRACT_SUFX}:powerpc64_gcc
_RUST_FBSD_DIST_powerpc64_elfv2= FreeBSD-${_RUST_FBSD_VER}-powerpc64-elfv2-r356261${EXTRACT_SUFX}:base
+_RUST_FBSD_DIST_powerpc64le= FreeBSD-${_RUST_FBSD_VER}-powerpc64le-r366300${EXTRACT_SUFX}:ppc64lebase
_RUST_FBSD_VER= ${_RUST_FBSD_VER_${FLAVOR}:U11.3-RELEASE}
_RUST_FBSD_VER_armv7= 12.1-RELEASE
_RUST_FBSD_VER_powerpc64_elfv1= 12.1-RELEASE
_RUST_FBSD_VER_powerpc64_elfv2= 13.0-CURRENT
+_RUST_FBSD_VER_powerpc64le= 13.0-CURRENT
_RUST_FBSD_SUBDIR_aarch64= releases/arm64/${_RUST_FBSD_VER}/base.txz?dummy=
_RUST_FBSD_SUBDIR_amd64= releases/amd64/${_RUST_FBSD_VER}/base.txz?dummy=
_RUST_FBSD_SUBDIR_i386= releases/i386/${_RUST_FBSD_VER}/base.txz?dummy=
_RUST_FBSD_SUBDIR_powerpc64_elfv1= releases/powerpc/powerpc64/${_RUST_FBSD_VER}/base.txz?dummy=
_RUST_FBSD_SUBDIR_powerpc64_elfv2= snapshots/powerpc/powerpc64/${_RUST_FBSD_VER}/base.txz?dummy=
+_RUST_FBSD_SUBDIR_powerpc64le= snapshots/powerpc/powerpc64le/${_RUST_FBSD_VER}/base.txz?dummy=
_CARGO_VENDOR_DIR= ${WRKSRC}/vendor
_RUST_ARCH_amd64= x86_64
_RUST_ARCH_i386= i686
_RUST_ARCH_powerpc64_elfv1= powerpc64
_RUST_ARCH_powerpc64_elfv2= powerpc64
+_RUST_ARCH_powerpc64le= powerpc64le
_RUST_HOST= ${_RUST_ARCH_${ARCH}:U${ARCH}}-unknown-${OPSYS:tl}
_RUST_TARGET= ${_RUST_ARCH_${FLAVOR}:U${FLAVOR}}-unknown-${OPSYS:tl}
_RUST_LLVM_TARGET= ${_RUST_LLVM_TARGET_${FLAVOR}}
_RUST_LLVM_TARGET_aarch64= AArch64
_RUST_LLVM_TARGET_amd64= X86
_RUST_LLVM_TARGET_armv6= ARM
_RUST_LLVM_TARGET_armv7= ARM
_RUST_LLVM_TARGET_i386= X86
_RUST_LLVM_TARGET_powerpc64_elfv1= PowerPC
_RUST_LLVM_TARGET_powerpc64_elfv2= PowerPC
+_RUST_LLVM_TARGET_powerpc64le= PowerPC
_RUST_LLVM_TRIPLE= ${_RUST_LLVM_TRIPLE_${FLAVOR}:U${_RUST_TARGET}}
_RUST_LLVM_TRIPLE_armv6= armv6-gnueabihf-freebsd
_RUST_LLVM_TRIPLE_armv7= armv7-gnueabihf-freebsd
.include <bsd.port.pre.mk>
.if ${OPSYS} != FreeBSD
IGNORE= is only for FreeBSD
.elif ${OSVERSION} < 1200502
IGNORE= will not build on 12.0 due to old toolchain; 11.x untested
.endif
.if exists(${PATCHDIR}/${FLAVOR:S/_/-/})
EXTRA_PATCHES+= ${PATCHDIR}/${FLAVOR:S/_/-/}
.endif
post-patch:
# Disable vendor checksums
@${REINPLACE_CMD} 's,"files":{[^}]*},"files":{},' \
${_CARGO_VENDOR_DIR}/*/.cargo-checksum.json
.if ${FLAVOR} == powerpc64_elfv1
@${REINPLACE_CMD} -e 's,"c++","stdc++",g' \
${WRKSRC}/src/librustc_llvm/build.rs
.endif
@${REINPLACE_CMD} -e 's,%CC%,${CC},g' \
-e 's,%WRKDIR%,${WRKDIR},g' \
${WRKSRC}/src/librustc_llvm/build.rs \
${WRKSRC}/src/bootstrap/native.rs
do-configure:
# Check that the running kernel has COMPAT_FREEBSD11 required by lang/rust post-ino64
@${SETENV} CC="${CC}" OPSYS="${OPSYS}" OSVERSION="${OSVERSION}" WRKDIR="${WRKDIR}" \
${SH} ${SCRIPTSDIR}/rust-compat11-canary.sh
@${ECHO_CMD} '[build]' > ${WRKSRC}/config.toml
@${ECHO_CMD} 'vendor=true' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'extended=false' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'python="${PYTHON_CMD}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'docs=false' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'verbose=2' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cargo-native-static=true' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cargo="${LOCALBASE}/bin/cargo"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'rustc="${LOCALBASE}/bin/rustc"' >> ${WRKSRC}/config.toml
.if ${_RUST_HOST} != ${_RUST_TARGET}
@${ECHO_CMD} 'host=["${_RUST_HOST}","${_RUST_TARGET}"]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'target=["${_RUST_TARGET}"]' >> ${WRKSRC}/config.toml
.endif
@${ECHO_CMD} '[rust]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'channel="stable"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'default-linker="${CC}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'deny-warnings=false' >> ${WRKSRC}/config.toml
@${ECHO_CMD} '[llvm]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'link-shared=false' >> ${WRKSRC}/config.toml
.if ${FLAVOR} == powerpc64_elfv1
@${ECHO_CMD} 'static-libstdcpp=true' >> ${WRKSRC}/config.toml
.endif
.if defined(WITH_CCACHE_BUILD) && !defined(NO_CCACHE)
@${ECHO_CMD} 'ccache="${CCACHE_BIN}"' >> ${WRKSRC}/config.toml
.else
@${ECHO_CMD} 'ccache=false' >> ${WRKSRC}/config.toml
.endif
# https://github.com/rust-lang/rust/pull/72696#issuecomment-641517185
@${ECHO_CMD} 'ldflags="-lz"' >> ${WRKSRC}/config.toml
# we need to make sure to always build llvm with X86 support to get a
# host compiler that can build the host->target compiler
@${ECHO_CMD} 'targets="${_RUST_LLVM_TARGET};X86"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} '[target.${_RUST_TARGET}]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cc="${WRKDIR}/${_RUST_TARGET}-cc"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cxx="${WRKDIR}/${_RUST_TARGET}-c++"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'linker="${WRKDIR}/${_RUST_TARGET}-cc"' >> ${WRKSRC}/config.toml
.for _key _util in ar ${AR} ranlib ${RANLIB}
@bin="$$(which ${_util})"; \
${ECHO_CMD} "${_key}=\"$$bin\"" >> ${WRKSRC}/config.toml
.endfor
.if ${_RUST_HOST} != ${_RUST_TARGET}
@${ECHO_CMD} '[target.${_RUST_HOST}]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cc="${CC}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'cxx="${CXX}"' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'linker="${CC}"' >> ${WRKSRC}/config.toml
.endif
@${ECHO_CMD} '[dist]' >> ${WRKSRC}/config.toml
@${ECHO_CMD} 'src-tarball=false' >> ${WRKSRC}/config.toml
.if ${FLAVOR} == powerpc64_elfv1
@${RLN} ${WRKDIR}${LOCALBASE}/lib/gcc9/libstdc++.a ${WRKDIR}/usr/lib/libstdc++.a
@${RLN} ${WRKDIR}${LOCALBASE}/lib/gcc9/libstdc++.a ${WRKDIR}/usr/lib/libc++.a
@gcc="$$(${ECHO_CMD} ${LOCALBASE}/bin/${_RUST_TARGET}*[0-9]-gcc9)"; \
${PRINTF} '#!/bin/sh\nexec %s --sysroot=${WRKDIR} -Wl,-rpath=${LOCALBASE}/lib/gcc9 -L${WRKDIR}${LOCALBASE}/lib/gcc9 "$$@"\n' "$${gcc}" \
> ${WRKDIR}/${_RUST_TARGET}-cc
@gcc="$$(${ECHO_CMD} ${LOCALBASE}/bin/${_RUST_TARGET}*[0-9]-g++9)"; \
cxxinc="/$$(cd ${WRKDIR} && ${ECHO_CMD} ${LOCALBASE:S,^/,,}/lib/gcc9/include/c++/powerpc64-*)"; \
${PRINTF} '#!/bin/sh\nexec %s --sysroot=${WRKDIR} -isystem=${LOCALBASE}/lib/gcc9/include/c++ -isystem=%s -Wl,-rpath=${LOCALBASE}/lib/gcc9 -L${WRKDIR}${LOCALBASE}/lib/gcc9 "$$@"\n' "$${gcc}" "$${cxxinc}" \
> ${WRKDIR}/${_RUST_TARGET}-c++
.elif ${FLAVOR} == powerpc64_elfv2
@${PRINTF} '#!/bin/sh\nexec ${CC} --sysroot=${WRKDIR} -mabi=elfv2 --target=${_RUST_LLVM_TRIPLE} "$$@"\n' \
> ${WRKDIR}/${_RUST_TARGET}-cc
@${PRINTF} '#!/bin/sh\nexec ${CXX} --sysroot=${WRKDIR} -mabi=elfv2 --target=${_RUST_LLVM_TRIPLE} -stdlib=libc++ "$$@"\n' \
> ${WRKDIR}/${_RUST_TARGET}-c++
.else
@${PRINTF} '#!/bin/sh\nexec ${CC} --sysroot=${WRKDIR} --target=${_RUST_LLVM_TRIPLE} "$$@"\n' \
> ${WRKDIR}/${_RUST_TARGET}-cc
@${PRINTF} '#!/bin/sh\nexec ${CXX} --sysroot=${WRKDIR} --target=${_RUST_LLVM_TRIPLE} -stdlib=libc++ "$$@"\n' \
> ${WRKDIR}/${_RUST_TARGET}-c++
.endif
@${CHMOD} +x ${WRKDIR}/${_RUST_TARGET}-c*
# sanity check cross compilers. we cannot execute the result but
# at least check that it can link a simple program before going further.
@${PRINTF} '#include <stdio.h>\nint main(){return printf("hello\\n");}' | ${WRKDIR}/${_RUST_TARGET}-cc -o ${WRKDIR}/test-c -xc -
# produce some useful info for the build logs like what release/arch test-c is compiled for
@cd ${WRKDIR} && ${FILE} test-c && ${READELF} -A test-c
@${PRINTF} '#include <iostream>\nint main(){std::cout<<"hello"<<std::endl;return 0;}' | ${WRKDIR}/${_RUST_TARGET}-c++ -o ${WRKDIR}/test-c++ -xc++ -
do-build:
@cd ${WRKSRC} && \
${SETENV} ${MAKE_ENV} ${PYTHON_CMD} x.py dist --jobs=${MAKE_JOBS_NUMBER} \
cargo src/librustc src/libstd
do-install:
@${MKDIR} ${STAGEDIR}${PREFIX}/rust-bootstrap/${FLAVOR}
${INSTALL_DATA} ${WRKSRC}/build/dist/*-unknown-${OPSYS:tl}${EXTRACT_SUFX} \
${STAGEDIR}${PREFIX}/rust-bootstrap/${FLAVOR}
.if ${FLAVOR:Mpowerpc64_*}
@cd ${STAGEDIR}${PREFIX}/rust-bootstrap/${FLAVOR} && for f in *${EXTRACT_SUFX}; do \
${MV} $$f $${f%%${EXTRACT_SUFX}}-${FLAVOR:S/_/ /:[2]}${EXTRACT_SUFX}; \
done
.endif
@cd ${STAGEDIR}${PREFIX} && \
${FIND} rust-bootstrap -type f >> ${TMPPLIST}
.if !defined(_RUST_MAKESUM_GUARD)
makesum:
${MAKE} -D_RUST_MAKESUM_GUARD makesum FLAVOR=${FLAVORS:O:[1]} DISTINFO_FILE=${DISTINFO_FILE}.tmp
.for _flavor in ${FLAVORS:O:[2..-1]}
${MAKE} -D_RUST_MAKESUM_GUARD makesum FLAVOR=${_flavor} DISTINFO_FILE=${DISTINFO_FILE}.${_flavor}
${SED} 1d ${DISTINFO_FILE}.${_flavor} >> ${DISTINFO_FILE}.tmp
${RM} ${DISTINFO_FILE}.${_flavor}
.endfor
${AWK} '!seen[$$0]++' ${DISTINFO_FILE}.tmp > ${DISTINFO_FILE}
${RM} ${DISTINFO_FILE}.tmp
.endif
.include <bsd.port.post.mk>
RUN_DEPENDS:= ${RUN_DEPENDS:Ngcc*}
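Editor's note on the makesum override above: it regenerates distinfo once per flavor, strips the leading TIMESTAMP line from every per-flavor file after the first (the ${SED} 1d), concatenates them, and keeps only the first occurrence of each line with awk '!seen[$0]++' so entries shared between flavors, such as the rustc source tarball, appear only once. A minimal Rust sketch of that first-occurrence de-duplication, using a hypothetical helper name (nothing here is part of the port itself):

use std::collections::HashSet;

// Keep the first occurrence of each line while preserving order --
// the same effect as `awk '!seen[$0]++'` in the makesum target above.
fn dedup_first<'a>(lines: impl Iterator<Item = &'a str>) -> Vec<&'a str> {
    let mut seen = HashSet::new();
    lines.filter(|line| seen.insert(*line)).collect()
}

fn main() {
    let merged = "a\nb\na\nc\nb";
    println!("{:?}", dedup_first(merged.lines())); // ["a", "b", "c"]
}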
Index: head/lang/rust-bootstrap/distinfo
===================================================================
--- head/lang/rust-bootstrap/distinfo (revision 552220)
+++ head/lang/rust-bootstrap/distinfo (revision 552221)
@@ -1,19 +1,21 @@
TIMESTAMP = 1598307595
SHA256 (rust/rustc-1.46.0-src.tar.xz) = 865dae1290a205f16ded8818c6a0254cc32862985fc250a602a70285b7d92b82
SIZE (rust/rustc-1.46.0-src.tar.xz) = 101868452
SHA256 (FreeBSD-11.3-RELEASE-arm64.tar.xz) = 0c1ee2bdbec3b6b404edef6858f38f5cdacd727abc53b1dee23910cab939d0c1
SIZE (FreeBSD-11.3-RELEASE-arm64.tar.xz) = 97990888
SHA256 (FreeBSD-11.3-RELEASE-amd64.tar.xz) = 4599023ac136325b86f2fddeec64c1624daa83657e40b00b2ef944c81463a4ff
SIZE (FreeBSD-11.3-RELEASE-amd64.tar.xz) = 117877900
SHA256 (FreeBSD-11.3-RELEASE-arm-armv6.tar.xz) = 1d8025c4a51f025211fb6b4b24ba26144906ff288694394c1c3b1c6e0011db03
SIZE (FreeBSD-11.3-RELEASE-arm-armv6.tar.xz) = 46549572
SHA256 (FreeBSD-12.1-RELEASE-arm-armv7.tar.xz) = 0292ce8186908b3e4b5eb5e5923e1f43d22fcb0ab7f6071b493fcc17d386deab
SIZE (FreeBSD-12.1-RELEASE-arm-armv7.tar.xz) = 83402220
SHA256 (FreeBSD-11.3-RELEASE-i386.tar.xz) = 9c0adb8c34b7e11d277c2fb9354f7864d91a4620e5238d7eb9dbfe51601ed5cb
SIZE (FreeBSD-11.3-RELEASE-i386.tar.xz) = 94399236
SHA256 (FreeBSD-12.1-RELEASE-powerpc64-elfv1.tar.xz) = 2223e22babc169260f9a3f03bbbaf926ef76b1c44d2e81fb172f907ab8fd7aac
SIZE (FreeBSD-12.1-RELEASE-powerpc64-elfv1.tar.xz) = 108905368
SHA256 (FreeBSD-12.1-RELEASE-powerpc64-elfv1-gcc9-9.2.0.tar.xz) = fe208feb93a75e6043e1cd1b9e50be853858d6d600296aaf2b25b55525e58bb2
SIZE (FreeBSD-12.1-RELEASE-powerpc64-elfv1-gcc9-9.2.0.tar.xz) = 38150240
SHA256 (FreeBSD-13.0-CURRENT-powerpc64-elfv2-r356261.tar.xz) = db762f136e41dd3c6676d6dc104282be6d6d6684afb348506fc44ceccd43ce17
SIZE (FreeBSD-13.0-CURRENT-powerpc64-elfv2-r356261.tar.xz) = 150455928
+SHA256 (FreeBSD-13.0-CURRENT-powerpc64le-r366300.tar.xz) = a828a3a968c9911655148fa080587ecd7673aa3d58588ed3dafa55a5c2e12dd3
+SIZE (FreeBSD-13.0-CURRENT-powerpc64le-r366300.tar.xz) = 167748612
Index: head/lang/spidermonkey78/Makefile
===================================================================
--- head/lang/spidermonkey78/Makefile (revision 552220)
+++ head/lang/spidermonkey78/Makefile (revision 552221)
@@ -1,85 +1,86 @@
# $FreeBSD$
PORTNAME= spidermonkey
DISTVERSION= 78.3.1
+PORTREVISION= 1
CATEGORIES= lang
MASTER_SITES= MOZILLA/firefox/releases/${DISTVERSION}esr/source
PKGNAMESUFFIX= ${SP_VER}
DISTNAME= firefox-${DISTVERSION}esr.source
MAINTAINER= swills@FreeBSD.org
COMMENT= Standalone JavaScript based from Mozilla 78-esr
LICENSE= MPL20
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS= ${LOCALBASE}/bin/clang++90:devel/llvm90 \
${LOCALBASE}/bin/python${PYTHON3_DEFAULT}:lang/python${PYTHON3_DEFAULT:S/.//g} \
${RUST_DEFAULT}>=1.35:lang/${RUST_DEFAULT} \
autoconf-2.13:devel/autoconf213 \
rust-cbindgen>=0.8.7:devel/rust-cbindgen
LIB_DEPENDS= libffi.so:devel/libffi \
libicudata.so:devel/icu \
libnspr4.so:devel/nspr
USES= compiler:c++17-lang gmake localbase pathfix pkgconfig python:build \
readline tar:xz
USE_LDCONFIG= yes
SP_VER= 78
HAS_CONFIGURE= yes
WRKSRC= ${WRKDIR}/firefox-${DISTVERSION}
PATCH_WRKSRC= ${WRKDIR}/firefox-${DISTVERSION}/
CONFIGURE_OUTSOURCE= yes
CONFIGURE_SCRIPT= ../firefox-${DISTVERSION}/js/src/configure
CONFIGURE_ARGS= --disable-debug \
--disable-debug-symbols \
--disable-gold \
--disable-jemalloc \
--disable-tests \
--enable-optimize \
--enable-readline \
--enable-shared-js \
--prefix=${PREFIX:Q} \
--target=${CONFIGURE_TARGET} \
--with-intl-api \
--with-system-icu \
--with-system-nspr \
--with-system-zlib
CONFIGURE_ENV= HOST_CC=${CC} \
HOST_CXX=${CXX}
CONFIGURE_ENV+= LLVM_CONFIG=llvm-config90 \
LLVM_OBJDUMP=llvm-objdump90
BINARY_ALIAS= python3=${PYTHON_CMD}
PLIST_SUB= SP_VER=${SP_VER}
.include <bsd.port.pre.mk>
.if ${ARCH} == amd64
CONFIGURE_TARGET= x86_64-portbld-freebsd${OSREL}
.endif
# Require newer Clang than what's in base system unless user opted out
.if ${CHOSEN_COMPILER_TYPE} == gcc
USE_GCC= yes
.elif ${CC} == cc && ${CXX} == c++ && exists(/usr/lib/libc++.so)
CPP= ${LOCALBASE}/bin/clang-cpp90
CC= ${LOCALBASE}/bin/clang90
CXX= ${LOCALBASE}/bin/clang++90
BUILD_DEPENDS+= ${LOCALBASE}/bin/clang90:devel/llvm90
.endif
post-patch:
@${REINPLACE_CMD} -e 's|%%LOCALBASE%%|${LOCALBASE}|g' ${WRKSRC}/js/moz.configure
post-install:
${RM} ${STAGEDIR}${PREFIX}/lib/libjs_static.ajs
${LN} -fs libmozjs-${SP_VER}.so ${STAGEDIR}${PREFIX}/lib/libmozjs-${SP_VER}.so.1
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/js${SP_VER}
${STRIP_CMD} ${STAGEDIR}${PREFIX}/lib/libmozjs-${SP_VER}.*
.include <bsd.port.post.mk>
Index: head/mail/thunderbird/Makefile
===================================================================
--- head/mail/thunderbird/Makefile (revision 552220)
+++ head/mail/thunderbird/Makefile (revision 552221)
@@ -1,80 +1,80 @@
# Created by: Joe Marcus Clarke <marcus@FreeBSD.org>
# $FreeBSD$
PORTNAME= thunderbird
DISTVERSION= 78.3.2
-PORTREVISION= 3
+PORTREVISION= 4
CATEGORIES= mail news net-im
MASTER_SITES= MOZILLA/${PORTNAME}/releases/${DISTVERSION}/source \
MOZILLA/${PORTNAME}/candidates/${DISTVERSION}-candidates/build1/source
DISTFILES= ${DISTNAME}.source${EXTRACT_SUFX}
MAINTAINER= gecko@FreeBSD.org
COMMENT= Mozilla Thunderbird is standalone mail and news that stands above
BUILD_DEPENDS= nspr>=4.26:devel/nspr \
nss>=3.56:security/nss \
icu>=67.1,1:devel/icu \
libevent>=2.1.8:devel/libevent \
harfbuzz>=2.6.6:print/harfbuzz \
graphite2>=1.3.14:graphics/graphite2 \
png>=1.6.35:graphics/png \
libvpx>=1.8.2:multimedia/libvpx \
${PYTHON_PKGNAMEPREFIX}sqlite3>0:databases/py-sqlite3@${PY_FLAVOR} \
v4l_compat>0:multimedia/v4l_compat \
autoconf-2.13:devel/autoconf213 \
nasm:devel/nasm \
yasm:devel/yasm \
zip:archivers/zip
LIB_DEPENDS= libjson-c.so:devel/json-c \
libbotan-2.so:security/botan2
SSP_UNSAFE= yes
USE_GECKO= gecko
USE_MOZILLA= -sqlite
USES= tar:xz
MOZ_OPTIONS= --enable-application=comm/mail --enable-official-branding
MOZ_OPTIONS+= --with-system-bz2 --with-system-jsonc --with-system-botan
MOZ_MK_OPTIONS= MOZ_THUNDERBIRD=1 MAIL_PKG_SHARED=1
MOZ_EXPORT= MOZ_THUNDERBIRD=1 MAIL_PKG_SHARED=1
PORTNAME_ICON= ${MOZILLA}.png
PORTNAME_ICON_SRC= ${PREFIX}/lib/${MOZILLA}/chrome/icons/default/default48.png
SYSTEM_PREFS= ${FAKEDIR}/lib/${PORTNAME}/defaults/pref/${PORTNAME}.js
OPTIONS_DEFINE= LIGHTNING
OPTIONS_DEFAULT=CANBERRA LIGHTNING
.include "${.CURDIR}/../../www/firefox/Makefile.options"
.include <bsd.port.pre.mk>
.if ${PORT_OPTIONS:MLIGHTNING}
MOZ_OPTIONS+= --enable-calendar
.else
MOZ_OPTIONS+= --disable-calendar
.endif
post-extract:
@${SED} -e 's|@PORTNAME_ICON@|${PORTNAME_ICON:R}|;s|@MOZILLA@|${MOZILLA}|' \
<${FILESDIR}/thunderbird.desktop.in >${WRKDIR}/${MOZILLA_EXEC_NAME}.desktop
post-patch:
@${REINPLACE_CMD} -e 's|%%LOCALBASE%%|${LOCALBASE}|g' \
${WRKSRC}/comm/mail/app/nsMailApp.cpp
pre-configure:
(cd ${WRKSRC} && ${LOCALBASE}/bin/autoconf-2.13)
(cd ${MOZSRC} && ${LOCALBASE}/bin/autoconf-2.13)
(cd ${MOZSRC}/js/src/ && ${LOCALBASE}/bin/autoconf-2.13)
port-pre-install:
${MKDIR} ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/defaults
post-install:
${INSTALL_DATA} ${WRKDIR}/${MOZILLA_EXEC_NAME}.desktop ${STAGEDIR}${PREFIX}/share/applications
${LN} -sf ${PORTNAME_ICON_SRC} ${STAGEDIR}${PREFIX}/share/pixmaps/${PORTNAME_ICON}
.include <bsd.port.post.mk>
Index: head/mail/thunderbird/files/patch-bug1663715
===================================================================
--- head/mail/thunderbird/files/patch-bug1663715 (nonexistent)
+++ head/mail/thunderbird/files/patch-bug1663715 (revision 552221)
@@ -0,0 +1,31087 @@
+From 63678ae69e03325d65255d29f1af4a6ea3dd354a Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:07:49 +0200
+Subject: [PATCH 36/38] bmo#1643201: Cherry-pick some servo changes to
+ derive_common
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ servo/components/derive_common/cg.rs | 6 +++++-
+ 1 file changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/servo/components/derive_common/cg.rs b/servo/components/derive_common/cg.rs
+index 55a75398c7..c51c0d7750 100644
+--- servo/components/derive_common/cg.rs
++++ servo/components/derive_common/cg.rs
+@@ -7,7 +7,7 @@ use proc_macro2::{Span, TokenStream};
+ use quote::TokenStreamExt;
+ use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field};
+ use syn::{GenericArgument, GenericParam, Ident, Path};
+-use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray};
++use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray, TypeGroup};
+ use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple};
+ use syn::{Variant, WherePredicate};
+ use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo};
+@@ -208,6 +208,10 @@ where
+ elem: Box::new(map_type_params(&inner.elem, params, f)),
+ ..inner.clone()
+ }),
++ Type::Group(ref inner) => Type::from(TypeGroup {
++ elem: Box::new(map_type_params(&inner.elem, params, f)),
++ ..inner.clone()
++ }),
+ ref ty => panic!("type {:?} cannot be mapped yet", ty),
+ }
+ }
+--
+2.28.0
+
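Editor's note: the patch above (36/38) adds a Type::Group arm to derive_common's map_type_params. syn produces Type::Group when a type reaches the derive wrapped in the invisible delimiters that macro expansion can introduce, and without an arm for it the function falls through to the "cannot be mapped yet" panic visible at the end of the match. A much-simplified model of the recursion, using a toy enum rather than syn's real AST (all names here are illustrative only):

// Toy stand-in for syn::Type; only the cases relevant to the patch.
#[derive(Debug)]
enum Ty {
    Param(String),      // a generic parameter such as `T`
    Group(Box<Ty>),     // a type wrapped in invisible (macro-inserted) delimiters
    Tuple(Vec<Ty>),
}

fn map_params(ty: &Ty, f: &mut impl FnMut(&str) -> Ty) -> Ty {
    match ty {
        Ty::Param(name) => f(name.as_str()),
        // The new arm: unwrap the group and keep mapping inside it,
        // instead of panicking on an unrecognized type shape.
        Ty::Group(inner) => Ty::Group(Box::new(map_params(inner, f))),
        Ty::Tuple(elems) => Ty::Tuple(elems.iter().map(|t| map_params(t, f)).collect()),
    }
}

fn main() {
    let ty = Ty::Group(Box::new(Ty::Param("T".into())));
    let mapped = map_params(&ty, &mut |p| Ty::Param(format!("<{p} as Trait>::Computed")));
    println!("{mapped:?}");
}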
+From 23f22e9de6cc2236d58cc03997a1040e62c532e1 Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:09:04 +0200
+Subject: [PATCH 37/38] bmo#1653339: Teach style_derive's map_type_params about
+ mapping self correctly
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ servo/components/derive_common/cg.rs | 30 +++++++++++--------
+ .../style_derive/to_computed_value.rs | 4 ++-
+ 2 files changed, 21 insertions(+), 13 deletions(-)
+
+diff --git a/servo/components/derive_common/cg.rs b/servo/components/derive_common/cg.rs
+index c51c0d7750..8abfd87149 100644
+--- servo/components/derive_common/cg.rs
++++ servo/components/derive_common/cg.rs
+@@ -154,19 +154,19 @@ pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: &
+ segment.into()
+ }
+
+-pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type
++pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], self_type: &Path, f: &mut F) -> Type
+ where
+ F: FnMut(&Ident) -> Type,
+ {
+ match *ty {
+ Type::Slice(ref inner) => Type::from(TypeSlice {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ Type::Array(ref inner) => {
+ //ref ty, ref expr) => {
+ Type::from(TypeArray {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ })
+ },
+@@ -175,7 +175,7 @@ where
+ elems: inner
+ .elems
+ .iter()
+- .map(|ty| map_type_params(&ty, params, f))
++ .map(|ty| map_type_params(&ty, params, self_type, f))
+ .collect(),
+ ..inner.clone()
+ }),
+@@ -187,10 +187,16 @@ where
+ if params.iter().any(|ref param| &param.ident == ident) {
+ return f(ident);
+ }
++ if ident == "Self" {
++ return Type::from(TypePath {
++ qself: None,
++ path: self_type.clone(),
++ });
++ }
+ }
+ Type::from(TypePath {
+ qself: None,
+- path: map_type_params_in_path(path, params, f),
++ path: map_type_params_in_path(path, params, self_type, f),
+ })
+ },
+ Type::Path(TypePath {
+@@ -198,25 +204,25 @@ where
+ ref path,
+ }) => Type::from(TypePath {
+ qself: qself.as_ref().map(|qself| QSelf {
+- ty: Box::new(map_type_params(&qself.ty, params, f)),
++ ty: Box::new(map_type_params(&qself.ty, params, self_type, f)),
+ position: qself.position,
+ ..qself.clone()
+ }),
+- path: map_type_params_in_path(path, params, f),
++ path: map_type_params_in_path(path, params, self_type, f),
+ }),
+ Type::Paren(ref inner) => Type::from(TypeParen {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ Type::Group(ref inner) => Type::from(TypeGroup {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ ref ty => panic!("type {:?} cannot be mapped yet", ty),
+ }
+ }
+
+-fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path
++fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], self_type: &Path, f: &mut F) -> Path
+ where
+ F: FnMut(&Ident) -> Type,
+ {
+@@ -236,11 +242,11 @@ where
+ .map(|arg| match arg {
+ ty @ &GenericArgument::Lifetime(_) => ty.clone(),
+ &GenericArgument::Type(ref data) => {
+- GenericArgument::Type(map_type_params(data, params, f))
++ GenericArgument::Type(map_type_params(data, params, self_type, f))
+ },
+ &GenericArgument::Binding(ref data) => {
+ GenericArgument::Binding(Binding {
+- ty: map_type_params(&data.ty, params, f),
++ ty: map_type_params(&data.ty, params, self_type, f),
+ ..data.clone()
+ })
+ },
+diff --git a/servo/components/style_derive/to_computed_value.rs b/servo/components/style_derive/to_computed_value.rs
+index fe6bddb7ed..1dc422e2dd 100644
+--- servo/components/style_derive/to_computed_value.rs
++++ servo/components/style_derive/to_computed_value.rs
+@@ -47,12 +47,15 @@ pub fn derive_to_value(
+ cg::add_predicate(&mut where_clause, parse_quote!(#param: #trait_path));
+ }
+
++ let computed_value_type = cg::fmap_trait_output(&input, &trait_path, &output_type_name);
++
+ let mut add_field_bound = |binding: &BindingInfo| {
+ let ty = &binding.ast().ty;
+
+ let output_type = cg::map_type_params(
+ ty,
+ &params,
++ &computed_value_type,
+ &mut |ident| parse_quote!(<#ident as #trait_path>::#output_type_name),
+ );
+
+@@ -142,7 +145,6 @@ pub fn derive_to_value(
+
+ input.generics.where_clause = where_clause;
+ let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+- let computed_value_type = cg::fmap_trait_output(&input, &trait_path, &output_type_name);
+
+ let impl_ = trait_impl(from_body, to_body);
+
+--
+2.28.0
+
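Editor's note: patch 37/38 above threads a self_type parameter through map_type_params (and map_type_params_in_path) so that a literal Self in a field type is rewritten to the concrete computed-value path rather than being copied verbatim into the generated impl; to_computed_value.rs now computes computed_value_type before the per-field loop and passes it in. A standalone toy model of that substitution, independent of the sketch further up and again not syn's real types (the name ComputedFoo is invented for the example):

// Toy model: map generic parameters and rewrite `Self` to a concrete path.
#[derive(Debug)]
enum FieldTy {
    Param(String),               // e.g. `T`
    SelfTy,                      // a literal `Self` in a field's type
    Path(String, Vec<FieldTy>),  // e.g. `Box<Self>`
}

fn map_params(ty: &FieldTy, self_type: &str, f: &mut impl FnMut(&str) -> FieldTy) -> FieldTy {
    match ty {
        FieldTy::Param(p) => f(p.as_str()),
        // New in this patch: `Self` becomes the computed-value type
        // instead of leaking into the generated impl unchanged.
        FieldTy::SelfTy => FieldTy::Path(self_type.to_string(), Vec::new()),
        FieldTy::Path(name, args) => FieldTy::Path(
            name.clone(),
            args.iter().map(|a| map_params(a, self_type, f)).collect(),
        ),
    }
}

fn main() {
    // A field of type `Box<Self>` being mapped for a hypothetical `Foo<T>`:
    let field = FieldTy::Path("Box".into(), vec![FieldTy::SelfTy]);
    let mapped = map_params(&field, "ComputedFoo", &mut |p| {
        FieldTy::Param(format!("<{p} as ToComputedValue>::ComputedValue"))
    });
    println!("{mapped:?}"); // Path("Box", [Path("ComputedFoo", [])])
}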
+From 300e01e71c9dc536d499d80563968c5fc7f7e34a Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:10:20 +0200
+Subject: [PATCH 38/38] bmo#1663715: Update syn and proc-macro2 so that Firefox
+ can build on Rust nightly again
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ Cargo.lock | 8 +-
+ .../rust/lucet-wasi/.cargo-checksum.json | 2 +-
+ .../rust/packed_simd/.cargo-checksum.json | 2 +-
+ .../rust/proc-macro2/.cargo-checksum.json | 2 +-
+ third_party/rust/proc-macro2/Cargo.toml | 15 +-
+ third_party/rust/proc-macro2/README.md | 2 +-
+ third_party/rust/proc-macro2/build.rs | 20 +
+ third_party/rust/proc-macro2/src/detection.rs | 67 +
+ third_party/rust/proc-macro2/src/fallback.rs | 1010 ++----
+ third_party/rust/proc-macro2/src/lib.rs | 225 +-
+ third_party/rust/proc-macro2/src/marker.rs | 18 +
+ third_party/rust/proc-macro2/src/parse.rs | 849 +++++
+ third_party/rust/proc-macro2/src/strnom.rs | 391 ---
+ third_party/rust/proc-macro2/src/wrapper.rs | 258 +-
+ .../rust/proc-macro2/tests/comments.rs | 103 +
+ third_party/rust/proc-macro2/tests/marker.rs | 33 +
+ third_party/rust/proc-macro2/tests/test.rs | 240 +-
+ .../rust/proc-macro2/tests/test_fmt.rs | 26 +
+ .../spirv-cross-internal/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/Cargo.toml | 35 +-
+ third_party/rust/syn/README.md | 16 +-
+ third_party/rust/syn/benches/file.rs | 7 +
+ third_party/rust/syn/benches/rust.rs | 45 +-
+ third_party/rust/syn/build.rs | 38 +-
+ third_party/rust/syn/src/attr.rs | 126 +-
+ third_party/rust/syn/src/buffer.rs | 56 +-
+ third_party/rust/syn/src/custom_keyword.rs | 12 +-
+ .../rust/syn/src/custom_punctuation.rs | 50 +-
+ third_party/rust/syn/src/data.rs | 96 +-
+ third_party/rust/syn/src/derive.rs | 10 +-
+ third_party/rust/syn/src/discouraged.rs | 27 +-
+ third_party/rust/syn/src/error.rs | 33 +-
+ third_party/rust/syn/src/expr.rs | 826 +++--
+ third_party/rust/syn/src/ext.rs | 12 +-
+ third_party/rust/syn/src/file.rs | 4 +-
+ third_party/rust/syn/src/gen/clone.rs | 2051 ++++++++++++
+ third_party/rust/syn/src/gen/debug.rs | 2857 +++++++++++++++++
+ third_party/rust/syn/src/gen/eq.rs | 1930 +++++++++++
+ third_party/rust/syn/src/gen/fold.rs | 287 +-
+ third_party/rust/syn/src/gen/hash.rs | 2691 ++++++++++++++++
+ third_party/rust/syn/src/gen/visit.rs | 19 +-
+ third_party/rust/syn/src/gen/visit_mut.rs | 19 +-
+ third_party/rust/syn/src/generics.rs | 255 +-
+ third_party/rust/syn/src/item.rs | 1515 +++++----
+ third_party/rust/syn/src/keyword.rs | 0
+ third_party/rust/syn/src/lib.rs | 109 +-
+ third_party/rust/syn/src/lifetime.rs | 13 +-
+ third_party/rust/syn/src/lit.rs | 581 ++--
+ third_party/rust/syn/src/mac.rs | 55 +-
+ third_party/rust/syn/src/macros.rs | 61 +-
+ third_party/rust/syn/src/op.rs | 6 +-
+ third_party/rust/syn/src/parse.rs | 211 +-
+ third_party/rust/syn/src/parse_macro_input.rs | 32 +-
+ third_party/rust/syn/src/parse_quote.rs | 15 +-
+ third_party/rust/syn/src/pat.rs | 313 +-
+ third_party/rust/syn/src/path.rs | 33 +-
+ third_party/rust/syn/src/punctuated.rs | 123 +-
+ third_party/rust/syn/src/reserved.rs | 42 +
+ third_party/rust/syn/src/spanned.rs | 4 +-
+ third_party/rust/syn/src/stmt.rs | 141 +-
+ third_party/rust/syn/src/token.rs | 99 +-
+ third_party/rust/syn/src/tt.rs | 6 +-
+ third_party/rust/syn/src/ty.rs | 364 ++-
+ third_party/rust/syn/src/verbatim.rs | 15 +
+ third_party/rust/syn/src/whitespace.rs | 65 +
+ third_party/rust/syn/tests/clone.sh | 16 -
+ third_party/rust/syn/tests/common/eq.rs | 247 +-
+ third_party/rust/syn/tests/common/mod.rs | 13 +
+ third_party/rust/syn/tests/common/parse.rs | 24 +-
+ third_party/rust/syn/tests/debug/gen.rs | 50 +-
+ third_party/rust/syn/tests/debug/mod.rs | 17 +-
+ third_party/rust/syn/tests/features/error.rs | 1 -
+ third_party/rust/syn/tests/features/mod.rs | 22 -
+ third_party/rust/syn/tests/macros/mod.rs | 8 +-
+ third_party/rust/syn/tests/repo/mod.rs | 137 +-
+ third_party/rust/syn/tests/repo/progress.rs | 37 +
+ third_party/rust/syn/tests/test_asyncness.rs | 38 +-
+ third_party/rust/syn/tests/test_attribute.rs | 452 +--
+ .../rust/syn/tests/test_derive_input.rs | 1321 ++++----
+ third_party/rust/syn/tests/test_expr.rs | 314 +-
+ third_party/rust/syn/tests/test_generics.rs | 371 ++-
+ third_party/rust/syn/tests/test_grouping.rs | 53 +-
+ third_party/rust/syn/tests/test_ident.rs | 5 -
+ third_party/rust/syn/tests/test_item.rs | 45 +
+ third_party/rust/syn/tests/test_iterators.rs | 7 +-
+ third_party/rust/syn/tests/test_lit.rs | 75 +-
+ third_party/rust/syn/tests/test_meta.rs | 498 ++-
+ .../rust/syn/tests/test_parse_buffer.rs | 41 +-
+ .../rust/syn/tests/test_parse_stream.rs | 12 +
+ third_party/rust/syn/tests/test_pat.rs | 27 +-
+ third_party/rust/syn/tests/test_path.rs | 52 +
+ third_party/rust/syn/tests/test_precedence.rs | 196 +-
+ third_party/rust/syn/tests/test_receiver.rs | 127 +
+ third_party/rust/syn/tests/test_round_trip.rs | 41 +-
+ third_party/rust/syn/tests/test_shebang.rs | 59 +
+ .../rust/syn/tests/test_should_parse.rs | 4 -
+ third_party/rust/syn/tests/test_size.rs | 2 -
+ third_party/rust/syn/tests/test_stmt.rs | 44 +
+ .../rust/syn/tests/test_token_trees.rs | 12 +-
+ third_party/rust/syn/tests/test_ty.rs | 53 +
+ third_party/rust/syn/tests/test_visibility.rs | 145 +
+ third_party/rust/syn/tests/zzz_stable.rs | 4 +-
+ 103 files changed, 17319 insertions(+), 5831 deletions(-)
+ create mode 100644 third_party/rust/proc-macro2/src/detection.rs
+ create mode 100644 third_party/rust/proc-macro2/src/marker.rs
+ create mode 100644 third_party/rust/proc-macro2/src/parse.rs
+ delete mode 100644 third_party/rust/proc-macro2/src/strnom.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/comments.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/test_fmt.rs
+ create mode 100644 third_party/rust/syn/src/gen/clone.rs
+ create mode 100644 third_party/rust/syn/src/gen/debug.rs
+ create mode 100644 third_party/rust/syn/src/gen/eq.rs
+ create mode 100644 third_party/rust/syn/src/gen/hash.rs
+ delete mode 100644 third_party/rust/syn/src/keyword.rs
+ create mode 100644 third_party/rust/syn/src/reserved.rs
+ create mode 100644 third_party/rust/syn/src/verbatim.rs
+ create mode 100644 third_party/rust/syn/src/whitespace.rs
+ delete mode 100755 third_party/rust/syn/tests/clone.sh
+ delete mode 100644 third_party/rust/syn/tests/features/error.rs
+ delete mode 100644 third_party/rust/syn/tests/features/mod.rs
+ create mode 100644 third_party/rust/syn/tests/repo/progress.rs
+ create mode 100644 third_party/rust/syn/tests/test_item.rs
+ create mode 100644 third_party/rust/syn/tests/test_parse_stream.rs
+ create mode 100644 third_party/rust/syn/tests/test_path.rs
+ create mode 100644 third_party/rust/syn/tests/test_receiver.rs
+ create mode 100644 third_party/rust/syn/tests/test_shebang.rs
+ create mode 100644 third_party/rust/syn/tests/test_stmt.rs
+ create mode 100644 third_party/rust/syn/tests/test_ty.rs
+ create mode 100644 third_party/rust/syn/tests/test_visibility.rs
+
+diff --git a/Cargo.lock b/Cargo.lock
+index 19117e8368..d5fe0f6457 100644
+--- Cargo.lock
++++ Cargo.lock
+@@ -3717,9 +3717,9 @@ dependencies = [
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
++version = "1.0.24"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+ dependencies = [
+ "unicode-xid",
+ ]
+@@ -4647,9 +4647,9 @@ dependencies = [
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+diff --git a/third_party/rust/lucet-wasi/.cargo-checksum.json b/third_party/rust/lucet-wasi/.cargo-checksum.json
+index 229fc9978c..2c8c0a3c22 100644
+--- third_party/rust/lucet-wasi/.cargo-checksum.json
++++ third_party/rust/lucet-wasi/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/.gitignore":"44575cf5b28512d75644bf54a517dcef304ff809fd511747621b4d64f19aac66","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429e
c799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429ec799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d
0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/packed_simd/.cargo-checksum.json b/third_party/rust/packed_simd/.cargo-checksum.json
+index 01afcc1efd..c727a10006 100644
+--- third_party/rust/packed_simd/.cargo-checksum.json
++++ third_party/rust/packed_simd/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/.gitignore":"fe82c7da551079d832cf74200b0b359b4df9828cb4a0416fa7384f07a2ae6a13","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e
62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cbfe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b
6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef189314f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/a
pi/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/shuffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e",
"src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
++{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cb
fe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef18931
4f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/api/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/s
huffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e","src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e
9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+index eeef4120af..e7849f2896 100644
+--- third_party/rust/proc-macro2/.cargo-checksum.json
++++ third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"302d447d62c8d091d6241cf62bdad607c0d4ed8ff9f43d9b254c9d99c253ee8e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"a71283fbc495095eebbbf46753df3fe2c19505c745b508dea157f65796b64dd7","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"b114e013695260f6066395c8712cea112ec2a386010397a80f15a60f8b986444","src/lib.rs":"7f528764a958587f007f0c2a330a6a414bae2c8e73d5ed9fb64ff1b42b1805b1","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"1d2253eacbd40eb3a2a933be2adcee356af922bdb48cc89ff266252a41fd98a1","src/wrapper.rs":"f52646ce1705c1f6265516f30d4c43297b5f529dd31fb91f4c806be89d5a4122","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"5f30a704eeb2b9198b57f416d622da72d25cb9bf8d8b12e6d0e90aa2cb0e43fc","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+index 95d653633d..22150c516a 100644
+--- third_party/rust/proc-macro2/Cargo.toml
++++ third_party/rust/proc-macro2/Cargo.toml
+@@ -13,21 +13,22 @@
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.24"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+@@ -39,5 +40,3 @@ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+index 19b0c3b5f8..3d05e871a7 100644
+--- third_party/rust/proc-macro2/README.md
++++ third_party/rust/proc-macro2/README.md
+@@ -1,6 +1,6 @@
+ # proc-macro2
+
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+index deb9b92719..b247d874f6 100644
+--- third_party/rust/proc-macro2/build.rs
++++ third_party/rust/proc-macro2/build.rs
+@@ -14,6 +14,10 @@
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+@@ -57,6 +61,22 @@ fn main() {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 32 {
++ println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
++ }
++
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 44 {
++ println!("cargo:rustc-cfg=lexerror_display");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
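
The build.rs hunk above gates optional cfg flags (for example "hygiene" on Rust 1.45 and newer) on the rustc minor version detected at build time. Below is a minimal standalone sketch of that version-gating pattern; the helper and the emitted cfg name are illustrative only, not the crate's real build script.

// Sketch of a version-gated cfg in a Cargo build script (assumptions noted inline).
use std::env;
use std::process::Command;
use std::str;

fn rustc_minor_version() -> Option<u32> {
    // Cargo sets RUSTC for build scripts; fall back to "rustc" otherwise.
    let rustc = env::var("RUSTC").unwrap_or_else(|_| "rustc".to_string());
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = str::from_utf8(&output.stdout).ok()?;
    // Assumed shape: "rustc 1.47.0 (...)"; take the minor component.
    let mut pieces = version.split('.');
    if pieces.next() != Some("rustc 1") {
        return None;
    }
    pieces.next()?.parse().ok()
}

fn main() {
    if let Some(minor) = rustc_minor_version() {
        if minor >= 45 {
            // Hypothetical cfg name used only for this illustration.
            println!("cargo:rustc-cfg=example_hygiene");
        }
    }
}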
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+index 0000000000..c597bc99c6
+--- /dev/null
++++ third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
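
detection.rs above probes whether the compiler's proc_macro API is usable by installing a silent panic hook, calling into the API under catch_unwind, and caching the answer in an atomic guarded by a Once. The following is a self-contained sketch of that probe-and-cache pattern; probe() is a stand-in that always panics, not a real proc_macro call.

use std::panic;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Once;

// 0 = not yet probed, 1 = unavailable, 2 = available (same encoding as above).
static WORKS: AtomicUsize = AtomicUsize::new(0);
static INIT: Once = Once::new();

fn probe() {
    // Stand-in for proc_macro::Span::call_site(); panics when "unavailable".
    panic!("proc_macro API not available in this context");
}

fn available() -> bool {
    match WORKS.load(Ordering::SeqCst) {
        1 => return false,
        2 => return true,
        _ => {}
    }
    INIT.call_once(|| {
        let original = panic::take_hook();
        panic::set_hook(Box::new(|_| {})); // suppress "thread panicked" output
        let works = panic::catch_unwind(probe).is_ok();
        panic::set_hook(original);
        WORKS.store(works as usize + 1, Ordering::SeqCst);
    });
    available()
}

fn main() {
    println!("probe says available = {}", available());
}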
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+index fe582b3b5f..8900c5ff0f 100644
+--- third_party/rust/proc-macro2/src/fallback.rs
++++ third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,27 +1,41 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+@@ -31,6 +45,72 @@ impl TokenStream {
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
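
The comment above spells out why the hand-written Drop exists: with the default recursive destructor, a token stream containing deeply nested groups can overflow the stack, so the patch drains each group's contents back into the parent's buffer and frees everything iteratively. The same idea on a simplified stand-in tree type (not proc-macro2's real types):

enum Node {
    Leaf(u32),
    Group(Tree),
}

struct Tree {
    inner: Vec<Node>,
}

impl Drop for Tree {
    fn drop(&mut self) {
        while let Some(node) = self.inner.pop() {
            if let Node::Group(mut group) = node {
                // Move the child's contents into our own queue instead of
                // letting `group` recurse through its own destructor.
                self.inner.append(&mut group.inner);
            }
        }
    }
}

fn main() {
    // Build a deeply nested chain; a naive recursive destructor could
    // overflow the stack at this depth.
    let mut tree = Tree { inner: vec![Node::Leaf(0)] };
    for _ in 0..100_000 {
        tree = Tree { inner: vec![Node::Group(tree)] };
    }
    drop(tree);
    println!("dropped without deep recursion");
}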
+@@ -59,20 +139,22 @@ impl FromStr for TokenStream {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ f.write_str("cannot parse string into token stream")
++ }
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+@@ -80,37 +162,22 @@ impl fmt::Display for TokenStream {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
+- }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+@@ -139,28 +206,26 @@ impl From<TokenStream> for proc_macro::TokenStream {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+@@ -168,31 +233,30 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+@@ -208,7 +272,7 @@ impl SourceFile {
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+@@ -218,7 +282,7 @@ impl fmt::Debug for SourceFile {
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+@@ -228,23 +292,11 @@ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
+@@ -282,16 +334,21 @@ impl FileInfo {
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
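
The rewritten lines_offsets above counts characters rather than bytes and also returns the total character count, so the span recorded for a file lines up with later line/column lookups even when the source contains multi-byte characters. A quick standalone illustration of the same computation (the free function below simply mirrors the hunk's shape):

fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
    let mut lines = vec![0];
    let mut total = 0;
    for ch in s.chars() {
        total += 1;
        if ch == '\n' {
            lines.push(total);
        }
    }
    (total, lines)
}

fn main() {
    // "é" is one char but two bytes; counting chars keeps offsets consistent
    // with how line/column positions are reported.
    let (total, lines) = lines_offsets("é\nab\n");
    assert_eq!(total, 5);
    assert_eq!(lines, vec![0, 2, 5]);
    println!("total = {}, line starts = {:?}", total, lines);
}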
+ #[cfg(span_locations)]
+@@ -310,23 +367,22 @@ impl SourceMap {
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+@@ -343,11 +399,11 @@ impl SourceMap {
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+@@ -361,12 +417,16 @@ impl Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+@@ -374,7 +434,6 @@ impl Span {
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+@@ -427,26 +486,59 @@ impl Span {
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+@@ -474,11 +566,11 @@ impl Group {
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+@@ -486,36 +578,45 @@ impl Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
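
The Display impl for Group above documents the convention it tries to match: parentheses and brackets render tightly, while braces get interior padding and, when non-empty, a space before the closing brace. A small standalone sketch of just that delimiter convention; the local Delimiter enum and render helper are illustrative only.

#[derive(Clone, Copy, PartialEq)]
enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
    None,
}

fn render(delimiter: Delimiter, body: &str) -> String {
    let (open, close) = match delimiter {
        Delimiter::Parenthesis => ("(", ")"),
        Delimiter::Brace => ("{ ", "}"),
        Delimiter::Bracket => ("[", "]"),
        Delimiter::None => ("", ""),
    };
    let mut out = String::new();
    out.push_str(open);
    out.push_str(body);
    if delimiter == Delimiter::Brace && !body.is_empty() {
        out.push(' ');
    }
    out.push_str(close);
    out
}

fn main() {
    assert_eq!(render(Delimiter::Parenthesis, ""), "()");
    assert_eq!(render(Delimiter::Brace, ""), "{ }");
    assert_eq!(render(Delimiter::Brace, "a"), "{ a }");
    println!("delimiter rendering matches the convention above");
}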
+@@ -549,16 +650,14 @@ impl Ident {
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+@@ -615,18 +714,18 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+@@ -637,17 +736,17 @@ impl fmt::Debug for Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+@@ -669,7 +768,7 @@ macro_rules! unsuffixed_numbers {
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+@@ -711,7 +810,7 @@ impl Literal {
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -719,7 +818,7 @@ impl Literal {
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -730,10 +829,10 @@ impl Literal {
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+@@ -744,10 +843,10 @@ impl Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
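
The two hunks above switch literal rendering from escape_default to escape_debug; the practical difference is that escape_debug leaves printable non-ASCII characters alone instead of turning them into \u{...} escapes. A short illustration, assuming the standard library behaviour described here:

fn main() {
    // escape_default hex-escapes anything outside printable ASCII.
    assert_eq!('é'.escape_default().to_string(), "\\u{e9}");
    // escape_debug keeps printable non-ASCII characters as-is.
    assert_eq!('é'.escape_debug().to_string(), "é");
    println!("escape_default: {}", 'é'.escape_default());
    println!("escape_debug:   {}", 'é'.escape_debug());
}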
+@@ -756,6 +855,7 @@ impl Literal {
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+@@ -784,651 +884,17 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Literal");
+ debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
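The removed doc_comment/doc_comment_contents pair above lowers `///` and `//!` comments into `#[doc = "..."]` and `#![doc = "..."]` attribute tokens before they enter the token stream. A minimal standalone sketch of that lowering, using plain strings rather than proc-macro2's token types (so the `{:?}` escaping is only an approximation of the real literal handling):

fn lower_doc_comment(line: &str) -> Option<String> {
    if let Some(text) = line.strip_prefix("//!") {
        // inner doc comment -> inner attribute
        Some(format!("#![doc = {:?}]", text))
    } else if let Some(text) = line.strip_prefix("///") {
        // `////...` is an ordinary comment, not a doc comment
        if text.starts_with('/') {
            return None;
        }
        Some(format!("#[doc = {:?}]", text))
    } else {
        None
    }
}

fn main() {
    assert_eq!(
        lower_doc_comment("/// hello").as_deref(),
        Some("#[doc = \" hello\"]")
    );
    assert_eq!(
        lower_doc_comment("//! crate docs").as_deref(),
        Some("#![doc = \" crate docs\"]")
    );
    assert_eq!(lower_doc_comment("//// not a doc comment"), None);
}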
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+index a08be3e815..c20fb50d4a 100644
+--- third_party/rust/proc-macro2/src/lib.rs
++++ third_party/rust/proc-macro2/src/lib.rs
+@@ -78,27 +78,24 @@
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.24")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+-use std::cmp::Ordering;
+-use std::fmt;
+-use std::hash::{Hash, Hasher};
+-use std::iter::FromIterator;
+-use std::marker;
+-use std::ops::RangeBounds;
+-#[cfg(procmacro2_semver_exempt)]
+-use std::path::PathBuf;
+-use std::rc::Rc;
+-use std::str::FromStr;
++mod marker;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+@@ -106,6 +103,17 @@ use crate::fallback as imp;
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
++use crate::marker::Marker;
++use std::cmp::Ordering;
++use std::error::Error;
++use std::fmt::{self, Debug, Display};
++use std::hash::{Hash, Hasher};
++use std::iter::FromIterator;
++use std::ops::RangeBounds;
++#[cfg(procmacro2_semver_exempt)]
++use std::path::PathBuf;
++use std::str::FromStr;
++
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+ ///
+ /// This type provides interfaces for iterating over token trees and for
+@@ -116,27 +124,27 @@ mod imp;
+ #[derive(Clone)]
+ pub struct TokenStream {
+ inner: imp::TokenStream,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ /// Error returned from `TokenStream::from_str`.
+ pub struct LexError {
+ inner: imp::LexError,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> TokenStream {
+ TokenStream {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -173,7 +181,7 @@ impl FromStr for TokenStream {
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+@@ -228,25 +236,33 @@ impl FromIterator<TokenStream> for TokenStream {
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ Debug::fmt(&self.inner, f)
++ }
++}
++
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
++impl Error for LexError {}
++
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+@@ -254,7 +270,7 @@ impl fmt::Debug for LexError {
+ #[derive(Clone, PartialEq, Eq)]
+ pub struct SourceFile {
+ inner: imp::SourceFile,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+@@ -262,7 +278,7 @@ impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -291,9 +307,9 @@ impl SourceFile {
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -311,25 +327,41 @@ pub struct LineColumn {
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Span {
+ fn _new(inner: imp::Span) -> Span {
+ Span {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Span) -> Span {
+ Span {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -342,6 +374,16 @@ impl Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+@@ -352,18 +394,12 @@ impl Span {
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+@@ -439,9 +475,9 @@ impl Span {
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -462,11 +498,11 @@ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+@@ -476,11 +512,11 @@ impl TokenTree {
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+@@ -513,32 +549,32 @@ impl From<Literal> for TokenTree {
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+@@ -651,15 +687,15 @@ impl Group {
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+@@ -669,7 +705,7 @@ impl fmt::Debug for Group {
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+ pub struct Punct {
+- op: char,
++ ch: char,
+ spacing: Spacing,
+ span: Span,
+ }
+@@ -695,9 +731,9 @@ impl Punct {
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+- pub fn new(op: char, spacing: Spacing) -> Punct {
++ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct {
+- op,
++ ch,
+ spacing,
+ span: Span::call_site(),
+ }
+@@ -705,7 +741,7 @@ impl Punct {
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+- self.op
++ self.ch
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether
+@@ -730,16 +766,16 @@ impl Punct {
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.ch, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+- debug.field("op", &self.op);
++ debug.field("char", &self.ch);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+@@ -813,14 +849,14 @@ impl fmt::Debug for Punct {
+ #[derive(Clone)]
+ pub struct Ident {
+ inner: imp::Ident,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Ident {
+ fn _new(inner: imp::Ident) -> Ident {
+ Ident {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -920,15 +956,15 @@ impl Hash for Ident {
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -941,7 +977,7 @@ impl fmt::Debug for Ident {
+ #[derive(Clone)]
+ pub struct Literal {
+ inner: imp::Literal,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ macro_rules! suffixed_int_literals {
+@@ -988,14 +1024,14 @@ impl Literal {
+ fn _new(inner: imp::Literal) -> Literal {
+ Literal {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Literal) -> Literal {
+ Literal {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -1140,26 +1176,25 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
+- use std::marker;
+- use std::rc::Rc;
++ use crate::marker::Marker;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+@@ -1168,7 +1203,7 @@ pub mod token_stream {
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Iterator for IntoIter {
+@@ -1179,9 +1214,9 @@ pub mod token_stream {
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -1192,7 +1227,7 @@ pub mod token_stream {
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+ }
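Most of the lib.rs hunks above swap `self.inner.fmt(f)` for the fully qualified `Display::fmt(&self.inner, f)` / `Debug::fmt(&self.inner, f)`. A minimal sketch of that forwarding pattern with a hypothetical `Wrapper` newtype; once both `Debug` and `Display` are imported (as the new `use std::fmt::{self, Debug, Display}` does) and the inner type implements both, a bare `.fmt(f)` method call would be ambiguous, so the trait is named explicitly:

use std::fmt::{self, Debug, Display};

// Hypothetical newtype standing in for TokenStream, Span, Ident, etc.
struct Wrapper {
    inner: u32,
}

impl Display for Wrapper {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `self.inner.fmt(f)` would be ambiguous here: u32 implements both
        // Debug and Display and both traits are in scope.
        Display::fmt(&self.inner, f)
    }
}

impl Debug for Wrapper {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Debug::fmt(&self.inner, f)
    }
}

fn main() {
    let w = Wrapper { inner: 42 };
    assert_eq!(w.to_string(), "42");      // via Display
    assert_eq!(format!("{:?}", w), "42"); // via Debug
}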
+diff --git a/third_party/rust/proc-macro2/src/marker.rs b/third_party/rust/proc-macro2/src/marker.rs
+new file mode 100644
+index 0000000000..58729baf4a
+--- /dev/null
++++ third_party/rust/proc-macro2/src/marker.rs
+@@ -0,0 +1,18 @@
++use std::marker::PhantomData;
++use std::panic::{RefUnwindSafe, UnwindSafe};
++use std::rc::Rc;
++
++// Zero sized marker with the correct set of autotrait impls we want all proc
++// macro types to have.
++pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
++
++pub(crate) use self::value::*;
++
++mod value {
++ pub(crate) use std::marker::PhantomData as Marker;
++}
++
++pub(crate) struct ProcMacroAutoTraits(Rc<()>);
++
++impl UnwindSafe for ProcMacroAutoTraits {}
++impl RefUnwindSafe for ProcMacroAutoTraits {}
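The new marker.rs above relies on the fact that a zero-sized PhantomData field still participates in auto-trait inference, so putting an Rc inside its type parameter opts every containing type out of Send and Sync without storing any data. A minimal sketch of the idea with hypothetical names (`NotThreadSafe`, `Token`):

use std::marker::PhantomData;
use std::rc::Rc;

#[allow(dead_code)]
struct NotThreadSafe(Rc<()>);

struct Token {
    value: u32,
    // Zero-sized, but drags NotThreadSafe's auto traits into Token:
    // Token is now !Send and !Sync.
    _marker: PhantomData<NotThreadSafe>,
}

fn assert_send<T: Send>() {}

fn main() {
    let t = Token { value: 1, _marker: PhantomData };
    println!("{}", t.value);
    assert_send::<u32>(); // fine: u32 is Send
    // assert_send::<Token>(); // would not compile: Rc<()> is !Send
}

On top of this, the new file adds explicit UnwindSafe and RefUnwindSafe impls for ProcMacroAutoTraits, as the hunk shows.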
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+index 0000000000..365fe0484d
+--- /dev/null
++++ third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,849 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::char;
++use std::str::{Bytes, CharIndices, Chars};
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if is_ident_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = punct(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
++ .iter()
++ .any(|prefix| input.starts_with(prefix))
++ {
++ Err(LexError)
++ } else {
++ ident_any(input)
++ }
++}
++
++fn ident_any(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
++ let mut last = ch;
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.peek() {
++ Some((_, ch)) if ch.is_whitespace() => {
++ last = *ch;
++ chars.next();
++ }
++ _ => break,
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => match bytes.next() {
++ Some((_, b'\n')) => {}
++ _ => break,
++ },
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
++ let mut last = b as char;
++ let rest = input.advance(newline + 1);
++ let mut chars = rest.char_indices();
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.next() {
++ Some((_, ch)) if ch.is_whitespace() => last = ch,
++ Some((offset, _)) => {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ break;
++ }
++ None => return Err(LexError),
++ }
++ }
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ let mut value = 0;
++ let mut len = 0;
++ while let Some((_, ch)) = chars.next() {
++ let digit = match ch {
++ '0'..='9' => ch as u8 - b'0',
++ 'a'..='f' => 10 + ch as u8 - b'a',
++ 'A'..='F' => 10 + ch as u8 - b'A',
++ '_' if len > 0 => continue,
++ '}' if len > 0 => return char::from_u32(value).is_some(),
++ _ => return false,
++ };
++ if len == 6 {
++ return false;
++ }
++ value *= 0x10;
++ value += u32::from(digit);
++ len += 1;
++ }
++ false
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ if !(has_dot || has_exp) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let token_before_exp = if has_dot {
++ Ok(input.advance(len - 1))
++ } else {
++ Err(LexError)
++ };
++ let mut has_sign = false;
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ if has_sign {
++ return token_before_exp;
++ }
++ chars.next();
++ len += 1;
++ has_sign = true;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return token_before_exp;
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ match b {
++ b'0'..=b'9' => {
++ let digit = (b - b'0') as u64;
++ if digit >= base {
++ return Err(LexError);
++ }
++ }
++ b'a'..=b'f' => {
++ let digit = 10 + (b - b'a') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'A'..=b'F' => {
++ let digit = 10 + (b - b'A') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn punct(input: Cursor) -> PResult<Punct> {
++ match punct_char(input) {
++ Ok((rest, '\'')) => {
++ if ident_any(rest)?.0.starts_with("'") {
++ Err(LexError)
++ } else {
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ }
++ Ok((rest, ch)) => {
++ let kind = match punct_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn punct_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as a punct.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
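The new parse.rs above replaces the macro-based lexer with plain functions over a small `Cursor` type: an immutable view of the remaining input that is "advanced" by returning a new cursor. A minimal, self-contained sketch of that shape (span offsets and the real token types are omitted; this is not the crate's actual Cursor):

#[derive(Copy, Clone)]
struct Cursor<'a> {
    rest: &'a str,
}

#[derive(Debug)]
struct LexError;

impl<'a> Cursor<'a> {
    fn advance(&self, bytes: usize) -> Cursor<'a> {
        // The real Cursor also counts chars here to track a span offset
        // when span_locations is enabled.
        Cursor { rest: &self.rest[bytes..] }
    }

    fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
        if self.rest.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(LexError)
        }
    }
}

fn main() -> Result<(), LexError> {
    let input = Cursor { rest: "b'x'" };
    // Consume the `b'` prefix the way the new byte() parser does,
    // then inspect what is left.
    let rest = input.parse("b'")?;
    assert_eq!(rest.rest, "x'");
    Ok(())
}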
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+index eb7d0b8a8e..0000000000
+--- third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
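The deleted strnom.rs supplied nom-style macros (`named!`, `alt!`, `do_parse!`, ...) that expanded into parser functions trying alternatives in order. A rough sketch of what such a definition amounted to, next to the direct if/else style the new parse.rs uses, written over plain &str instead of the crate's Cursor (names here are illustrative only):

struct LexError;

type PResult<'a, O> = Result<(&'a str, O), LexError>;

fn tag<'a>(input: &'a str, t: &str) -> PResult<'a, ()> {
    match input.strip_prefix(t) {
        Some(rest) => Ok((rest, ())),
        None => Err(LexError),
    }
}

// Hand expansion of something like:
//     named!(bool_lit -> bool, alt!(
//         tag!("true")  => { |_| true }
//         |
//         tag!("false") => { |_| false }
//     ));
// alt! tried each branch in turn and kept the first Ok result.
fn bool_lit(input: &str) -> PResult<bool> {
    match tag(input, "true") {
        Ok((rest, ())) => Ok((rest, true)),
        Err(LexError) => match tag(input, "false") {
            Ok((rest, ())) => Ok((rest, false)),
            Err(LexError) => Err(LexError),
        },
    }
}

fn main() {
    assert!(matches!(bool_lit("true "), Ok((" ", true))));
    assert!(matches!(bool_lit("false"), Ok(("", false))));
    assert!(bool_lit("maybe").is_err());
}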
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+index 552b9381cf..3df044af17 100644
+--- third_party/rust/proc-macro2/src/wrapper.rs
++++ third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,15 +1,15 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+@@ -19,73 +19,16 @@ pub enum TokenStream {
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+@@ -103,7 +46,12 @@ impl DeferredTokenStream {
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+@@ -114,7 +62,7 @@ impl DeferredTokenStream {
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+@@ -147,9 +95,9 @@ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+@@ -157,11 +105,17 @@ impl FromStr for TokenStream {
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+@@ -187,7 +141,7 @@ impl From<fallback::TokenStream> for TokenStream {
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+@@ -196,9 +150,9 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+- op.set_span(tt.span().inner.unwrap_nightly());
+- op.into()
++ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
++ punct.set_span(tt.span().inner.unwrap_nightly());
++ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+@@ -207,7 +161,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+@@ -215,9 +169,9 @@ impl From<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+@@ -227,7 +181,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+@@ -252,14 +206,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
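The comment above ("Here is the reason for DeferredTokenStream") and the evaluate_now short-circuit earlier in this patch describe the same idea: buffer incoming tokens in a plain Vec and cross the proc macro bridge once, not once per Extend call. A minimal sketch of that deferral pattern, using an ExpensiveStream stand-in for the real proc_macro::TokenStream (all names here are illustrative):

// Stand-in for a handle whose every method call is expensive, e.g. because it
// crosses the proc macro bridge into the compiler.
struct ExpensiveStream {
    tokens: Vec<String>,
}

impl ExpensiveStream {
    fn extend(&mut self, items: impl IntoIterator<Item = String>) {
        // Imagine a bridge round trip here.
        self.tokens.extend(items);
    }
}

// Deferred wrapper: cheap local pushes now, one expensive extend later.
struct Deferred {
    stream: ExpensiveStream,
    extra: Vec<String>,
}

impl Deferred {
    fn push(&mut self, token: String) {
        // No bridge traffic; just buffer locally.
        self.extra.push(token);
    }

    fn evaluate_now(&mut self) {
        // Skip the round trip entirely when nothing is buffered.
        if !self.extra.is_empty() {
            self.stream.extend(self.extra.drain(..));
        }
    }

    fn into_stream(mut self) -> ExpensiveStream {
        self.evaluate_now();
        self.stream
    }
}

fn main() {
    let mut d = Deferred {
        stream: ExpensiveStream { tokens: Vec::new() },
        extra: Vec::new(),
    };
    for t in ["a", "+", "1"] {
        d.push(t.to_string());
    }
    let s = d.into_stream();
    assert_eq!(s.tokens, ["a", "+", "1"]);
}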
+@@ -270,20 +225,20 @@ impl Extend<TokenStream> for TokenStream {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+@@ -300,17 +255,29 @@ impl From<fallback::LexError> for LexError {
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
++ }
++ }
++}
++
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ #[cfg(lexerror_display)]
++ LexError::Compiler(e) => Display::fmt(e, f),
++ #[cfg(not(lexerror_display))]
++ LexError::Compiler(_e) => Display::fmt(&fallback::LexError, f),
++ LexError::Fallback(e) => Display::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+@@ -361,7 +328,7 @@ impl Iterator for TokenTreeIter {
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+@@ -369,7 +336,7 @@ impl fmt::Debug for TokenTreeIter {
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+@@ -397,58 +364,77 @@ impl SourceFile {
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+@@ -542,16 +528,16 @@ impl From<fallback::Span> for Span {
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+@@ -561,7 +547,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+@@ -652,26 +638,26 @@ impl From<fallback::Group> for Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+@@ -747,26 +733,26 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+@@ -774,7 +760,7 @@ pub enum Literal {
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -786,7 +772,7 @@ macro_rules! suffixed_numbers {
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -830,7 +816,7 @@ impl Literal {
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+@@ -838,7 +824,7 @@ impl Literal {
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+@@ -846,7 +832,7 @@ impl Literal {
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+@@ -854,7 +840,7 @@ impl Literal {
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+@@ -862,7 +848,7 @@ impl Literal {
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+@@ -908,20 +894,20 @@ impl From<fallback::Literal> for Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+index 0000000000..708cccb880
+--- /dev/null
++++ third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
+diff --git a/third_party/rust/proc-macro2/tests/marker.rs b/third_party/rust/proc-macro2/tests/marker.rs
+index 7af2539c1a..70e57677cd 100644
+--- third_party/rust/proc-macro2/tests/marker.rs
++++ third_party/rust/proc-macro2/tests/marker.rs
+@@ -57,3 +57,36 @@ mod semver_exempt {
+
+ assert_impl!(SourceFile is not Send or Sync);
+ }
++
++#[cfg(not(no_libprocmacro_unwind_safe))]
++mod unwind_safe {
++ use super::*;
++ use std::panic::{RefUnwindSafe, UnwindSafe};
++
++ macro_rules! assert_unwind_safe {
++ ($($types:ident)*) => {
++ $(
++ assert_impl!($types is UnwindSafe and RefUnwindSafe);
++ )*
++ };
++ }
++
++ assert_unwind_safe! {
++ Delimiter
++ Group
++ Ident
++ LexError
++ Literal
++ Punct
++ Spacing
++ Span
++ TokenStream
++ TokenTree
++ }
++
++ #[cfg(procmacro2_semver_exempt)]
++ assert_unwind_safe! {
++ LineColumn
++ SourceFile
++ }
++}
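The unwind_safe module above leans on an assert_impl! helper defined elsewhere in the test suite and not shown in this patch. One plausible way such a compile-time trait assertion can be written, purely as a sketch and not the crate's actual macro:

// Sketch: the invocation compiles only if every listed type satisfies the
// bounds, so a missing UnwindSafe/RefUnwindSafe impl fails the build.
macro_rules! assert_unwind_safe_demo {
    ($($ty:ty),* $(,)?) => {
        #[allow(dead_code)]
        fn _assert_unwind_safe_demo() {
            fn require<T: std::panic::UnwindSafe + std::panic::RefUnwindSafe>() {}
            $( require::<$ty>(); )*
        }
    };
}

assert_unwind_safe_demo!(String, Vec<u8>, std::time::Duration);

fn main() {}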
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+index 7528388138..1e9f633944 100644
+--- third_party/rust/proc-macro2/tests/test.rs
++++ third_party/rust/proc-macro2/tests/test.rs
+@@ -1,7 +1,6 @@
++use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+@@ -84,6 +83,11 @@ fn literal_string() {
+ assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
+ }
+
++#[test]
++fn literal_raw_string() {
++ "r\"\r\n\"".parse::<TokenStream>().unwrap();
++}
++
+ #[test]
+ fn literal_character() {
+ assert_eq!(Literal::character('x').to_string(), "'x'");
+@@ -110,6 +114,37 @@ fn literal_suffix() {
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++ assert_eq!(token_count("0E"), 1);
++ assert_eq!(token_count("0o0A"), 1);
++ assert_eq!(token_count("0E--0"), 4);
++ assert_eq!(token_count("0.0ECMA"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+@@ -161,41 +196,21 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
++ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
++ fail("\"\\u{999999}\""); // outside of valid range of char
++ fail("\"\\u{_0}\""); // leading underscore
++ fail("\"\\u{}\""); // empty
++ fail("b\"\r\""); // bare carriage return in byte string
++ fail("r\"\r\""); // bare carriage return in raw string
++ fail("\"\\\r \""); // backslash carriage return
++ fail("'aa'aa");
++ fail("br##\"\"#");
++ fail("\"\\\n\u{85}\r\"");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+@@ -274,53 +289,11 @@ fn span_join() {
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
+-}
+-
+-#[test]
+-fn op_before_comment() {
++fn punct_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+@@ -331,6 +304,22 @@ fn op_before_comment() {
+ }
+ }
+
++#[test]
++fn joint_last_token() {
++ // This test verifies that we match the behavior of libproc_macro *not* in
++ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
++ // behavior was temporarily broken.
++ // See https://github.com/rust-lang/rust/issues/76399
++
++ let joint_punct = Punct::new(':', Spacing::Joint);
++ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
++ let punct = match stream.into_iter().next().unwrap() {
++ TokenTree::Punct(punct) => punct,
++ _ => unreachable!(),
++ };
++ assert_eq!(punct.spacing(), Spacing::Joint);
++}
++
+ #[test]
+ fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+@@ -345,11 +334,11 @@ fn raw_identifier() {
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+@@ -358,7 +347,7 @@ fn test_debug_ident() {
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -368,7 +357,7 @@ TokenStream [
+ sym: a,
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ },
+ Literal {
+@@ -379,7 +368,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -389,7 +378,7 @@ TokenStream [
+ sym: a
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone
+ },
+ Literal {
+@@ -400,7 +389,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -411,7 +400,7 @@ TokenStream [
+ span: bytes(2..3),
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5),
+ },
+@@ -425,7 +414,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -436,7 +425,7 @@ TokenStream [
+ span: bytes(2..3)
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5)
+ },
+@@ -464,3 +453,80 @@ fn default_tokenstream_is_empty() {
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+index 0000000000..99a0aee5c8
+--- /dev/null
++++ third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
+diff --git a/third_party/rust/spirv-cross-internal/.cargo-checksum.json b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+index 3c732d6d0e..014aa640e1 100644
+--- third_party/rust/spirv-cross-internal/.cargo-checksum.json
++++ third_party/rust/spirv-cross-internal/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.gitignore":"7f23cc92ddb5e1f584447e98d3e8ab6543fc182f1543f0f6ec29856f9250cdd6","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148c
fc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b196567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44e
df18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148cfc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b1
96567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44edf18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5
a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+index 77939d8fc6..704f2ed200 100644
+--- third_party/rust/syn/.cargo-checksum.json
++++ third_party/rust/syn/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835
f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5
fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d
01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+index 7a5c962f06..20277fc461 100644
+--- third_party/rust/syn/Cargo.toml
++++ third_party/rust/syn/Cargo.toml
+@@ -13,7 +13,7 @@
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+@@ -24,25 +24,21 @@ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+@@ -52,18 +48,34 @@ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+@@ -80,7 +92,6 @@ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+index 29a7f32a46..12b5f45b3d 100644
+--- third_party/rust/syn/README.md
++++ third_party/rust/syn/README.md
+@@ -1,10 +1,10 @@
+ Parser for Rust source code
+ ===========================
+
+-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
+-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
+-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
+-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+@@ -46,10 +46,6 @@ contains some APIs that may be useful more generally.
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+@@ -88,8 +84,6 @@ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+@@ -271,7 +265,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+index 08ecd90960..58ab8df297 100644
+--- third_party/rust/syn/benches/file.rs
++++ third_party/rust/syn/benches/file.rs
+@@ -1,9 +1,16 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+index e3d9cd29ba..50e1a7f601 100644
+--- third_party/rust/syn/benches/rust.rs
++++ third_party/rust/syn/benches/rust.rs
+@@ -4,7 +4,14 @@
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+@@ -28,31 +35,35 @@ mod syn_parse {
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+@@ -104,11 +115,11 @@ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+@@ -128,12 +139,12 @@ fn main() {
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
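The testcases! hunk above switches the matcher from $name:path to $name:ident and appends ::bench inside the macro, so call sites list bare module names. A small self-contained sketch of that pattern, with made-up module names standing in for the real benchmark modules:

macro_rules! testcases {
    ($($name:ident,)*) => {
        vec![$((stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),)*]
    };
}

mod quick { pub fn bench(_input: &str) -> Result<(), ()> { Ok(()) } }
mod failing { pub fn bench(_input: &str) -> Result<(), ()> { Err(()) } }

fn main() {
    for (name, f) in testcases!(quick, failing,) {
        println!("{}: {:?}", name, f("fn main() {}"));
    }
}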
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+index c0f9ed3406..cf7681c3f9 100644
+--- third_party/rust/syn/build.rs
++++ third_party/rust/syn/build.rs
+@@ -1,6 +1,6 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+@@ -26,38 +26,14 @@ struct Compiler {
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
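The build.rs rewrite above collapses a chain of match ... return None blocks into the ? operator, which early-returns None from any function whose return type is Option. The same idiom in miniature, on a made-up version-string parser rather than the real rustc_version():

fn minor_version(version: &str) -> Option<u32> {
    // Each `?` stands in for one of the old `match ... => return None` arms.
    let rest = version.strip_prefix("rustc 1.")?;
    rest.split('.').next()?.parse().ok()
}

fn main() {
    assert_eq!(minor_version("rustc 1.47.0"), Some(47));
    assert_eq!(minor_version("not a rustc banner"), None);
}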
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+index 34009deabc..fa4f1cb2a3 100644
+--- third_party/rust/syn/src/attr.rs
++++ third_party/rust/syn/src/attr.rs
+@@ -9,15 +9,11 @@ use proc_macro2::TokenStream;
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -111,7 +107,46 @@ ast_struct! {
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+@@ -120,39 +155,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+@@ -199,7 +206,7 @@ impl Attribute {
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+@@ -208,7 +215,7 @@ impl Attribute {
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+@@ -221,7 +228,7 @@ impl Attribute {
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+@@ -234,7 +241,7 @@ impl Attribute {
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+@@ -247,7 +254,7 @@ impl Attribute {
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+@@ -261,19 +268,23 @@ fn error_expected_args(attr: &Attribute) -> Error {
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
+@@ -298,7 +309,7 @@ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+@@ -312,7 +323,6 @@ ast_enum! {
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+@@ -322,7 +332,7 @@ ast_enum! {
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+@@ -360,7 +370,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+@@ -372,7 +382,7 @@ ast_struct! {
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+@@ -398,7 +408,7 @@ impl Meta {
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+@@ -429,8 +439,8 @@ ast_enum_of_structs! {
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -464,7 +474,7 @@ where
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+@@ -474,7 +484,7 @@ where
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
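enter_args above gains a dedicated message for name-value attributes, so parse_args on something like #[doc = "..."] now reports the missing parentheses instead of a generic error. A rough sketch of how that reaches a caller, assuming syn 1.0.40 with default features:

use syn::parse::Parser;
use syn::{Attribute, LitStr};

fn main() {
    let attrs = Attribute::parse_outer
        .parse_str(r#"#[doc = "hello"]"#)
        .unwrap();
    // The attribute carries `= "hello"` rather than `(...)`, so this hits the
    // new `expected parentheses: #[doc(...)]` diagnostic from enter_args.
    match attrs[0].parse_args::<LitStr>() {
        Ok(_) => println!("unexpectedly parsed arguments"),
        Err(err) => println!("{}", err),
    }
}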
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+index 551a5ac816..a461cc49ea 100644
+--- third_party/rust/syn/src/buffer.rs
++++ third_party/rust/syn/src/buffer.rs
+@@ -1,7 +1,7 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+@@ -36,7 +36,7 @@ enum Entry {
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+@@ -98,7 +98,7 @@ impl TokenBuffer {
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -133,8 +133,7 @@ impl TokenBuffer {
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+@@ -201,13 +200,13 @@ impl<'a> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+@@ -215,13 +214,14 @@ impl<'a> Cursor<'a> {
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+@@ -342,6 +342,44 @@ impl<'a> Cursor<'a> {
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
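The ignore_none change above is just if let becoming while let (plus a break), so the cursor now descends through arbitrarily nested None-delimited groups rather than a single level. The behavioural difference in miniature, on a plain recursive enum standing in for nested groups:

enum Group {
    Nested(Box<Group>),
    Token(u32),
}

fn main() {
    let tree = Group::Nested(Box::new(Group::Nested(Box::new(Group::Token(42)))));

    // Old shape (if let): peels exactly one level of nesting.
    let mut node = &tree;
    if let Group::Nested(inner) = node {
        node = &**inner;
    }
    println!("if let reached a token: {}", matches!(node, Group::Token(_)));

    // New shape (while let): keeps peeling until a non-group is reached.
    let mut node = &tree;
    while let Group::Nested(inner) = node {
        node = &**inner;
    }
    println!("while let reached a token: {}", matches!(node, Group::Token(_)));
}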
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+index 200e8478ef..a33044a564 100644
+--- third_party/rust/syn/src/custom_keyword.rs
++++ third_party/rust/syn/src/custom_keyword.rs
+@@ -86,7 +86,7 @@
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+@@ -95,7 +95,7 @@ macro_rules! custom_keyword {
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+@@ -112,10 +112,10 @@ macro_rules! custom_keyword {
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+index 29fa448bd8..70dff42851 100644
+--- third_party/rust/syn/src/custom_punctuation.rs
++++ third_party/rust/syn/src/custom_punctuation.rs
+@@ -74,19 +74,19 @@
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+@@ -98,33 +98,33 @@ macro_rules! custom_punctuation {
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+@@ -142,12 +142,12 @@ macro_rules! impl_parse_for_custom_punctuation {
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+@@ -221,16 +221,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+@@ -279,7 +279,7 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+@@ -297,13 +297,3 @@ macro_rules! stringify_punct {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+index be43679874..b217b8ca6f 100644
+--- third_party/rust/syn/src/data.rs
++++ third_party/rust/syn/src/data.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+@@ -24,7 +24,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -52,7 +52,7 @@ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+@@ -63,7 +63,7 @@ ast_struct! {
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+@@ -93,6 +93,24 @@ impl Fields {
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+@@ -129,7 +147,7 @@ impl<'a> IntoIterator for &'a mut Fields {
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+@@ -154,7 +172,7 @@ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -184,7 +202,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+@@ -194,7 +212,7 @@ ast_struct! {
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+@@ -205,7 +223,7 @@ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+@@ -220,12 +238,15 @@ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+@@ -295,6 +316,17 @@ pub mod parsing {
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+@@ -310,27 +342,39 @@ pub mod parsing {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
+ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+@@ -347,6 +391,14 @@ pub mod parsing {
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+index 8cb9cf7b6d..3fa9d89a93 100644
+--- third_party/rust/syn/src/derive.rs
++++ third_party/rust/syn/src/derive.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+@@ -26,7 +26,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -53,7 +53,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+@@ -65,7 +65,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+@@ -77,7 +77,7 @@ ast_struct! {
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+index 4d9ff93728..76c9fce6f8 100644
+--- third_party/rust/syn/src/discouraged.rs
++++ third_party/rust/syn/src/discouraged.rs
+@@ -16,7 +16,7 @@ pub trait Speculative {
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+@@ -72,7 +72,6 @@ pub trait Speculative {
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+@@ -164,6 +163,30 @@ impl<'a> Speculative for ParseBuffer<'a> {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+index 146d652299..dba34f9254 100644
+--- third_party/rust/syn/src/error.rs
++++ third_party/rust/syn/src/error.rs
+@@ -1,4 +1,3 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+@@ -32,8 +31,8 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -82,7 +81,6 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+@@ -250,6 +248,17 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+@@ -278,6 +287,14 @@ impl Display for Error {
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+@@ -355,3 +372,11 @@ impl<'a> Iterator for Iter<'a> {
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+index 2874a463aa..2fe0e0b5d8 100644
+--- third_party/rust/syn/src/expr.rs
++++ third_party/rust/syn/src/expr.rs
+@@ -1,18 +1,21 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+@@ -83,7 +86,7 @@ ast_enum_of_structs! {
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+@@ -228,7 +231,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -239,7 +242,7 @@ ast_struct! {
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -251,7 +254,7 @@ ast_struct! {
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -263,7 +266,7 @@ ast_struct! {
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+@@ -275,7 +278,7 @@ ast_struct! {
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -287,7 +290,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+@@ -300,7 +303,7 @@ ast_struct! {
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -311,7 +314,7 @@ ast_struct! {
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -323,7 +326,7 @@ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+@@ -335,7 +338,7 @@ ast_struct! {
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+@@ -348,7 +351,7 @@ ast_struct! {
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+@@ -361,7 +364,7 @@ ast_struct! {
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+@@ -378,7 +381,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+@@ -390,7 +393,7 @@ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -402,7 +405,7 @@ ast_struct! {
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -421,7 +424,7 @@ ast_struct! {
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+@@ -436,7 +439,7 @@ ast_struct! {
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+@@ -449,7 +452,7 @@ ast_struct! {
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+@@ -462,7 +465,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -475,7 +478,7 @@ ast_struct! {
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+@@ -486,7 +489,7 @@ ast_struct! {
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -498,7 +501,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -508,7 +511,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+@@ -521,7 +524,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+@@ -536,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -550,7 +553,7 @@ ast_struct! {
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+@@ -562,7 +565,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+@@ -574,7 +577,7 @@ ast_struct! {
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -587,7 +590,7 @@ ast_struct! {
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -600,7 +603,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+@@ -614,7 +617,7 @@ ast_struct! {
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -628,7 +631,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -639,7 +642,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+@@ -650,7 +653,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -661,7 +664,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -673,7 +676,7 @@ ast_struct! {
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+@@ -685,7 +688,7 @@ ast_struct! {
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+@@ -696,7 +699,7 @@ ast_struct! {
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -709,7 +712,7 @@ ast_struct! {
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+@@ -717,232 +720,6 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+@@ -996,7 +773,7 @@ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+@@ -1006,12 +783,50 @@ ast_enum! {
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+@@ -1027,28 +842,28 @@ impl From<usize> for Index {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
+@@ -1057,7 +872,7 @@ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+@@ -1070,7 +885,7 @@ ast_struct! {
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+@@ -1086,7 +901,7 @@ ast_enum! {
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+@@ -1107,7 +922,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+@@ -1134,7 +949,7 @@ ast_struct! {
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+@@ -1149,8 +964,7 @@ ast_struct! {
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+@@ -1162,7 +976,7 @@ ast_enum! {
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+@@ -1183,16 +997,17 @@ pub(crate) mod parsing {
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+@@ -1246,9 +1061,121 @@ pub(crate) mod parsing {
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
+ #[cfg(feature = "full")]
+@@ -1430,56 +1357,84 @@ pub(crate) mod parsing {
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+@@ -1495,13 +1450,11 @@ pub(crate) mod parsing {
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+@@ -1523,18 +1476,26 @@ pub(crate) mod parsing {
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+@@ -1620,10 +1581,17 @@ pub(crate) mod parsing {
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+@@ -1646,7 +1614,11 @@ pub(crate) mod parsing {
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+@@ -1668,7 +1640,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+@@ -1740,7 +1711,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+@@ -1878,7 +1848,7 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+@@ -1905,7 +1875,7 @@ pub(crate) mod parsing {
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+@@ -1951,7 +1921,16 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
+@@ -1960,44 +1939,20 @@ pub(crate) mod parsing {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+@@ -2033,29 +1988,14 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2063,7 +2003,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+@@ -2077,6 +2017,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+@@ -2086,7 +2027,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+@@ -2097,8 +2038,9 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2110,7 +2052,7 @@ pub(crate) mod parsing {
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+@@ -2305,9 +2247,10 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2315,7 +2258,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+@@ -2399,6 +2342,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+@@ -2416,7 +2360,7 @@ pub(crate) mod parsing {
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+@@ -2433,46 +2377,36 @@ pub(crate) mod parsing {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+@@ -2577,27 +2511,7 @@ pub(crate) mod parsing {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+@@ -2641,6 +2555,26 @@ pub(crate) mod parsing {
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+index d09577a27a..4f9bc145d9 100644
+--- third_party/rust/syn/src/ext.rs
++++ third_party/rust/syn/src/ext.rs
+@@ -1,6 +1,6 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+@@ -16,7 +16,7 @@ use crate::token::CustomToken;
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+@@ -129,7 +129,13 @@ mod private {
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+index 88c02fe832..c8fab63cd9 100644
+--- third_party/rust/syn/src/file.rs
++++ third_party/rust/syn/src/file.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+@@ -37,6 +37,8 @@ ast_struct! {
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+index 0000000000..bea3887013
+--- /dev/null
++++ third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {
++ ItemForeignMod {
++ attrs: self.attrs.clone(),
++ abi: self.abi.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemImpl {
++ fn clone(&self) -> Self {
++ ItemImpl {
++ attrs: self.attrs.clone(),
++ defaultness: self.defaultness.clone(),
++ unsafety: self.unsafety.clone(),
++ impl_token: self.impl_token.clone(),
++ generics: self.generics.clone(),
++ trait_: self.trait_.clone(),
++ self_ty: self.self_ty.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro {
++ fn clone(&self) -> Self {
++ ItemMacro {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro2 {
++ fn clone(&self) -> Self {
++ ItemMacro2 {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ macro_token: self.macro_token.clone(),
++ ident: self.ident.clone(),
++ rules: self.rules.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMod {
++ fn clone(&self) -> Self {
++ ItemMod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ mod_token: self.mod_token.clone(),
++ ident: self.ident.clone(),
++ content: self.content.clone(),
++ semi: self.semi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStatic {
++ fn clone(&self) -> Self {
++ ItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStruct {
++ fn clone(&self) -> Self {
++ ItemStruct {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ struct_token: self.struct_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTrait {
++ fn clone(&self) -> Self {
++ ItemTrait {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ unsafety: self.unsafety.clone(),
++ auto_token: self.auto_token.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ supertraits: self.supertraits.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTraitAlias {
++ fn clone(&self) -> Self {
++ ItemTraitAlias {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ bounds: self.bounds.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemType {
++ fn clone(&self) -> Self {
++ ItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUnion {
++ fn clone(&self) -> Self {
++ ItemUnion {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ union_token: self.union_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUse {
++ fn clone(&self) -> Self {
++ ItemUse {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ use_token: self.use_token.clone(),
++ leading_colon: self.leading_colon.clone(),
++ tree: self.tree.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Label {
++ fn clone(&self) -> Self {
++ Label {
++ name: self.name.clone(),
++ colon_token: self.colon_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for LifetimeDef {
++ fn clone(&self) -> Self {
++ LifetimeDef {
++ attrs: self.attrs.clone(),
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++impl Clone for Lit {
++ fn clone(&self) -> Self {
++ match self {
++ Lit::Str(v0) => Lit::Str(v0.clone()),
++ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
++ Lit::Byte(v0) => Lit::Byte(v0.clone()),
++ Lit::Char(v0) => Lit::Char(v0.clone()),
++ Lit::Int(v0) => Lit::Int(v0.clone()),
++ Lit::Float(v0) => Lit::Float(v0.clone()),
++ Lit::Bool(v0) => Lit::Bool(v0.clone()),
++ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
++ }
++ }
++}
++impl Clone for LitBool {
++ fn clone(&self) -> Self {
++ LitBool {
++ value: self.value.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Local {
++ fn clone(&self) -> Self {
++ Local {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ init: self.init.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Macro {
++ fn clone(&self) -> Self {
++ Macro {
++ path: self.path.clone(),
++ bang_token: self.bang_token.clone(),
++ delimiter: self.delimiter.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MacroDelimiter {
++ fn clone(&self) -> Self {
++ match self {
++ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
++ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
++ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Member {
++ fn clone(&self) -> Self {
++ match self {
++ Member::Named(v0) => Member::Named(v0.clone()),
++ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Meta {
++ fn clone(&self) -> Self {
++ match self {
++ Meta::Path(v0) => Meta::Path(v0.clone()),
++ Meta::List(v0) => Meta::List(v0.clone()),
++ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaList {
++ fn clone(&self) -> Self {
++ MetaList {
++ path: self.path.clone(),
++ paren_token: self.paren_token.clone(),
++ nested: self.nested.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaNameValue {
++ fn clone(&self) -> Self {
++ MetaNameValue {
++ path: self.path.clone(),
++ eq_token: self.eq_token.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for MethodTurbofish {
++ fn clone(&self) -> Self {
++ MethodTurbofish {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for NestedMeta {
++ fn clone(&self) -> Self {
++ match self {
++ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
++ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ParenthesizedGenericArguments {
++ fn clone(&self) -> Self {
++ ParenthesizedGenericArguments {
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Pat {
++ fn clone(&self) -> Self {
++ match self {
++ Pat::Box(v0) => Pat::Box(v0.clone()),
++ Pat::Ident(v0) => Pat::Ident(v0.clone()),
++ Pat::Lit(v0) => Pat::Lit(v0.clone()),
++ Pat::Macro(v0) => Pat::Macro(v0.clone()),
++ Pat::Or(v0) => Pat::Or(v0.clone()),
++ Pat::Path(v0) => Pat::Path(v0.clone()),
++ Pat::Range(v0) => Pat::Range(v0.clone()),
++ Pat::Reference(v0) => Pat::Reference(v0.clone()),
++ Pat::Rest(v0) => Pat::Rest(v0.clone()),
++ Pat::Slice(v0) => Pat::Slice(v0.clone()),
++ Pat::Struct(v0) => Pat::Struct(v0.clone()),
++ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
++ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
++ Pat::Type(v0) => Pat::Type(v0.clone()),
++ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
++ Pat::Wild(v0) => Pat::Wild(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatBox {
++ fn clone(&self) -> Self {
++ PatBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatIdent {
++ fn clone(&self) -> Self {
++ PatIdent {
++ attrs: self.attrs.clone(),
++ by_ref: self.by_ref.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ subpat: self.subpat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatLit {
++ fn clone(&self) -> Self {
++ PatLit {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatMacro {
++ fn clone(&self) -> Self {
++ PatMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatOr {
++ fn clone(&self) -> Self {
++ PatOr {
++ attrs: self.attrs.clone(),
++ leading_vert: self.leading_vert.clone(),
++ cases: self.cases.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatPath {
++ fn clone(&self) -> Self {
++ PatPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRange {
++ fn clone(&self) -> Self {
++ PatRange {
++ attrs: self.attrs.clone(),
++ lo: self.lo.clone(),
++ limits: self.limits.clone(),
++ hi: self.hi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatReference {
++ fn clone(&self) -> Self {
++ PatReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ mutability: self.mutability.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRest {
++ fn clone(&self) -> Self {
++ PatRest {
++ attrs: self.attrs.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatSlice {
++ fn clone(&self) -> Self {
++ PatSlice {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatStruct {
++ fn clone(&self) -> Self {
++ PatStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTuple {
++ fn clone(&self) -> Self {
++ PatTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTupleStruct {
++ fn clone(&self) -> Self {
++ PatTupleStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatType {
++ fn clone(&self) -> Self {
++ PatType {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatWild {
++ fn clone(&self) -> Self {
++ PatWild {
++ attrs: self.attrs.clone(),
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Path {
++ fn clone(&self) -> Self {
++ Path {
++ leading_colon: self.leading_colon.clone(),
++ segments: self.segments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathArguments {
++ fn clone(&self) -> Self {
++ match self {
++ PathArguments::None => PathArguments::None,
++ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
++ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathSegment {
++ fn clone(&self) -> Self {
++ PathSegment {
++ ident: self.ident.clone(),
++ arguments: self.arguments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateEq {
++ fn clone(&self) -> Self {
++ PredicateEq {
++ lhs_ty: self.lhs_ty.clone(),
++ eq_token: self.eq_token.clone(),
++ rhs_ty: self.rhs_ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateLifetime {
++ fn clone(&self) -> Self {
++ PredicateLifetime {
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateType {
++ fn clone(&self) -> Self {
++ PredicateType {
++ lifetimes: self.lifetimes.clone(),
++ bounded_ty: self.bounded_ty.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for QSelf {
++ fn clone(&self) -> Self {
++ QSelf {
++ lt_token: self.lt_token.clone(),
++ ty: self.ty.clone(),
++ position: self.position.clone(),
++ as_token: self.as_token.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Copy for RangeLimits {}
++#[cfg(feature = "full")]
++impl Clone for RangeLimits {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Receiver {
++ fn clone(&self) -> Self {
++ Receiver {
++ attrs: self.attrs.clone(),
++ reference: self.reference.clone(),
++ mutability: self.mutability.clone(),
++ self_token: self.self_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ReturnType {
++ fn clone(&self) -> Self {
++ match self {
++ ReturnType::Default => ReturnType::Default,
++ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Signature {
++ fn clone(&self) -> Self {
++ Signature {
++ constness: self.constness.clone(),
++ asyncness: self.asyncness.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Stmt {
++ fn clone(&self) -> Self {
++ match self {
++ Stmt::Local(v0) => Stmt::Local(v0.clone()),
++ Stmt::Item(v0) => Stmt::Item(v0.clone()),
++ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
++ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBound {
++ fn clone(&self) -> Self {
++ TraitBound {
++ paren_token: self.paren_token.clone(),
++ modifier: self.modifier.clone(),
++ lifetimes: self.lifetimes.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBoundModifier {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItem {
++ fn clone(&self) -> Self {
++ match self {
++ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
++ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
++ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
++ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
++ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemConst {
++ fn clone(&self) -> Self {
++ TraitItemConst {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMacro {
++ fn clone(&self) -> Self {
++ TraitItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMethod {
++ fn clone(&self) -> Self {
++ TraitItemMethod {
++ attrs: self.attrs.clone(),
++ sig: self.sig.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemType {
++ fn clone(&self) -> Self {
++ TraitItemType {
++ attrs: self.attrs.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Type {
++ fn clone(&self) -> Self {
++ match self {
++ Type::Array(v0) => Type::Array(v0.clone()),
++ Type::BareFn(v0) => Type::BareFn(v0.clone()),
++ Type::Group(v0) => Type::Group(v0.clone()),
++ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
++ Type::Infer(v0) => Type::Infer(v0.clone()),
++ Type::Macro(v0) => Type::Macro(v0.clone()),
++ Type::Never(v0) => Type::Never(v0.clone()),
++ Type::Paren(v0) => Type::Paren(v0.clone()),
++ Type::Path(v0) => Type::Path(v0.clone()),
++ Type::Ptr(v0) => Type::Ptr(v0.clone()),
++ Type::Reference(v0) => Type::Reference(v0.clone()),
++ Type::Slice(v0) => Type::Slice(v0.clone()),
++ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
++ Type::Tuple(v0) => Type::Tuple(v0.clone()),
++ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeArray {
++ fn clone(&self) -> Self {
++ TypeArray {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeBareFn {
++ fn clone(&self) -> Self {
++ TypeBareFn {
++ lifetimes: self.lifetimes.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeGroup {
++ fn clone(&self) -> Self {
++ TypeGroup {
++ group_token: self.group_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeImplTrait {
++ fn clone(&self) -> Self {
++ TypeImplTrait {
++ impl_token: self.impl_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeInfer {
++ fn clone(&self) -> Self {
++ TypeInfer {
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeMacro {
++ fn clone(&self) -> Self {
++ TypeMacro {
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeNever {
++ fn clone(&self) -> Self {
++ TypeNever {
++ bang_token: self.bang_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParam {
++ fn clone(&self) -> Self {
++ TypeParam {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParamBound {
++ fn clone(&self) -> Self {
++ match self {
++ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
++ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParen {
++ fn clone(&self) -> Self {
++ TypeParen {
++ paren_token: self.paren_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePath {
++ fn clone(&self) -> Self {
++ TypePath {
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePtr {
++ fn clone(&self) -> Self {
++ TypePtr {
++ star_token: self.star_token.clone(),
++ const_token: self.const_token.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeReference {
++ fn clone(&self) -> Self {
++ TypeReference {
++ and_token: self.and_token.clone(),
++ lifetime: self.lifetime.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeSlice {
++ fn clone(&self) -> Self {
++ TypeSlice {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTraitObject {
++ fn clone(&self) -> Self {
++ TypeTraitObject {
++ dyn_token: self.dyn_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTuple {
++ fn clone(&self) -> Self {
++ TypeTuple {
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for UnOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGlob {
++ fn clone(&self) -> Self {
++ UseGlob {
++ star_token: self.star_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGroup {
++ fn clone(&self) -> Self {
++ UseGroup {
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseName {
++ fn clone(&self) -> Self {
++ UseName {
++ ident: self.ident.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UsePath {
++ fn clone(&self) -> Self {
++ UsePath {
++ ident: self.ident.clone(),
++ colon2_token: self.colon2_token.clone(),
++ tree: self.tree.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseRename {
++ fn clone(&self) -> Self {
++ UseRename {
++ ident: self.ident.clone(),
++ as_token: self.as_token.clone(),
++ rename: self.rename.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseTree {
++ fn clone(&self) -> Self {
++ match self {
++ UseTree::Path(v0) => UseTree::Path(v0.clone()),
++ UseTree::Name(v0) => UseTree::Name(v0.clone()),
++ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
++ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
++ UseTree::Group(v0) => UseTree::Group(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variadic {
++ fn clone(&self) -> Self {
++ Variadic {
++ attrs: self.attrs.clone(),
++ dots: self.dots.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variant {
++ fn clone(&self) -> Self {
++ Variant {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ fields: self.fields.clone(),
++ discriminant: self.discriminant.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisCrate {
++ fn clone(&self) -> Self {
++ VisCrate {
++ crate_token: self.crate_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisPublic {
++ fn clone(&self) -> Self {
++ VisPublic {
++ pub_token: self.pub_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisRestricted {
++ fn clone(&self) -> Self {
++ VisRestricted {
++ pub_token: self.pub_token.clone(),
++ paren_token: self.paren_token.clone(),
++ in_token: self.in_token.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Visibility {
++ fn clone(&self) -> Self {
++ match self {
++ Visibility::Public(v0) => Visibility::Public(v0.clone()),
++ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
++ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
++ Visibility::Inherited => Visibility::Inherited,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WhereClause {
++ fn clone(&self) -> Self {
++ WhereClause {
++ where_token: self.where_token.clone(),
++ predicates: self.predicates.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WherePredicate {
++ fn clone(&self) -> Self {
++ match self {
++ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
++ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
++ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
+new file mode 100644
+index 0000000000..72baab05f4
+--- /dev/null
++++ third_party/rust/syn/src/gen/debug.rs
+@@ -0,0 +1,2857 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++use crate::*;
++use std::fmt::{self, Debug};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Abi {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Abi");
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("name", &self.name);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AngleBracketedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Arm {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Arm");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("guard", &self.guard);
++ formatter.field("fat_arrow_token", &self.fat_arrow_token);
++ formatter.field("body", &self.body);
++ formatter.field("comma", &self.comma);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AttrStyle {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ AttrStyle::Outer => formatter.write_str("Outer"),
++ AttrStyle::Inner(v0) => {
++ let mut formatter = formatter.debug_tuple("Inner");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Attribute {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Attribute");
++ formatter.field("pound_token", &self.pound_token);
++ formatter.field("style", &self.style);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("path", &self.path);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BareFnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BareFnArg");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("name", &self.name);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BinOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ BinOp::Add(v0) => {
++ let mut formatter = formatter.debug_tuple("Add");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Sub(v0) => {
++ let mut formatter = formatter.debug_tuple("Sub");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Mul(v0) => {
++ let mut formatter = formatter.debug_tuple("Mul");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Div(v0) => {
++ let mut formatter = formatter.debug_tuple("Div");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Rem(v0) => {
++ let mut formatter = formatter.debug_tuple("Rem");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::And(v0) => {
++ let mut formatter = formatter.debug_tuple("And");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXor(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXor");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAnd(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAnd");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOr(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shl(v0) => {
++ let mut formatter = formatter.debug_tuple("Shl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shr(v0) => {
++ let mut formatter = formatter.debug_tuple("Shr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Lt(v0) => {
++ let mut formatter = formatter.debug_tuple("Lt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Le(v0) => {
++ let mut formatter = formatter.debug_tuple("Le");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ne(v0) => {
++ let mut formatter = formatter.debug_tuple("Ne");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ge(v0) => {
++ let mut formatter = formatter.debug_tuple("Ge");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Gt(v0) => {
++ let mut formatter = formatter.debug_tuple("Gt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::AddEq(v0) => {
++ let mut formatter = formatter.debug_tuple("AddEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::SubEq(v0) => {
++ let mut formatter = formatter.debug_tuple("SubEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::MulEq(v0) => {
++ let mut formatter = formatter.debug_tuple("MulEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::DivEq(v0) => {
++ let mut formatter = formatter.debug_tuple("DivEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::RemEq(v0) => {
++ let mut formatter = formatter.debug_tuple("RemEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXorEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXorEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAndEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAndEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShlEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShlEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Binding {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Binding");
++ formatter.field("ident", &self.ident);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Block {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Block");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("stmts", &self.stmts);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BoundLifetimes {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BoundLifetimes");
++ formatter.field("for_token", &self.for_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ConstParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ConstParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Constraint {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Constraint");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for Data {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Data::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataEnum");
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataStruct");
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataUnion");
++ formatter.field("union_token", &self.union_token);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DeriveInput {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DeriveInput");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("data", &self.data);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Expr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ let mut formatter = formatter.debug_tuple("Assign");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ let mut formatter = formatter.debug_tuple("AssignOp");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ let mut formatter = formatter.debug_tuple("Async");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ let mut formatter = formatter.debug_tuple("Await");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Binary(v0) => {
++ let mut formatter = formatter.debug_tuple("Binary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ let mut formatter = formatter.debug_tuple("Block");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ let mut formatter = formatter.debug_tuple("Break");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Call(v0) => {
++ let mut formatter = formatter.debug_tuple("Call");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Cast(v0) => {
++ let mut formatter = formatter.debug_tuple("Cast");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ let mut formatter = formatter.debug_tuple("Closure");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ let mut formatter = formatter.debug_tuple("Continue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Field(v0) => {
++ let mut formatter = formatter.debug_tuple("Field");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ let mut formatter = formatter.debug_tuple("ForLoop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ let mut formatter = formatter.debug_tuple("If");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Index(v0) => {
++ let mut formatter = formatter.debug_tuple("Index");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ let mut formatter = formatter.debug_tuple("Let");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ let mut formatter = formatter.debug_tuple("Loop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ let mut formatter = formatter.debug_tuple("Match");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ let mut formatter = formatter.debug_tuple("MethodCall");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ let mut formatter = formatter.debug_tuple("Repeat");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ let mut formatter = formatter.debug_tuple("Return");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ let mut formatter = formatter.debug_tuple("Try");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ let mut formatter = formatter.debug_tuple("TryBlock");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Unary(v0) => {
++ let mut formatter = formatter.debug_tuple("Unary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ let mut formatter = formatter.debug_tuple("Unsafe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ let mut formatter = formatter.debug_tuple("While");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ let mut formatter = formatter.debug_tuple("Yield");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprArray");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssign {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssign");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssignOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssignOp");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAsync {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAsync");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("async_token", &self.async_token);
++ formatter.field("capture", &self.capture);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAwait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAwait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("await_token", &self.await_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprBinary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBinary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBreak {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBreak");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("break_token", &self.break_token);
++ formatter.field("label", &self.label);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("func", &self.func);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCast {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCast");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprClosure {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprClosure");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("movability", &self.movability);
++ formatter.field("capture", &self.capture);
++ formatter.field("or1_token", &self.or1_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("or2_token", &self.or2_token);
++ formatter.field("output", &self.output);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprContinue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprContinue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("continue_token", &self.continue_token);
++ formatter.field("label", &self.label);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprField {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprField");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("member", &self.member);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprForLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprForLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("for_token", &self.for_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprGroup");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("group_token", &self.group_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprIf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIf");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("if_token", &self.if_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("then_branch", &self.then_branch);
++ formatter.field("else_branch", &self.else_branch);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprIndex {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIndex");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("index", &self.index);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLet {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLet");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("loop_token", &self.loop_token);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMatch {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMatch");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("match_token", &self.match_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("arms", &self.arms);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMethodCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMethodCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("receiver", &self.receiver);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("method", &self.method);
++ formatter.field("turbofish", &self.turbofish);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprParen");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("from", &self.from);
++ formatter.field("limits", &self.limits);
++ formatter.field("to", &self.to);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("raw", &self.raw);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRepeat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRepeat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReturn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReturn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("return_token", &self.return_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.field("rest", &self.rest);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTry {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTry");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("question_token", &self.question_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTryBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTryBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("try_token", &self.try_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprUnary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("op", &self.op);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprUnsafe {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnsafe");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("unsafe_token", &self.unsafe_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprWhile {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprWhile");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("while_token", &self.while_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprYield {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprYield");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("yield_token", &self.yield_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Field {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Field");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldPat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldPat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldValue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Fields {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Fields::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unit => formatter.write_str("Unit"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsNamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsNamed");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("named", &self.named);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsUnnamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsUnnamed");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("unnamed", &self.unnamed);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for File {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("File");
++ formatter.field("shebang", &self.shebang);
++ formatter.field("attrs", &self.attrs);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ FnArg::Receiver(v0) => {
++ let mut formatter = formatter.debug_tuple("Receiver");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ FnArg::Typed(v0) => {
++ let mut formatter = formatter.debug_tuple("Typed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ForeignItem::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Binding(v0) => {
++ let mut formatter = formatter.debug_tuple("Binding");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Constraint(v0) => {
++ let mut formatter = formatter.debug_tuple("Constraint");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for GenericMethodArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericMethodArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericParam::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Generics {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Generics");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("params", &self.params);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.field("where_clause", &self.where_clause);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ImplItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Index");
++ formatter.field("index", &self.index);
++ formatter.field("span", &self.span);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Item {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Item::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ExternCrate(v0) => {
++ let mut formatter = formatter.debug_tuple("ExternCrate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ForeignMod(v0) => {
++ let mut formatter = formatter.debug_tuple("ForeignMod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Impl(v0) => {
++ let mut formatter = formatter.debug_tuple("Impl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro2(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro2");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Mod(v0) => {
++ let mut formatter = formatter.debug_tuple("Mod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::TraitAlias(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitAlias");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Use(v0) => {
++ let mut formatter = formatter.debug_tuple("Use");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemEnum");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemExternCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemExternCrate");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("crate_token", &self.crate_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rename", &self.rename);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemForeignMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemForeignMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("abi", &self.abi);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemImpl {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemImpl");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("generics", &self.generics);
++ formatter.field("trait_", &self.trait_);
++ formatter.field("self_ty", &self.self_ty);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro2 {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro2");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("macro_token", &self.macro_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rules", &self.rules);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("mod_token", &self.mod_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("content", &self.content);
++ formatter.field("semi", &self.semi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTrait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("auto_token", &self.auto_token);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("supertraits", &self.supertraits);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTraitAlias {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTraitAlias");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUnion");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("union_token", &self.union_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUse {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUse");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("use_token", &self.use_token);
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("tree", &self.tree);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Label {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Label");
++ formatter.field("name", &self.name);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.finish()
++ }
++}
++impl Debug for Lifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Lifetime");
++ formatter.field("apostrophe", &self.apostrophe);
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for LifetimeDef {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("LifetimeDef");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++impl Debug for Lit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Lit::Str(v0) => {
++ let mut formatter = formatter.debug_tuple("Str");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::ByteStr(v0) => {
++ let mut formatter = formatter.debug_tuple("ByteStr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Byte(v0) => {
++ let mut formatter = formatter.debug_tuple("Byte");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Char(v0) => {
++ let mut formatter = formatter.debug_tuple("Char");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Int(v0) => {
++ let mut formatter = formatter.debug_tuple("Int");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Float(v0) => {
++ let mut formatter = formatter.debug_tuple("Float");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Bool(v0) => {
++ let mut formatter = formatter.debug_tuple("Bool");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Local {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Local");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("init", &self.init);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Macro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Macro");
++ formatter.field("path", &self.path);
++ formatter.field("bang_token", &self.bang_token);
++ formatter.field("delimiter", &self.delimiter);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MacroDelimiter {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ MacroDelimiter::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Brace(v0) => {
++ let mut formatter = formatter.debug_tuple("Brace");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Bracket(v0) => {
++ let mut formatter = formatter.debug_tuple("Bracket");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Member::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Meta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Meta::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::List(v0) => {
++ let mut formatter = formatter.debug_tuple("List");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::NameValue(v0) => {
++ let mut formatter = formatter.debug_tuple("NameValue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaList {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaList");
++ formatter.field("path", &self.path);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("nested", &self.nested);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaNameValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaNameValue");
++ formatter.field("path", &self.path);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for MethodTurbofish {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MethodTurbofish");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for NestedMeta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ NestedMeta::Meta(v0) => {
++ let mut formatter = formatter.debug_tuple("Meta");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ NestedMeta::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ParenthesizedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Pat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Pat::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Ident(v0) => {
++ let mut formatter = formatter.debug_tuple("Ident");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Rest(v0) => {
++ let mut formatter = formatter.debug_tuple("Rest");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::TupleStruct(v0) => {
++ let mut formatter = formatter.debug_tuple("TupleStruct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Wild(v0) => {
++ let mut formatter = formatter.debug_tuple("Wild");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatIdent {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatIdent");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("by_ref", &self.by_ref);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("subpat", &self.subpat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatOr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatOr");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("leading_vert", &self.leading_vert);
++ formatter.field("cases", &self.cases);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lo", &self.lo);
++ formatter.field("limits", &self.limits);
++ formatter.field("hi", &self.hi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRest {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRest");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatSlice");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTupleStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTupleStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatWild {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatWild");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Path {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Path");
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("segments", &self.segments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ PathArguments::None => formatter.write_str("None"),
++ PathArguments::AngleBracketed(v0) => {
++ let mut formatter = formatter.debug_tuple("AngleBracketed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ PathArguments::Parenthesized(v0) => {
++ let mut formatter = formatter.debug_tuple("Parenthesized");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathSegment {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PathSegment");
++ formatter.field("ident", &self.ident);
++ formatter.field("arguments", &self.arguments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateEq {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateEq");
++ formatter.field("lhs_ty", &self.lhs_ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("rhs_ty", &self.rhs_ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateLifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateLifetime");
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateType");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("bounded_ty", &self.bounded_ty);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for QSelf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("QSelf");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("position", &self.position);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for RangeLimits {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ RangeLimits::HalfOpen(v0) => {
++ let mut formatter = formatter.debug_tuple("HalfOpen");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ RangeLimits::Closed(v0) => {
++ let mut formatter = formatter.debug_tuple("Closed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Receiver {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Receiver");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("reference", &self.reference);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("self_token", &self.self_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ReturnType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ReturnType::Default => formatter.write_str("Default"),
++ ReturnType::Type(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Signature {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Signature");
++ formatter.field("constness", &self.constness);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Stmt {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Stmt::Local(v0) => {
++ let mut formatter = formatter.debug_tuple("Local");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Item(v0) => {
++ let mut formatter = formatter.debug_tuple("Item");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Expr(v0) => {
++ let mut formatter = formatter.debug_tuple("Expr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Semi(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Semi");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitBound");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("modifier", &self.modifier);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBoundModifier {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitBoundModifier::None => formatter.write_str("None"),
++ TraitBoundModifier::Maybe(v0) => {
++ let mut formatter = formatter.debug_tuple("Maybe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("sig", &self.sig);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Type {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Type::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::BareFn(v0) => {
++ let mut formatter = formatter.debug_tuple("BareFn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::ImplTrait(v0) => {
++ let mut formatter = formatter.debug_tuple("ImplTrait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Infer(v0) => {
++ let mut formatter = formatter.debug_tuple("Infer");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Never(v0) => {
++ let mut formatter = formatter.debug_tuple("Never");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Ptr(v0) => {
++ let mut formatter = formatter.debug_tuple("Ptr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::TraitObject(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitObject");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeArray");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeBareFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeBareFn");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeGroup");
++ formatter.field("group_token", &self.group_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeImplTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeImplTrait");
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeInfer {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeInfer");
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeMacro");
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeNever {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeNever");
++ formatter.field("bang_token", &self.bang_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParamBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TypeParamBound::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParen");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePath");
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePtr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePtr");
++ formatter.field("star_token", &self.star_token);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeReference");
++ formatter.field("and_token", &self.and_token);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeSlice");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTraitObject {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTraitObject");
++ formatter.field("dyn_token", &self.dyn_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTuple");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for UnOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UnOp::Deref(v0) => {
++ let mut formatter = formatter.debug_tuple("Deref");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Not(v0) => {
++ let mut formatter = formatter.debug_tuple("Not");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Neg(v0) => {
++ let mut formatter = formatter.debug_tuple("Neg");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGlob {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGlob");
++ formatter.field("star_token", &self.star_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGroup");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseName {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseName");
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UsePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UsePath");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("tree", &self.tree);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseRename {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseRename");
++ formatter.field("ident", &self.ident);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("rename", &self.rename);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseTree {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UseTree::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Name(v0) => {
++ let mut formatter = formatter.debug_tuple("Name");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Rename(v0) => {
++ let mut formatter = formatter.debug_tuple("Rename");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Glob(v0) => {
++ let mut formatter = formatter.debug_tuple("Glob");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variadic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variadic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dots", &self.dots);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variant {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variant");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("fields", &self.fields);
++ formatter.field("discriminant", &self.discriminant);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisCrate");
++ formatter.field("crate_token", &self.crate_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisPublic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisPublic");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisRestricted {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisRestricted");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Visibility {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Visibility::Public(v0) => {
++ let mut formatter = formatter.debug_tuple("Public");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Crate(v0) => {
++ let mut formatter = formatter.debug_tuple("Crate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Restricted(v0) => {
++ let mut formatter = formatter.debug_tuple("Restricted");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Inherited => formatter.write_str("Inherited"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WhereClause {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("WhereClause");
++ formatter.field("where_token", &self.where_token);
++ formatter.field("predicates", &self.predicates);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WherePredicate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ WherePredicate::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
+new file mode 100644
+index 0000000000..15b2bcbbde
+--- /dev/null
++++ third_party/rust/syn/src/gen/eq.rs
+@@ -0,0 +1,1930 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Abi {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Abi {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AngleBracketedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AngleBracketedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.colon2_token == other.colon2_token && self.args == other.args
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Arm {}
++#[cfg(feature = "full")]
++impl PartialEq for Arm {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.pat == other.pat
++ && self.guard == other.guard
++ && self.body == other.body
++ && self.comma == other.comma
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AttrStyle {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (AttrStyle::Outer, AttrStyle::Outer) => true,
++ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Attribute {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Attribute {
++ fn eq(&self, other: &Self) -> bool {
++ self.style == other.style
++ && self.path == other.path
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BareFnArg {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BareFnArg {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BinOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (BinOp::Add(_), BinOp::Add(_)) => true,
++ (BinOp::Sub(_), BinOp::Sub(_)) => true,
++ (BinOp::Mul(_), BinOp::Mul(_)) => true,
++ (BinOp::Div(_), BinOp::Div(_)) => true,
++ (BinOp::Rem(_), BinOp::Rem(_)) => true,
++ (BinOp::And(_), BinOp::And(_)) => true,
++ (BinOp::Or(_), BinOp::Or(_)) => true,
++ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
++ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
++ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
++ (BinOp::Shl(_), BinOp::Shl(_)) => true,
++ (BinOp::Shr(_), BinOp::Shr(_)) => true,
++ (BinOp::Eq(_), BinOp::Eq(_)) => true,
++ (BinOp::Lt(_), BinOp::Lt(_)) => true,
++ (BinOp::Le(_), BinOp::Le(_)) => true,
++ (BinOp::Ne(_), BinOp::Ne(_)) => true,
++ (BinOp::Ge(_), BinOp::Ge(_)) => true,
++ (BinOp::Gt(_), BinOp::Gt(_)) => true,
++ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
++ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
++ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
++ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
++ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
++ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
++ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
++ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
++ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
++ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Binding {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Binding {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Block {}
++#[cfg(feature = "full")]
++impl PartialEq for Block {
++ fn eq(&self, other: &Self) -> bool {
++ self.stmts == other.stmts
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BoundLifetimes {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BoundLifetimes {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ConstParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ConstParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Constraint {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Constraint {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for Data {}
++#[cfg(feature = "derive")]
++impl PartialEq for Data {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
++ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
++ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataEnum {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.variants == other.variants
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataStruct {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataUnion {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DeriveInput {}
++#[cfg(feature = "derive")]
++impl PartialEq for DeriveInput {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.data == other.data
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Expr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Expr {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ #[cfg(feature = "full")]
++ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
++ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
++ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
++ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
++ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
++ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
++ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
++ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
++ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
++ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
++ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ #[cfg(feature = "full")]
++ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprArray {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssign {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssign {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.left == other.left && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssignOp {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssignOp {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAsync {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAsync {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAwait {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAwait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprBinary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprBinary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBox {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBreak {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBreak {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCall {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.func == other.func && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCast {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCast {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprClosure {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprClosure {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.asyncness == other.asyncness
++ && self.movability == other.movability
++ && self.capture == other.capture
++ && self.inputs == other.inputs
++ && self.output == other.output
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprContinue {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprContinue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprField {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprField {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base && self.member == other.member
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprForLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprForLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.pat == other.pat
++ && self.expr == other.expr
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprIf {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprIf {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.cond == other.cond
++ && self.then_branch == other.then_branch
++ && self.else_branch == other.else_branch
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprIndex {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprIndex {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLet {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLet {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprLit {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMatch {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMatch {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMethodCall {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMethodCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.receiver == other.receiver
++ && self.method == other.method
++ && self.turbofish == other.turbofish
++ && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprPath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRange {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.from == other.from
++ && self.limits == other.limits
++ && self.to == other.to
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReference {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRepeat {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRepeat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReturn {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReturn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ && self.rest == other.rest
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTry {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTry {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTryBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTryBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprType {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprUnary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprUnary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprUnsafe {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprUnsafe {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprWhile {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprWhile {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.cond == other.cond
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprYield {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprYield {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Field {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Field {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldPat {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldPat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldValue {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Fields {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Fields {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
++ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
++ (Fields::Unit, Fields::Unit) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsNamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsNamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.named == other.named
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsUnnamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsUnnamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.unnamed == other.unnamed
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for File {}
++#[cfg(feature = "full")]
++impl PartialEq for File {
++ fn eq(&self, other: &Self) -> bool {
++ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FnArg {}
++#[cfg(feature = "full")]
++impl PartialEq for FnArg {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
++ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
++ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
++ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
++ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
++ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericArgument {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
++ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
++ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for GenericMethodArgument {}
++#[cfg(feature = "full")]
++impl PartialEq for GenericMethodArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
++ self0 == other0
++ }
++ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericParam {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
++ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
++ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Generics {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Generics {
++ fn eq(&self, other: &Self) -> bool {
++ self.lt_token == other.lt_token
++ && self.params == other.params
++ && self.gt_token == other.gt_token
++ && self.where_clause == other.where_clause
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
++ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
++ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
++ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
++ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Item {}
++#[cfg(feature = "full")]
++impl PartialEq for Item {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
++ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
++ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
++ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
++ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
++ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
++ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
++ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
++ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
++ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
++ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
++ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
++ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
++ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
++ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
++ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
++ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemEnum {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.variants == other.variants
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemExternCrate {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemExternCrate {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemForeignMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemForeignMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemImpl {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemImpl {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.defaultness == other.defaultness
++ && self.unsafety == other.unsafety
++ && self.generics == other.generics
++ && self.trait_ == other.trait_
++ && self.self_ty == other.self_ty
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.mac == other.mac
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro2 {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro2 {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.content == other.content
++ && self.semi == other.semi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTrait {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.unsafety == other.unsafety
++ && self.auto_token == other.auto_token
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.supertraits == other.supertraits
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTraitAlias {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTraitAlias {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUnion {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUse {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUse {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.leading_colon == other.leading_colon
++ && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Label {}
++#[cfg(feature = "full")]
++impl PartialEq for Label {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for LifetimeDef {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for LifetimeDef {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lifetime == other.lifetime
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ }
++}
++impl Eq for Lit {}
++impl PartialEq for Lit {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
++ (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
++ (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
++ (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
++ (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
++ (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
++ (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
++ (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
++ self0.to_string() == other0.to_string()
++ }
++ _ => false,
++ }
++ }
++}
++impl Eq for LitBool {}
++impl PartialEq for LitBool {
++ fn eq(&self, other: &Self) -> bool {
++ self.value == other.value
++ }
++}
++impl Eq for LitByte {}
++impl Eq for LitByteStr {}
++impl Eq for LitChar {}
++impl Eq for LitFloat {}
++impl Eq for LitInt {}
++impl Eq for LitStr {}
++#[cfg(feature = "full")]
++impl Eq for Local {}
++#[cfg(feature = "full")]
++impl PartialEq for Local {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Macro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Macro {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path
++ && self.delimiter == other.delimiter
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MacroDelimiter {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MacroDelimiter {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
++ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
++ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Meta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Meta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
++ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
++ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaList {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaList {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.nested == other.nested
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaNameValue {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaNameValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for MethodTurbofish {}
++#[cfg(feature = "full")]
++impl PartialEq for MethodTurbofish {
++ fn eq(&self, other: &Self) -> bool {
++ self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for NestedMeta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for NestedMeta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
++ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ParenthesizedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ParenthesizedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.inputs == other.inputs && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Pat {}
++#[cfg(feature = "full")]
++impl PartialEq for Pat {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
++ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
++ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
++ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
++ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
++ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
++ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
++ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
++ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
++ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
++ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
++ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
++ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
++ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
++ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatBox {}
++#[cfg(feature = "full")]
++impl PartialEq for PatBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatIdent {}
++#[cfg(feature = "full")]
++impl PartialEq for PatIdent {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.by_ref == other.by_ref
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.subpat == other.subpat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatLit {}
++#[cfg(feature = "full")]
++impl PartialEq for PatLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for PatMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatOr {}
++#[cfg(feature = "full")]
++impl PartialEq for PatOr {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.leading_vert == other.leading_vert
++ && self.cases == other.cases
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatPath {}
++#[cfg(feature = "full")]
++impl PartialEq for PatPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRange {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lo == other.lo
++ && self.limits == other.limits
++ && self.hi == other.hi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatReference {}
++#[cfg(feature = "full")]
++impl PartialEq for PatReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRest {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRest {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatSlice {}
++#[cfg(feature = "full")]
++impl PartialEq for PatSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTupleStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTupleStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatType {}
++#[cfg(feature = "full")]
++impl PartialEq for PatType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatWild {}
++#[cfg(feature = "full")]
++impl PartialEq for PatWild {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Path {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Path {
++ fn eq(&self, other: &Self) -> bool {
++ self.leading_colon == other.leading_colon && self.segments == other.segments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathArguments {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (PathArguments::None, PathArguments::None) => true,
++ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
++ self0 == other0
++ }
++ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathSegment {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathSegment {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.arguments == other.arguments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateEq {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateEq {
++ fn eq(&self, other: &Self) -> bool {
++ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateLifetime {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateLifetime {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateType {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.bounded_ty == other.bounded_ty
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for QSelf {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for QSelf {
++ fn eq(&self, other: &Self) -> bool {
++ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for RangeLimits {}
++#[cfg(feature = "full")]
++impl PartialEq for RangeLimits {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
++ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Receiver {}
++#[cfg(feature = "full")]
++impl PartialEq for Receiver {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.reference == other.reference
++ && self.mutability == other.mutability
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ReturnType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ReturnType {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ReturnType::Default, ReturnType::Default) => true,
++ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Signature {}
++#[cfg(feature = "full")]
++impl PartialEq for Signature {
++ fn eq(&self, other: &Self) -> bool {
++ self.constness == other.constness
++ && self.asyncness == other.asyncness
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Stmt {}
++#[cfg(feature = "full")]
++impl PartialEq for Stmt {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
++ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
++ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
++ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBound {
++ fn eq(&self, other: &Self) -> bool {
++ self.paren_token == other.paren_token
++ && self.modifier == other.modifier
++ && self.lifetimes == other.lifetimes
++ && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBoundModifier {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
++ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItem {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
++ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
++ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
++ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
++ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.default == other.default
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.sig == other.sig
++ && self.default == other.default
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Type {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Type {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
++ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
++ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
++ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
++ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
++ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
++ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
++ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
++ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
++ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
++ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
++ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
++ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
++ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
++ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeArray {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem && self.len == other.len
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeBareFn {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeBareFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeGroup {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeImplTrait {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeImplTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeInfer {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeInfer {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeMacro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.mac == other.mac
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeNever {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeNever {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParamBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParamBound {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
++ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePtr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePtr {
++ fn eq(&self, other: &Self) -> bool {
++ self.const_token == other.const_token
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeReference {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeSlice {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTraitObject {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTraitObject {
++ fn eq(&self, other: &Self) -> bool {
++ self.dyn_token == other.dyn_token && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTuple {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.elems == other.elems
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for UnOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UnOp::Deref(_), UnOp::Deref(_)) => true,
++ (UnOp::Not(_), UnOp::Not(_)) => true,
++ (UnOp::Neg(_), UnOp::Neg(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGlob {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGlob {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseName {}
++#[cfg(feature = "full")]
++impl PartialEq for UseName {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UsePath {}
++#[cfg(feature = "full")]
++impl PartialEq for UsePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseRename {}
++#[cfg(feature = "full")]
++impl PartialEq for UseRename {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseTree {}
++#[cfg(feature = "full")]
++impl PartialEq for UseTree {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
++ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
++ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
++ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
++ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variadic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variadic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variant {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variant {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.fields == other.fields
++ && self.discriminant == other.discriminant
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisCrate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisCrate {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisPublic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisPublic {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisRestricted {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisRestricted {
++ fn eq(&self, other: &Self) -> bool {
++ self.in_token == other.in_token && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Visibility {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Visibility {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
++ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
++ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
++ (Visibility::Inherited, Visibility::Inherited) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WhereClause {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WhereClause {
++ fn eq(&self, other: &Self) -> bool {
++ self.predicates == other.predicates
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WherePredicate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WherePredicate {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
++ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
++ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
+index f51218b78c..d9dd32a420 100644
+--- third_party/rust/syn/src/gen/fold.rs
++++ third_party/rust/syn/src/gen/fold.rs
+@@ -2,6 +2,7 @@
+ // It is not intended for manual editing.
+
+ #![allow(unreachable_code, unused_variables)]
++#![allow(clippy::match_wildcard_for_single_variants)]
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -26,7 +27,7 @@ macro_rules! full {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"fold"` feature.*
++/// *This trait is available only if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_abi(&mut self, i: Abi) -> Abi {
+@@ -433,35 +434,27 @@ pub trait Fold {
+ fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
+ fold_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit(&mut self, i: Lit) -> Lit {
+ fold_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
+ fold_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
+ fold_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
+ fold_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
+ fold_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
+ fold_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+ fold_lit_str(self, i)
+ }
+@@ -799,10 +792,10 @@ where
+ F: Fold + ?Sized,
+ {
+ AngleBracketedGenericArguments {
+- colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -819,9 +812,9 @@ where
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
++ fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
+ body: Box::new(f.fold_expr(*node.body)),
+- comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
++ comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -842,7 +835,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Attribute {
+- pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
++ pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
+ style: f.fold_attr_style(node.style),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ path: f.fold_path(node.path),
+@@ -859,7 +852,7 @@ where
+ name: (node.name).map(|it| {
+ (
+ f.fold_ident((it).0),
+- Token ! [ : ](tokens_helper(f, &(it).1.spans)),
++ Token ! [:](tokens_helper(f, &(it).1.spans)),
+ )
+ }),
+ ty: f.fold_type(node.ty),
+@@ -871,59 +864,47 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
++ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXor(_binding_0) => {
+- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAnd(_binding_0) => {
+- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::BitOr(_binding_0) => {
+- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::AddEq(_binding_0) => {
+- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::SubEq(_binding_0) => {
+- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::MulEq(_binding_0) => {
+- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::DivEq(_binding_0) => {
+- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::RemEq(_binding_0) => {
+- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
+ }
++ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXorEq(_binding_0) => {
+- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAndEq(_binding_0) => {
+- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitOrEq(_binding_0) => {
+- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShlEq(_binding_0) => {
+- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShrEq(_binding_0) => {
+- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -934,7 +915,7 @@ where
+ {
+ Binding {
+ ident: f.fold_ident(node.ident),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -955,9 +936,9 @@ where
+ {
+ BoundLifetimes {
+ for_token: Token![for](tokens_helper(f, &node.for_token.span)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -969,9 +950,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+ }
+@@ -982,7 +963,7 @@ where
+ {
+ Constraint {
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -1016,7 +997,7 @@ where
+ DataStruct {
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "derive")]
+@@ -1112,7 +1093,7 @@ where
+ ExprAssign {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ left: Box::new(f.fold_expr(*node.left)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+ }
+@@ -1148,7 +1129,7 @@ where
+ ExprAwait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
+ }
+ }
+@@ -1232,9 +1213,9 @@ where
+ asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
+ movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
+ capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
+- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
++ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
+ inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
+- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
++ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+@@ -1258,7 +1239,7 @@ where
+ ExprField {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ member: f.fold_member(node.member),
+ }
+ }
+@@ -1327,7 +1308,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+@@ -1384,7 +1365,7 @@ where
+ ExprMethodCall {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
+ paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
+@@ -1432,7 +1413,7 @@ where
+ {
+ ExprReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ raw: node.raw,
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ expr: Box::new(f.fold_expr(*node.expr)),
+@@ -1447,7 +1428,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+ }
+@@ -1484,7 +1465,7 @@ where
+ ExprTry {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
++ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1517,7 +1498,7 @@ where
+ ExprType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -1576,7 +1557,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -1588,7 +1569,7 @@ where
+ FieldPat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+@@ -1600,7 +1581,7 @@ where
+ FieldValue {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ expr: f.fold_expr(node.expr),
+ }
+ }
+@@ -1681,7 +1662,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1692,7 +1673,7 @@ where
+ ForeignItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1706,9 +1687,9 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1721,7 +1702,7 @@ where
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1779,9 +1760,9 @@ where
+ F: Fold + ?Sized,
+ {
+ Generics {
+- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
++ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
+ params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
+- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
++ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+ }
+@@ -1819,11 +1800,11 @@ where
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: f.fold_expr(node.expr),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1834,7 +1815,7 @@ where
+ ImplItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1862,9 +1843,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1913,11 +1894,11 @@ where
+ vis: f.fold_visibility(node.vis),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1952,7 +1933,7 @@ where
+ f.fold_ident((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2011,7 +1992,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2043,7 +2024,7 @@ where
+ FoldHelper::lift((it).1, |it| f.fold_item(it)),
+ )
+ }),
+- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2057,11 +2038,11 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2076,7 +2057,7 @@ where
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2092,7 +2073,7 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
+ brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
+ items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
+@@ -2109,9 +2090,9 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2125,9 +2106,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2153,9 +2134,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ use_token: Token![use](tokens_helper(f, &node.use_token.span)),
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ tree: f.fold_use_tree(node.tree),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2165,7 +2146,7 @@ where
+ {
+ Label {
+ name: f.fold_lifetime(node.name),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ }
+ }
+ pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
+@@ -2185,11 +2166,10 @@ where
+ LifetimeDef {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
+ where
+ F: Fold + ?Sized,
+@@ -2205,7 +2185,6 @@ where
+ Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
+ where
+ F: Fold + ?Sized,
+@@ -2215,7 +2194,6 @@ where
+ span: f.fold_span(node.span),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
+ where
+ F: Fold + ?Sized,
+@@ -2225,7 +2203,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
+ where
+ F: Fold + ?Sized,
+@@ -2235,7 +2212,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
+ where
+ F: Fold + ?Sized,
+@@ -2245,7 +2221,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
+ where
+ F: Fold + ?Sized,
+@@ -2255,7 +2230,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
+ where
+ F: Fold + ?Sized,
+@@ -2265,7 +2239,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
+ where
+ F: Fold + ?Sized,
+@@ -2286,11 +2259,11 @@ where
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2361,7 +2334,7 @@ where
+ {
+ MetaNameValue {
+ path: f.fold_path(node.path),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ lit: f.fold_lit(node.lit),
+ }
+ }
+@@ -2371,10 +2344,10 @@ where
+ F: Fold + ?Sized,
+ {
+ MethodTurbofish {
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2449,7 +2422,7 @@ where
+ ident: f.fold_ident(node.ident),
+ subpat: (node.subpat).map(|it| {
+ (
+- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
++ Token ! [@](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_pat(*(it).1)),
+ )
+ }),
+@@ -2482,7 +2455,7 @@ where
+ {
+ PatOr {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
++ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
+ cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
+ }
+ }
+@@ -2516,7 +2489,7 @@ where
+ {
+ PatReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+@@ -2585,7 +2558,7 @@ where
+ PatType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: Box::new(f.fold_pat(*node.pat)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -2605,7 +2578,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Path {
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
+ }
+ }
+@@ -2641,7 +2614,7 @@ where
+ {
+ PredicateEq {
+ lhs_ty: f.fold_type(node.lhs_ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ rhs_ty: f.fold_type(node.rhs_ty),
+ }
+ }
+@@ -2652,7 +2625,7 @@ where
+ {
+ PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+@@ -2664,7 +2637,7 @@ where
+ PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -2674,11 +2647,11 @@ where
+ F: Fold + ?Sized,
+ {
+ QSelf {
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2691,7 +2664,7 @@ where
+ RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
+ }
+ RangeLimits::Closed(_binding_0) => {
+- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
++ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2704,7 +2677,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ reference: (node.reference).map(|it| {
+ (
+- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
++ Token ! [&](tokens_helper(f, &(it).0.spans)),
+ ((it).1).map(|it| f.fold_lifetime(it)),
+ )
+ }),
+@@ -2720,7 +2693,7 @@ where
+ match node {
+ ReturnType::Default => ReturnType::Default,
+ ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
+- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
++ Token ! [->](tokens_helper(f, &_binding_0.spans)),
+ Box::new(f.fold_type(*_binding_1)),
+ ),
+ }
+@@ -2761,7 +2734,7 @@ where
+ Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
+ Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
+ f.fold_expr(_binding_0),
+- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
++ Token ! [;](tokens_helper(f, &_binding_1.spans)),
+ ),
+ }
+ }
+@@ -2785,7 +2758,7 @@ where
+ match node {
+ TraitBoundModifier::None => TraitBoundModifier::None,
+ TraitBoundModifier::Maybe(_binding_0) => {
+- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
++ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2812,15 +2785,15 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2831,7 +2804,7 @@ where
+ TraitItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2843,7 +2816,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2856,15 +2829,15 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_type((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2899,7 +2872,7 @@ where
+ TypeArray {
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ elem: Box::new(f.fold_type(*node.elem)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: f.fold_expr(node.len),
+ }
+ }
+@@ -2974,9 +2947,9 @@ where
+ TypeParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+ }
+@@ -3018,7 +2991,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypePtr {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3030,7 +3003,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypeReference {
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3072,9 +3045,9 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
+ UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
+- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3083,7 +3056,7 @@ where
+ F: Fold + ?Sized,
+ {
+ UseGlob {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3112,7 +3085,7 @@ where
+ {
+ UsePath {
+ ident: f.fold_ident(node.ident),
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+ }
+@@ -3147,7 +3120,7 @@ where
+ {
+ Variadic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
++ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -3161,7 +3134,7 @@ where
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
+new file mode 100644
+index 0000000000..9e9e84a7af
+--- /dev/null
++++ third_party/rust/syn/src/gen/hash.rs
+@@ -0,0 +1,2691 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++use std::hash::{Hash, Hasher};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Abi {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AngleBracketedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.colon2_token.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Arm {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.guard.hash(state);
++ self.body.hash(state);
++ self.comma.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AttrStyle {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ AttrStyle::Outer => {
++ state.write_u8(0u8);
++ }
++ AttrStyle::Inner(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Attribute {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.style.hash(state);
++ self.path.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BareFnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.name.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BinOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ BinOp::Add(_) => {
++ state.write_u8(0u8);
++ }
++ BinOp::Sub(_) => {
++ state.write_u8(1u8);
++ }
++ BinOp::Mul(_) => {
++ state.write_u8(2u8);
++ }
++ BinOp::Div(_) => {
++ state.write_u8(3u8);
++ }
++ BinOp::Rem(_) => {
++ state.write_u8(4u8);
++ }
++ BinOp::And(_) => {
++ state.write_u8(5u8);
++ }
++ BinOp::Or(_) => {
++ state.write_u8(6u8);
++ }
++ BinOp::BitXor(_) => {
++ state.write_u8(7u8);
++ }
++ BinOp::BitAnd(_) => {
++ state.write_u8(8u8);
++ }
++ BinOp::BitOr(_) => {
++ state.write_u8(9u8);
++ }
++ BinOp::Shl(_) => {
++ state.write_u8(10u8);
++ }
++ BinOp::Shr(_) => {
++ state.write_u8(11u8);
++ }
++ BinOp::Eq(_) => {
++ state.write_u8(12u8);
++ }
++ BinOp::Lt(_) => {
++ state.write_u8(13u8);
++ }
++ BinOp::Le(_) => {
++ state.write_u8(14u8);
++ }
++ BinOp::Ne(_) => {
++ state.write_u8(15u8);
++ }
++ BinOp::Ge(_) => {
++ state.write_u8(16u8);
++ }
++ BinOp::Gt(_) => {
++ state.write_u8(17u8);
++ }
++ BinOp::AddEq(_) => {
++ state.write_u8(18u8);
++ }
++ BinOp::SubEq(_) => {
++ state.write_u8(19u8);
++ }
++ BinOp::MulEq(_) => {
++ state.write_u8(20u8);
++ }
++ BinOp::DivEq(_) => {
++ state.write_u8(21u8);
++ }
++ BinOp::RemEq(_) => {
++ state.write_u8(22u8);
++ }
++ BinOp::BitXorEq(_) => {
++ state.write_u8(23u8);
++ }
++ BinOp::BitAndEq(_) => {
++ state.write_u8(24u8);
++ }
++ BinOp::BitOrEq(_) => {
++ state.write_u8(25u8);
++ }
++ BinOp::ShlEq(_) => {
++ state.write_u8(26u8);
++ }
++ BinOp::ShrEq(_) => {
++ state.write_u8(27u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Binding {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Block {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.stmts.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BoundLifetimes {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ConstParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Constraint {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for Data {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Data::Struct(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Data::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Data::Union(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DeriveInput {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.data.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Expr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Expr::Binary(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Expr::Call(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Expr::Cast(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Expr::Field(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ state.write_u8(16u8);
++ v0.hash(state);
++ }
++ Expr::Index(v0) => {
++ state.write_u8(17u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ state.write_u8(18u8);
++ v0.hash(state);
++ }
++ Expr::Lit(v0) => {
++ state.write_u8(19u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ state.write_u8(20u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ state.write_u8(21u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ state.write_u8(22u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ state.write_u8(23u8);
++ v0.hash(state);
++ }
++ Expr::Paren(v0) => {
++ state.write_u8(24u8);
++ v0.hash(state);
++ }
++ Expr::Path(v0) => {
++ state.write_u8(25u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ state.write_u8(26u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ state.write_u8(27u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ state.write_u8(28u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ state.write_u8(29u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ state.write_u8(30u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ state.write_u8(31u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ state.write_u8(32u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ state.write_u8(33u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ state.write_u8(34u8);
++ v0.hash(state);
++ }
++ Expr::Unary(v0) => {
++ state.write_u8(35u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ state.write_u8(36u8);
++ v0.hash(state);
++ }
++ Expr::Verbatim(v0) => {
++ state.write_u8(37u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ state.write_u8(38u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ state.write_u8(39u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssign {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssignOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAsync {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.capture.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAwait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprBinary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBreak {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.func.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCast {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprClosure {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.asyncness.hash(state);
++ self.movability.hash(state);
++ self.capture.hash(state);
++ self.inputs.hash(state);
++ self.output.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprContinue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprField {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ self.member.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprForLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprIf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.cond.hash(state);
++ self.then_branch.hash(state);
++ self.else_branch.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprIndex {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.index.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLet {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMatch {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.arms.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMethodCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.receiver.hash(state);
++ self.method.hash(state);
++ self.turbofish.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.from.hash(state);
++ self.limits.hash(state);
++ self.to.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRepeat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReturn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ self.rest.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTry {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTryBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprUnary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.op.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprUnsafe {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprWhile {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.cond.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprYield {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Field {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldPat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Fields {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Fields::Named(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Fields::Unnamed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Fields::Unit => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsNamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.named.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsUnnamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.unnamed.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for File {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.shebang.hash(state);
++ self.attrs.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ FnArg::Receiver(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ FnArg::Typed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ForeignItem::Fn(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ForeignItem::Static(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ForeignItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ForeignItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ForeignItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericArgument::Type(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericArgument::Binding(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ GenericArgument::Constraint(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ GenericArgument::Const(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for GenericMethodArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericMethodArgument::Const(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericParam::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericParam::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericParam::Const(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Generics {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lt_token.hash(state);
++ self.params.hash(state);
++ self.gt_token.hash(state);
++ self.where_clause.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ImplItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ImplItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ImplItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ImplItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ImplItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Item {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Item::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Item::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Item::ExternCrate(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Item::Fn(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Item::ForeignMod(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Item::Impl(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Item::Macro(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Item::Macro2(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Item::Mod(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Item::Static(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Item::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Item::Trait(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Item::TraitAlias(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Item::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Item::Union(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ Item::Use(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ Item::Verbatim(v0) => {
++ state.write_u8(16u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemExternCrate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemForeignMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.abi.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemImpl {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.defaultness.hash(state);
++ self.unsafety.hash(state);
++ self.generics.hash(state);
++ self.trait_.hash(state);
++ self.self_ty.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro2 {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ TokenStreamHelper(&self.rules).hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.content.hash(state);
++ self.semi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.unsafety.hash(state);
++ self.auto_token.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.supertraits.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTraitAlias {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUse {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.leading_colon.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Label {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for LifetimeDef {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lifetime.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++impl Hash for Lit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Lit::Str(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Lit::ByteStr(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Lit::Byte(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Lit::Char(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Lit::Int(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Lit::Float(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Lit::Bool(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Lit::Verbatim(v0) => {
++ state.write_u8(7u8);
++ v0.to_string().hash(state);
++ }
++ }
++ }
++}
++impl Hash for LitBool {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.value.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Local {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.init.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Macro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.delimiter.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MacroDelimiter {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ MacroDelimiter::Paren(_) => {
++ state.write_u8(0u8);
++ }
++ MacroDelimiter::Brace(_) => {
++ state.write_u8(1u8);
++ }
++ MacroDelimiter::Bracket(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Meta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Meta::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Meta::List(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Meta::NameValue(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaList {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.nested.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaNameValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for MethodTurbofish {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for NestedMeta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ NestedMeta::Meta(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ NestedMeta::Lit(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ParenthesizedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.inputs.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Pat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Pat::Box(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Pat::Ident(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Pat::Lit(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Pat::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Pat::Or(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Pat::Path(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Pat::Range(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Pat::Reference(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Pat::Rest(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Pat::Slice(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Pat::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Pat::Tuple(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Pat::TupleStruct(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Pat::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Pat::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ Pat::Wild(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatIdent {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.by_ref.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.subpat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatOr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.leading_vert.hash(state);
++ self.cases.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lo.hash(state);
++ self.limits.hash(state);
++ self.hi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRest {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTupleStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatWild {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Path {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.leading_colon.hash(state);
++ self.segments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ PathArguments::None => {
++ state.write_u8(0u8);
++ }
++ PathArguments::AngleBracketed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ PathArguments::Parenthesized(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathSegment {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.arguments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateEq {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lhs_ty.hash(state);
++ self.rhs_ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateLifetime {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.bounded_ty.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for QSelf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ty.hash(state);
++ self.position.hash(state);
++ self.as_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for RangeLimits {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ RangeLimits::HalfOpen(_) => {
++ state.write_u8(0u8);
++ }
++ RangeLimits::Closed(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Receiver {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.reference.hash(state);
++ self.mutability.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ReturnType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ReturnType::Default => {
++ state.write_u8(0u8);
++ }
++ ReturnType::Type(_, v1) => {
++ state.write_u8(1u8);
++ v1.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Signature {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.constness.hash(state);
++ self.asyncness.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Stmt {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Stmt::Local(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Stmt::Item(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Stmt::Expr(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Stmt::Semi(v0, _) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.paren_token.hash(state);
++ self.modifier.hash(state);
++ self.lifetimes.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBoundModifier {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitBoundModifier::None => {
++ state.write_u8(0u8);
++ }
++ TraitBoundModifier::Maybe(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TraitItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ TraitItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ TraitItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ TraitItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.sig.hash(state);
++ self.default.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Type {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Type::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Type::BareFn(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Type::Group(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Type::ImplTrait(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Type::Infer(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Type::Macro(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Type::Never(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Type::Paren(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Type::Path(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Type::Ptr(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Type::Reference(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Type::Slice(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Type::TraitObject(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Type::Tuple(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Type::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeBareFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeImplTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeInfer {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.mac.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeNever {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParamBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TypeParamBound::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePtr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.const_token.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTraitObject {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.dyn_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elems.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for UnOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UnOp::Deref(_) => {
++ state.write_u8(0u8);
++ }
++ UnOp::Not(_) => {
++ state.write_u8(1u8);
++ }
++ UnOp::Neg(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGlob {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseName {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UsePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseRename {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseTree {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UseTree::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ UseTree::Name(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ UseTree::Rename(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ UseTree::Glob(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ UseTree::Group(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variadic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variant {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.fields.hash(state);
++ self.discriminant.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisCrate {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisPublic {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisRestricted {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.in_token.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Visibility {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Visibility::Public(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Visibility::Crate(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Visibility::Restricted(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Visibility::Inherited => {
++ state.write_u8(3u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WhereClause {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.predicates.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WherePredicate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ WherePredicate::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ WherePredicate::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ WherePredicate::Eq(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
+index b667f530c3..24d34b7480 100644
+--- third_party/rust/syn/src/gen/visit.rs
++++ third_party/rust/syn/src/gen/visit.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -30,7 +29,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit"` feature.*
++/// *This trait is available only if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi(&mut self, i: &'ast Abi) {
+@@ -434,35 +433,27 @@ pub trait Visit<'ast> {
+ fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
+ visit_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit(&mut self, i: &'ast Lit) {
+ visit_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool(&mut self, i: &'ast LitBool) {
+ visit_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte(&mut self, i: &'ast LitByte) {
+ visit_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
+ visit_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char(&mut self, i: &'ast LitChar) {
+ visit_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float(&mut self, i: &'ast LitFloat) {
+ visit_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int(&mut self, i: &'ast LitInt) {
+ visit_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str(&mut self, i: &'ast LitStr) {
+ visit_lit_str(self, i)
+ }
+@@ -2537,7 +2528,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2569,7 +2559,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2577,37 +2566,31 @@ where
+ skip!(node.value);
+ v.visit_span(&node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
+ where
+ V: Visit<'ast> + ?Sized,
+diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
+index 5cddb827c6..5ce11f0b2e 100644
+--- third_party/rust/syn/src/gen/visit_mut.rs
++++ third_party/rust/syn/src/gen/visit_mut.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -31,7 +30,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
++/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi_mut(&mut self, i: &mut Abi) {
+@@ -438,35 +437,27 @@ pub trait VisitMut {
+ fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
+ visit_lifetime_def_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_mut(&mut self, i: &mut Lit) {
+ visit_lit_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
+ visit_lit_bool_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
+ visit_lit_byte_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
+ visit_lit_byte_str_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
+ visit_lit_char_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
+ visit_lit_float_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
+ visit_lit_int_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
+ visit_lit_str_mut(self, i)
+ }
+@@ -2543,7 +2534,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
+ where
+ V: VisitMut + ?Sized,
+@@ -2575,7 +2565,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
+ where
+ V: VisitMut + ?Sized,
+@@ -2583,37 +2572,31 @@ where
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
+ where
+ V: VisitMut + ?Sized,
+diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
+index 95ab2e404a..05e8ef5cdf 100644
+--- third_party/rust/syn/src/generics.rs
++++ third_party/rust/syn/src/generics.rs
+@@ -1,13 +1,16 @@
+ use super::*;
+ use crate::punctuated::{Iter, IterMut, Punctuated};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::fmt::{self, Debug};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+@@ -20,7 +23,7 @@ ast_enum_of_structs! {
+ /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -28,9 +31,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum GenericParam {
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+@@ -46,7 +46,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+@@ -61,7 +61,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct LifetimeDef {
+ pub attrs: Vec<Attribute>,
+@@ -74,7 +74,7 @@ ast_struct! {
+ ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+@@ -87,6 +87,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for Generics {
++ fn default() -> Self {
++ Generics {
++ lt_token: None,
++ params: Punctuated::new(),
++ gt_token: None,
++ where_clause: None,
++ }
++ }
++}
++
+ impl Generics {
+ /// Returns an
+ /// <code
+@@ -280,29 +291,23 @@ impl<'a> Iterator for ConstParamsMut<'a> {
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct ImplGenerics<'a>(&'a Generics);
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct TypeGenerics<'a>(&'a Generics);
+
+ /// Returned by `TypeGenerics::as_turbofish`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Turbofish<'a>(&'a Generics);
+
+ #[cfg(feature = "printing")]
+@@ -314,9 +319,8 @@ impl Generics {
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+- /// # fn main() {
+- /// # let generics: syn::Generics = Default::default();
+- /// # let name = Ident::new("MyType", Span::call_site());
++ /// # let generics: syn::Generics = Default::default();
++ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+ /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+@@ -324,11 +328,10 @@ impl Generics {
+ /// // ...
+ /// }
+ /// }
+- /// # ;
+- /// # }
++ /// # ;
+ /// ```
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+@@ -339,11 +342,57 @@ impl Generics {
+ }
+ }
+
++#[cfg(feature = "printing")]
++macro_rules! generics_wrapper_impls {
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl<'a> Clone for $ty<'a> {
++ fn clone(&self) -> Self {
++ $ty(self.0)
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Debug for $ty<'a> {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter
++ .debug_tuple(stringify!($ty))
++ .field(self.0)
++ .finish()
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Eq for $ty<'a> {}
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> PartialEq for $ty<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ self.0 == other.0
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Hash for $ty<'a> {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ self.0.hash(state);
++ }
++ }
++ };
++}
++
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(ImplGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(TypeGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(Turbofish);
++
+ #[cfg(feature = "printing")]
+ impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+@@ -353,9 +402,8 @@ impl<'a> TypeGenerics<'a> {
+ ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+@@ -364,6 +412,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for BoundLifetimes {
++ fn default() -> Self {
++ BoundLifetimes {
++ for_token: Default::default(),
++ lt_token: Default::default(),
++ lifetimes: Punctuated::new(),
++ gt_token: Default::default(),
++ }
++ }
++}
++
+ impl LifetimeDef {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeDef {
+@@ -391,7 +450,7 @@ impl From<Ident> for TypeParam {
+ ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+@@ -402,7 +461,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+@@ -418,9 +477,8 @@ ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+@@ -431,7 +489,7 @@ ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct WhereClause {
+ pub where_token: Token![where],
+@@ -442,7 +500,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -450,9 +508,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum WherePredicate {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+@@ -468,7 +523,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+@@ -484,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+@@ -496,7 +551,7 @@ ast_struct! {
+ ast_struct! {
+ /// An equality predicate in a `where` clause (unsupported).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateEq {
+ pub lhs_ty: Type,
+@@ -521,7 +576,6 @@ pub mod parsing {
+
+ let mut params = Punctuated::new();
+ let mut allow_lifetime_param = true;
+- let mut allow_type_param = true;
+ loop {
+ if input.peek(Token![>]) {
+ break;
+@@ -534,7 +588,7 @@ pub mod parsing {
+ attrs,
+ ..input.parse()?
+ }));
+- } else if allow_type_param && lookahead.peek(Ident) {
++ } else if lookahead.peek(Ident) {
+ allow_lifetime_param = false;
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+@@ -542,7 +596,6 @@ pub mod parsing {
+ }));
+ } else if lookahead.peek(Token![const]) {
+ allow_lifetime_param = false;
+- allow_type_param = false;
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+@@ -665,57 +718,53 @@ pub mod parsing {
+
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let has_colon;
+- let has_default;
+- Ok(TypeParam {
+- attrs: input.call(Attribute::parse_outer)?,
+- ident: input.parse()?,
+- colon_token: {
+- if input.peek(Token![:]) {
+- has_colon = true;
+- Some(input.parse()?)
+- } else {
+- has_colon = false;
+- None
+- }
+- },
+- bounds: {
+- let mut bounds = Punctuated::new();
+- if has_colon {
+- loop {
+- if input.peek(Token![,])
+- || input.peek(Token![>])
+- || input.peek(Token![=])
+- {
+- break;
+- }
+- let value = input.parse()?;
+- bounds.push_value(value);
+- if !input.peek(Token![+]) {
+- break;
+- }
+- let punct = input.parse()?;
+- bounds.push_punct(punct);
+- }
++ let attrs = input.call(Attribute::parse_outer)?;
++ let ident: Ident = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++
++ let begin_bound = input.fork();
++ let mut is_maybe_const = false;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
++ break;
+ }
+- bounds
+- },
+- eq_token: {
+- if input.peek(Token![=]) {
+- has_default = true;
+- Some(input.parse()?)
+- } else {
+- has_default = false;
+- None
++ if input.peek(Token![?]) && input.peek2(Token![const]) {
++ input.parse::<Token![?]>()?;
++ input.parse::<Token![const]>()?;
++ is_maybe_const = true;
+ }
+- },
+- default: {
+- if has_default {
+- Some(input.parse()?)
+- } else {
+- None
++ let value: TypeParamBound = input.parse()?;
++ bounds.push_value(value);
++ if !input.peek(Token![+]) {
++ break;
+ }
+- },
++ let punct: Token![+] = input.parse()?;
++ bounds.push_punct(punct);
++ }
++ }
++
++ let mut eq_token: Option<Token![=]> = input.parse()?;
++ let mut default = if eq_token.is_some() {
++ Some(input.parse::<Type>()?)
++ } else {
++ None
++ };
++
++ if is_maybe_const {
++ bounds.clear();
++ eq_token = None;
++ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
++ }
++
++ Ok(TypeParam {
++ attrs,
++ ident,
++ colon_token,
++ bounds,
++ eq_token,
++ default,
+ })
+ }
+ }
+@@ -898,6 +947,8 @@ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
++ #[cfg(feature = "full")]
++ use proc_macro2::TokenTree;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+@@ -1080,9 +1131,25 @@ mod printing {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
++ #[cfg(feature = "full")]
++ {
++ if self.eq_token.is_none() {
++ if let Type::Verbatim(default) = default {
++ let mut iter = default.clone().into_iter();
++ match (iter.next(), iter.next()) {
++ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
++ if q.as_char() == '?' && c == "const" =>
++ {
++ return default.to_tokens(tokens);
++ }
++ _ => {}
++ }
++ }
++ }
++ }
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+@@ -1117,9 +1184,9 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
+index ff4485ace9..0d8f7d3ddc 100644
+--- third_party/rust/syn/src/item.rs
++++ third_party/rust/syn/src/item.rs
+@@ -1,17 +1,15 @@
+ use super::*;
+-use crate::derive::{Data, DeriveInput};
++use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::punctuated::Punctuated;
+ use proc_macro2::TokenStream;
+
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
++#[cfg(feature = "parsing")]
++use std::mem;
+
+ ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -21,7 +19,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Item #manual_extra_traits {
++ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+@@ -83,7 +81,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -100,7 +98,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -115,7 +113,7 @@ ast_struct! {
+ ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -131,7 +129,7 @@ ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -143,7 +141,7 @@ ast_struct! {
+ ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub abi: Abi,
+@@ -156,7 +154,7 @@ ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+@@ -175,7 +173,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+@@ -188,8 +186,8 @@ ast_struct! {
+ ast_struct! {
+ /// A 2.0-style declarative macro introduced by the `macro` keyword.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- pub struct ItemMacro2 #manual_extra_traits {
++ /// *This type is available only if Syn is built with the `"full"` feature.*
++ pub struct ItemMacro2 {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub macro_token: Token![macro],
+@@ -201,7 +199,7 @@ ast_struct! {
+ ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -215,7 +213,7 @@ ast_struct! {
+ ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -233,7 +231,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -248,7 +246,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -267,7 +265,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -283,7 +281,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -299,7 +297,7 @@ ast_struct! {
+ ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -313,7 +311,7 @@ ast_struct! {
+ ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -324,145 +322,32 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Item {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Item {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Item::Const(this), Item::Const(other)) => this == other,
+- (Item::Enum(this), Item::Enum(other)) => this == other,
+- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
+- (Item::Fn(this), Item::Fn(other)) => this == other,
+- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
+- (Item::Impl(this), Item::Impl(other)) => this == other,
+- (Item::Macro(this), Item::Macro(other)) => this == other,
+- (Item::Macro2(this), Item::Macro2(other)) => this == other,
+- (Item::Mod(this), Item::Mod(other)) => this == other,
+- (Item::Static(this), Item::Static(other)) => this == other,
+- (Item::Struct(this), Item::Struct(other)) => this == other,
+- (Item::Trait(this), Item::Trait(other)) => this == other,
+- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
+- (Item::Type(this), Item::Type(other)) => this == other,
+- (Item::Union(this), Item::Union(other)) => this == other,
+- (Item::Use(this), Item::Use(other)) => this == other,
+- (Item::Verbatim(this), Item::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Item {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
++impl Item {
++ #[cfg(feature = "parsing")]
++ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+- Item::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- Item::Enum(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- Item::ExternCrate(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- Item::Fn(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- Item::ForeignMod(item) => {
+- state.write_u8(4);
+- item.hash(state);
+- }
+- Item::Impl(item) => {
+- state.write_u8(5);
+- item.hash(state);
+- }
+- Item::Macro(item) => {
+- state.write_u8(6);
+- item.hash(state);
+- }
+- Item::Macro2(item) => {
+- state.write_u8(7);
+- item.hash(state);
+- }
+- Item::Mod(item) => {
+- state.write_u8(8);
+- item.hash(state);
+- }
+- Item::Static(item) => {
+- state.write_u8(9);
+- item.hash(state);
+- }
+- Item::Struct(item) => {
+- state.write_u8(10);
+- item.hash(state);
+- }
+- Item::Trait(item) => {
+- state.write_u8(11);
+- item.hash(state);
+- }
+- Item::TraitAlias(item) => {
+- state.write_u8(12);
+- item.hash(state);
+- }
+- Item::Type(item) => {
+- state.write_u8(13);
+- item.hash(state);
+- }
+- Item::Union(item) => {
+- state.write_u8(14);
+- item.hash(state);
+- }
+- Item::Use(item) => {
+- state.write_u8(15);
+- item.hash(state);
+- }
+- Item::Verbatim(item) => {
+- state.write_u8(16);
+- TokenStreamHelper(item).hash(state);
+- }
++ Item::ExternCrate(ItemExternCrate { attrs, .. })
++ | Item::Use(ItemUse { attrs, .. })
++ | Item::Static(ItemStatic { attrs, .. })
++ | Item::Const(ItemConst { attrs, .. })
++ | Item::Fn(ItemFn { attrs, .. })
++ | Item::Mod(ItemMod { attrs, .. })
++ | Item::ForeignMod(ItemForeignMod { attrs, .. })
++ | Item::Type(ItemType { attrs, .. })
++ | Item::Struct(ItemStruct { attrs, .. })
++ | Item::Enum(ItemEnum { attrs, .. })
++ | Item::Union(ItemUnion { attrs, .. })
++ | Item::Trait(ItemTrait { attrs, .. })
++ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
++ | Item::Impl(ItemImpl { attrs, .. })
++ | Item::Macro(ItemMacro { attrs, .. })
++ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
++ Item::Verbatim(_) => Vec::new(),
+ Item::__Nonexhaustive => unreachable!(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ItemMacro2 {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ItemMacro2 {
+- fn eq(&self, other: &Self) -> bool {
+- self.attrs == other.attrs
+- && self.vis == other.vis
+- && self.macro_token == other.macro_token
+- && self.ident == other.ident
+- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ItemMacro2 {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.attrs.hash(state);
+- self.vis.hash(state);
+- self.macro_token.hash(state);
+- self.ident.hash(state);
+- TokenStreamHelper(&self.rules).hash(state);
+- }
+-}
+-
+ impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+@@ -496,10 +381,57 @@ impl From<DeriveInput> for Item {
+ }
+ }
+
++impl From<ItemStruct> for DeriveInput {
++ fn from(input: ItemStruct) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Struct(DataStruct {
++ struct_token: input.struct_token,
++ fields: input.fields,
++ semi_token: input.semi_token,
++ }),
++ }
++ }
++}
++
++impl From<ItemEnum> for DeriveInput {
++ fn from(input: ItemEnum) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Enum(DataEnum {
++ enum_token: input.enum_token,
++ brace_token: input.brace_token,
++ variants: input.variants,
++ }),
++ }
++ }
++}
++
++impl From<ItemUnion> for DeriveInput {
++ fn from(input: ItemUnion) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Union(DataUnion {
++ union_token: input.union_token,
++ fields: input.fields,
++ }),
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -530,7 +462,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+@@ -541,7 +473,7 @@ ast_struct! {
+ ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseName {
+ pub ident: Ident,
+ }
+@@ -550,7 +482,7 @@ ast_struct! {
+ ast_struct! {
+ /// An renamed identifier imported by a `use` item: `HashMap as Map`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+@@ -561,7 +493,7 @@ ast_struct! {
+ ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+@@ -570,7 +502,7 @@ ast_struct! {
+ ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+@@ -580,7 +512,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -590,7 +522,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ForeignItem #manual_extra_traits {
++ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+@@ -614,7 +546,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A foreign function in an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -626,7 +558,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -642,7 +574,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -655,7 +587,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an extern block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -663,61 +595,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ForeignItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ForeignItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
+- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
+- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
+- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
+- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ForeignItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ForeignItem::Fn(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ForeignItem::Static(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ForeignItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ForeignItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ForeignItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ForeignItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -727,7 +608,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum TraitItem #manual_extra_traits {
++ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+@@ -751,7 +632,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+@@ -766,7 +647,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait method within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+@@ -778,7 +659,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+@@ -794,7 +675,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -802,61 +683,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for TraitItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for TraitItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
+- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
+- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
+- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
+- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for TraitItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- TraitItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- TraitItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- TraitItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- TraitItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- TraitItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- TraitItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -866,7 +696,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ImplItem #manual_extra_traits {
++ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+@@ -890,7 +720,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -908,7 +738,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -921,7 +751,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -938,7 +768,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -946,62 +776,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ImplItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ImplItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
+- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
+- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
+- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
+- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ImplItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ImplItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ImplItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ImplItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ImplItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ImplItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ImplItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+@@ -1017,13 +796,34 @@ ast_struct! {
+ }
+ }
+
++impl Signature {
++ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
++ pub fn receiver(&self) -> Option<&FnArg> {
++ let arg = self.inputs.first()?;
++ match arg {
++ FnArg::Receiver(_) => Some(arg),
++ FnArg::Typed(PatType { pat, .. }) => {
++ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
++ if ident == "self" {
++ return Some(arg);
++ }
++ }
++ None
++ }
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum FnArg {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
++ ///
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+@@ -1035,7 +835,10 @@ ast_struct! {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+@@ -1056,7 +859,8 @@ pub mod parsing {
+
+ use crate::ext::IdentExt;
+ use crate::parse::discouraged::Speculative;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
++ use crate::token::Brace;
+ use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
+ use std::iter::{self, FromIterator};
+
+@@ -1064,18 +868,26 @@ pub mod parsing {
+
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![extern]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
++ }
++ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+@@ -1083,8 +895,6 @@ pub mod parsing {
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1094,18 +904,61 @@ pub mod parsing {
+ } else if lookahead.peek(Token![use]) {
+ input.parse().map(Item::Use)
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(Item::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Static(ItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+- input.parse().map(Item::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let vis = input.parse()?;
++ let const_token = input.parse()?;
++ let ident = {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ };
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Const(ItemConst {
++ attrs: Vec::new(),
++ vis,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1117,21 +970,19 @@ pub mod parsing {
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+- input.parse().map(Item::Impl)
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(Item::Type)
++ parse_item_type(begin, input)
+ } else if lookahead.peek(existential) {
+ input.call(item_existential).map(Item::Verbatim)
+ } else if lookahead.peek(Token![struct]) {
+@@ -1147,14 +998,18 @@ pub mod parsing {
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+- input.parse().map(Item::Impl)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Token![macro]) {
+ input.parse().map(Item::Macro2)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1163,32 +1018,64 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- Item::ExternCrate(item) => &mut item.attrs,
+- Item::Use(item) => &mut item.attrs,
+- Item::Static(item) => &mut item.attrs,
+- Item::Const(item) => &mut item.attrs,
+- Item::Fn(item) => &mut item.attrs,
+- Item::Mod(item) => &mut item.attrs,
+- Item::ForeignMod(item) => &mut item.attrs,
+- Item::Type(item) => &mut item.attrs,
+- Item::Struct(item) => &mut item.attrs,
+- Item::Enum(item) => &mut item.attrs,
+- Item::Union(item) => &mut item.attrs,
+- Item::Trait(item) => &mut item.attrs,
+- Item::TraitAlias(item) => &mut item.attrs,
+- Item::Impl(item) => &mut item.attrs,
+- Item::Macro(item) => &mut item.attrs,
+- Item::Macro2(item) => &mut item.attrs,
+- Item::Verbatim(_) => return Ok(item),
+- Item::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(item)
++ }
++ }
++
++ struct FlexibleItemType {
++ vis: Visibility,
++ defaultness: Option<Token![default]>,
++ type_token: Token![type],
++ ident: Ident,
++ generics: Generics,
++ colon_token: Option<Token![:]>,
++ bounds: Punctuated<TypeParamBound, Token![+]>,
++ ty: Option<(Token![=], Type)>,
++ semi_token: Token![;],
++ }
++
++ impl Parse for FlexibleItemType {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let type_token: Token![type] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let mut generics: Generics = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ bounds.push_value(input.parse::<TypeParamBound>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ bounds.push_punct(input.parse::<Token![+]>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ }
+ }
++ generics.where_clause = input.parse()?;
++ let ty = if let Some(eq_token) = input.parse()? {
++ Some((eq_token, input.parse::<Type>()?))
++ } else {
++ None
++ };
++ let semi_token: Token![;] = input.parse()?;
+
+- Ok(item)
++ Ok(FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ })
+ }
+ }
+
+@@ -1310,7 +1197,6 @@ pub mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+- || lookahead.peek(Token![extern])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+@@ -1392,69 +1278,126 @@ pub mod parsing {
+ }
+ }
+
+- impl Parse for ItemFn {
+- fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let vis: Visibility = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
++ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
++ let trailing_punct = args.trailing_punct();
+
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+- let variadic = inputs.last().as_ref().and_then(get_variadic);
+-
+- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
+- if let FnArg::Typed(PatType { ty, .. }) = input {
+- if let Type::Verbatim(tokens) = &**ty {
+- if let Ok(dots) = parse2(tokens.clone()) {
+- return Some(Variadic {
+- attrs: Vec::new(),
+- dots,
+- });
+- }
+- }
+- }
+- None
++ let last = match args.last_mut()? {
++ FnArg::Typed(last) => last,
++ _ => return None,
++ };
++
++ let ty = match last.ty.as_ref() {
++ Type::Verbatim(ty) => ty,
++ _ => return None,
++ };
++
++ let mut variadic = Variadic {
++ attrs: Vec::new(),
++ dots: parse2(ty.clone()).ok()?,
++ };
++
++ if let Pat::Verbatim(pat) = last.pat.as_ref() {
++ if pat.to_string() == "..." && !trailing_punct {
++ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
++ args.pop();
+ }
++ }
+
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ Some(variadic)
++ }
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
++ TokenStream::from_iter(vec![
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[0]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[1]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Alone);
++ dot.set_span(dots.spans[2]);
++ dot
++ }),
++ ])
++ }
+
+- Ok(ItemFn {
+- attrs: private::attrs(outer_attrs, inner_attrs),
+- vis,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Box::new(Block { brace_token, stmts }),
+- })
++ fn peek_signature(input: ParseStream) -> bool {
++ let fork = input.fork();
++ fork.parse::<Option<Token![const]>>().is_ok()
++ && fork.parse::<Option<Token![async]>>().is_ok()
++ && fork.parse::<Option<Token![unsafe]>>().is_ok()
++ && fork.parse::<Option<Abi>>().is_ok()
++ && fork.peek(Token![fn])
++ }
++
++ fn parse_signature(input: ParseStream) -> Result<Signature> {
++ let constness: Option<Token![const]> = input.parse()?;
++ let asyncness: Option<Token![async]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let abi: Option<Abi> = input.parse()?;
++ let fn_token: Token![fn] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let generics: Generics = input.parse()?;
++
++ let content;
++ let paren_token = parenthesized!(content in input);
++ let mut inputs = parse_fn_args(&content)?;
++ let variadic = pop_variadic(&mut inputs);
++
++ let output: ReturnType = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ Ok(Signature {
++ constness,
++ asyncness,
++ unsafety,
++ abi,
++ fn_token,
++ ident,
++ paren_token,
++ inputs,
++ output,
++ variadic,
++ generics: Generics {
++ where_clause,
++ ..generics
++ },
++ })
++ }
++
++ impl Parse for ItemFn {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ parse_rest_of_fn(input, outer_attrs, vis, sig)
+ }
+ }
+
++ fn parse_rest_of_fn(
++ input: ParseStream,
++ outer_attrs: Vec<Attribute>,
++ vis: Visibility,
++ sig: Signature,
++ ) -> Result<ItemFn> {
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let stmts = content.call(Block::parse_within)?;
++
++ Ok(ItemFn {
++ attrs: private::attrs(outer_attrs, inner_attrs),
++ vis,
++ sig,
++ block: Box::new(Block { brace_token, stmts }),
++ })
++ }
++
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1491,26 +1434,79 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
++ let mut args = Punctuated::new();
++ let mut has_receiver = false;
++
++ while !input.is_empty() {
++ let attrs = input.call(Attribute::parse_outer)?;
++
++ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
++ FnArg::Typed(PatType {
++ attrs,
++ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
++ colon_token: Token![:](dots.spans[0]),
++ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
++ })
++ } else {
++ let mut arg: FnArg = input.parse()?;
++ match &mut arg {
++ FnArg::Receiver(receiver) if has_receiver => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected second method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) if !args.is_empty() => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) => {
++ has_receiver = true;
++ receiver.attrs = attrs;
++ }
++ FnArg::Typed(arg) => arg.attrs = attrs,
++ }
++ arg
++ };
++ args.push_value(arg);
++
++ if input.is_empty() {
++ break;
++ }
++
++ let comma: Token![,] = input.parse()?;
++ args.push_punct(comma);
++ }
++
++ Ok(args)
++ }
++
+ fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
++ // Hack to parse pre-2018 syntax in
++ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
++ // because the rest of the test case is valuable.
++ if input.peek(Ident) && input.peek2(Token![<]) {
++ let span = input.fork().parse::<Ident>()?.span();
++ return Ok(PatType {
++ attrs: Vec::new(),
++ pat: Box::new(Pat::Wild(PatWild {
++ attrs: Vec::new(),
++ underscore_token: Token![_](span),
++ })),
++ colon_token: Token![:](span),
++ ty: input.parse()?,
++ });
++ }
++
+ Ok(PatType {
+ attrs: Vec::new(),
+- pat: input.parse()?,
++ pat: Box::new(pat::parsing::multi_pat(input)?),
+ colon_token: input.parse()?,
+ ty: Box::new(match input.parse::<Option<Token![...]>>()? {
+- Some(dot3) => {
+- let args = vec![
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
+- ];
+- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
+- |(mut arg, span)| {
+- arg.set_span(*span);
+- arg
+- },
+- ));
+- Type::Verbatim(tokens)
+- }
++ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
+ None => input.parse()?,
+ }),
+ })
+@@ -1581,22 +1577,60 @@ pub mod parsing {
+
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![fn]) {
+- input.parse().map(ForeignItem::Fn)
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(token::Brace) {
++ let content;
++ braced!(content in input);
++ content.call(Attribute::parse_inner)?;
++ content.call(Block::parse_within)?;
++
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Fn(ForeignItemFn {
++ attrs: Vec::new(),
++ vis,
++ sig,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(ForeignItem::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![=]) {
++ input.parse::<Token![=]>()?;
++ input.parse::<Expr>()?;
++ input.parse::<Token![;]>()?;
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Static(ForeignItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ForeignItem::Type)
++ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1605,17 +1639,16 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- ForeignItem::Fn(item) => &mut item.attrs,
+- ForeignItem::Static(item) => &mut item.attrs,
+- ForeignItem::Type(item) => &mut item.attrs,
+- ForeignItem::Macro(item) => &mut item.attrs,
+- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
+- }
++ let item_attrs = match &mut item {
++ ForeignItem::Fn(item) => &mut item.attrs,
++ ForeignItem::Static(item) => &mut item.attrs,
++ ForeignItem::Type(item) => &mut item.attrs,
++ ForeignItem::Macro(item) => &mut item.attrs,
++ ForeignItem::Verbatim(_) => return Ok(item),
++ ForeignItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+
+ Ok(item)
+ }
+@@ -1625,55 +1658,12 @@ pub mod parsing {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let mut inputs = Punctuated::new();
+- let mut variadic = None;
+- while !content.is_empty() {
+- let attrs = content.call(Attribute::parse_outer)?;
+-
+- if let Some(dots) = content.parse()? {
+- variadic = Some(Variadic { attrs, dots });
+- break;
+- }
+-
+- let mut arg = content.call(fn_arg_typed)?;
+- arg.attrs = attrs;
+- inputs.push_value(FnArg::Typed(arg));
+- if content.is_empty() {
+- break;
+- }
+-
+- inputs.push_punct(content.parse()?);
+- }
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+ let semi_token: Token![;] = input.parse()?;
+-
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+- sig: Signature {
+- constness: None,
+- asyncness: None,
+- unsafety: None,
+- abi: None,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ semi_token,
+ })
+ }
+@@ -1706,6 +1696,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some()
++ || generics.lt_token.is_some()
++ || generics.where_clause.is_some()
++ || colon_token.is_some()
++ || ty.is_some()
++ {
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Type(ForeignItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1742,6 +1763,36 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(Item::Type(ItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty: Box::new(ty),
++ semi_token,
++ }))
++ }
++ }
++
+ #[cfg(not(feature = "printing"))]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ Err(input.error("existential type is not supported"))
+@@ -1887,7 +1938,7 @@ pub mod parsing {
+
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let attrs = input.call(Attribute::parse_outer)?;
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+@@ -1896,7 +1947,7 @@ pub mod parsing {
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+- attrs,
++ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+@@ -1909,7 +1960,7 @@ pub mod parsing {
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+- attrs: Vec<Attribute>,
++ outer_attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+@@ -1937,13 +1988,14 @@ pub mod parsing {
+
+ let content;
+ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+- attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ vis,
+ unsafety,
+ auto_token,
+@@ -2014,14 +2066,19 @@ pub mod parsing {
+
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![const]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(TraitItem::Method)
++ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.parse().map(TraitItem::Const)
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+@@ -2032,18 +2089,11 @@ pub mod parsing {
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(TraitItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(TraitItem::Type)
++ parse_trait_item_type(begin.fork(), input)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ {
+@@ -2052,18 +2102,20 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- TraitItem::Const(item) => &mut item.attrs,
+- TraitItem::Method(item) => &mut item.attrs,
+- TraitItem::Type(item) => &mut item.attrs,
+- TraitItem::Macro(item) => &mut item.attrs,
+- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ match (vis, defaultness) {
++ (Visibility::Inherited, None) => {}
++ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ }
+
++ let item_attrs = match &mut item {
++ TraitItem::Const(item) => &mut item.attrs,
++ TraitItem::Method(item) => &mut item.attrs,
++ TraitItem::Type(item) => &mut item.attrs,
++ TraitItem::Macro(item) => &mut item.attrs,
++ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+@@ -2073,7 +2125,14 @@ pub mod parsing {
+ Ok(TraitItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ default: {
+@@ -2093,20 +2152,7 @@ pub mod parsing {
+ impl Parse for TraitItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
+@@ -2124,22 +2170,7 @@ pub mod parsing {
+
+ Ok(TraitItemMethod {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+@@ -2188,6 +2219,35 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || vis.is_some() {
++ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(TraitItem::Type(TraitItemType {
++ attrs: Vec::new(),
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ default: ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2207,52 +2267,67 @@ pub mod parsing {
+
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let defaultness: Option<Token![default]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let impl_token: Token![impl] = input.parse()?;
+-
+- let has_generics = input.peek(Token![<])
+- && (input.peek2(Token![>])
+- || input.peek2(Token![#])
+- || (input.peek2(Ident) || input.peek2(Lifetime))
+- && (input.peek3(Token![:])
+- || input.peek3(Token![,])
+- || input.peek3(Token![>])));
+- let generics: Generics = if has_generics {
+- input.parse()?
+- } else {
+- Generics::default()
+- };
+-
+- let trait_ = {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- if ahead.parse::<Option<Token![!]>>().is_ok()
+- && ahead.parse::<Path>().is_ok()
+- && ahead.parse::<Token![for]>().is_ok()
+- {
+- let polarity: Option<Token![!]> = input.parse()?;
+- let path: Path = input.parse()?;
+- let for_token: Token![for] = input.parse()?;
+- Some((polarity, path, for_token))
+- } else {
+- None
+- }
+- };
+- let self_ty: Type = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let allow_const_impl = false;
++ parse_impl(input, allow_const_impl).map(Option::unwrap)
++ }
++ }
++
++ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let impl_token: Token![impl] = input.parse()?;
++
++ let has_generics = input.peek(Token![<])
++ && (input.peek2(Token![>])
++ || input.peek2(Token![#])
++ || (input.peek2(Ident) || input.peek2(Lifetime))
++ && (input.peek3(Token![:])
++ || input.peek3(Token![,])
++ || input.peek3(Token![>]))
++ || input.peek2(Token![const]));
++ let generics: Generics = if has_generics {
++ input.parse()?
++ } else {
++ Generics::default()
++ };
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
++ let is_const_impl = allow_const_impl
++ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
++ if is_const_impl {
++ input.parse::<Option<Token![?]>>()?;
++ input.parse::<Token![const]>()?;
++ }
+
+- let mut items = Vec::new();
+- while !content.is_empty() {
+- items.push(content.parse()?);
++ let trait_ = (|| -> Option<_> {
++ let ahead = input.fork();
++ let polarity: Option<Token![!]> = ahead.parse().ok()?;
++ let mut path: Path = ahead.parse().ok()?;
++ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
++ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
++ path.segments.last_mut().unwrap().arguments = parenthesized;
+ }
++ let for_token: Token![for] = ahead.parse().ok()?;
++ input.advance_to(&ahead);
++ Some((polarity, path, for_token))
++ })();
++
++ let self_ty: Type = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++
++ let mut items = Vec::new();
++ while !content.is_empty() {
++ items.push(content.parse()?);
++ }
+
+- Ok(ItemImpl {
++ if is_const_impl {
++ Ok(None)
++ } else {
++ Ok(Some(ItemImpl {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+ defaultness,
+ unsafety,
+@@ -2265,12 +2340,13 @@ pub mod parsing {
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+- })
++ }))
+ }
+ }
+
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+@@ -2284,28 +2360,38 @@ pub mod parsing {
+ None
+ };
+
+- let mut item = if lookahead.peek(Token![const]) {
+- ahead.parse::<Token![const]>()?;
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(ImplItem::Method)
++ } else if lookahead.peek(Token![const]) {
++ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
+- input.parse().map(ImplItem::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.advance_to(&ahead);
++ let ident: Ident = input.call(Ident::parse_any)?;
++ let colon_token: Token![:] = input.parse()?;
++ let ty: Type = input.parse()?;
++ if let Some(eq_token) = input.parse()? {
++ return Ok(ImplItem::Const(ImplItemConst {
++ attrs,
++ vis,
++ defaultness,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }));
++ } else {
++ input.parse::<Token![;]>()?;
++ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ImplItem::Type)
++ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
+ input.call(item_existential).map(ImplItem::Verbatim)
+ } else if vis.is_inherited()
+@@ -2313,7 +2399,6 @@ pub mod parsing {
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -2346,7 +2431,14 @@ pub mod parsing {
+ vis: input.parse()?,
+ defaultness: input.parse()?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+@@ -2358,50 +2450,39 @@ pub mod parsing {
+
+ impl Parse for ImplItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ let sig = parse_signature(input)?;
++
++ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ // Accept methods without a body in an impl block because
++ // rustc's *parser* does not reject them (the compilation error
++ // is emitted later than parsing) and it can be useful for macro
++ // DSLs.
++ let mut punct = Punct::new(';', Spacing::Alone);
++ punct.set_span(semi.span);
++ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
++ Block {
++ brace_token: Brace::default(),
++ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
++ }
++ } else {
++ let content;
++ let brace_token = braced!(content in input);
++ attrs.extend(content.call(Attribute::parse_inner)?);
++ Block {
++ brace_token,
++ stmts: content.call(Block::parse_within)?,
++ }
++ };
+
+ Ok(ImplItemMethod {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ vis,
+ defaultness,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Block { brace_token, stmts },
++ sig,
++ block,
+ })
+ }
+ }
+@@ -2426,6 +2507,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if colon_token.is_some() || ty.is_none() {
++ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(ImplItem::Type(ImplItemType {
++ attrs: Vec::new(),
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2471,6 +2583,7 @@ mod printing {
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
++ use crate::punctuated::Pair;
+
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+@@ -2835,6 +2948,14 @@ mod printing {
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
++ if self.block.stmts.len() == 1 {
++ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
++ if verbatim.to_string() == ";" {
++ verbatim.to_tokens(tokens);
++ return;
++ }
++ }
++ }
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+@@ -2905,6 +3026,33 @@ mod printing {
+ }
+ }
+
++ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
++ let arg = match arg {
++ FnArg::Typed(arg) => arg,
++ FnArg::Receiver(receiver) => {
++ receiver.to_tokens(tokens);
++ return false;
++ }
++ };
++
++ match arg.ty.as_ref() {
++ Type::Verbatim(ty) if ty.to_string() == "..." => {
++ match arg.pat.as_ref() {
++ Pat::Verbatim(pat) if pat.to_string() == "..." => {
++ tokens.append_all(arg.attrs.outer());
++ pat.to_tokens(tokens);
++ }
++ _ => arg.to_tokens(tokens),
++ }
++ true
++ }
++ _ => {
++ arg.to_tokens(tokens);
++ false
++ }
++ }
++ }
++
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+@@ -2915,11 +3063,24 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+- self.inputs.to_tokens(tokens);
+- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
+- <Token![,]>::default().to_tokens(tokens);
++ let mut last_is_variadic = false;
++ for input in self.inputs.pairs() {
++ match input {
++ Pair::Punctuated(input, comma) => {
++ maybe_variadic_to_tokens(input, tokens);
++ comma.to_tokens(tokens);
++ }
++ Pair::End(input) => {
++ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
++ }
++ }
++ }
++ if self.variadic.is_some() && !last_is_variadic {
++ if !self.inputs.empty_or_trailing() {
++ <Token![,]>::default().to_tokens(tokens);
++ }
++ self.variadic.to_tokens(tokens);
+ }
+- self.variadic.to_tokens(tokens);
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
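The pop_variadic and maybe_variadic_to_tokens helpers above are what let a C-variadic signature round-trip: the trailing `...` is pulled out of the parsed inputs into Signature::variadic and printed back as a bare `...`. A minimal sketch of that behaviour, assuming syn 1.0.40 built with the "full" feature plus quote and proc-macro2 as dependencies (the sketch is not part of the vendored diff):

    use quote::ToTokens;

    fn main() {
        // A foreign function declaration exercises parse_fn_args, pop_variadic
        // and, on the way back out, maybe_variadic_to_tokens.
        let src = "fn printf(fmt: *const u8, ...) -> i32;";
        let item: syn::ForeignItemFn = syn::parse_str(src).unwrap();

        // The `...` ends up in Signature::variadic, not in sig.inputs.
        assert_eq!(item.sig.inputs.len(), 1);
        assert!(item.sig.variadic.is_some());

        // Printing goes through Signature::to_tokens and re-emits the `...`
        // after the last named argument.
        println!("{}", item.to_token_stream());
    }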
+diff --git a/third_party/rust/syn/src/keyword.rs b/third_party/rust/syn/src/keyword.rs
+deleted file mode 100644
+index e69de29bb2..0000000000
+diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
+index c8ada7e638..3da506731e 100644
+--- third_party/rust/syn/src/lib.rs
++++ third_party/rust/syn/src/lib.rs
+@@ -1,3 +1,11 @@
++//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
++//!
++//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
++//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
++//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
++//!
++//! <br>
++//!
+ //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+ //! tree of Rust source code.
+ //!
+@@ -62,8 +70,8 @@
+ //! ```
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use quote::quote;
+ //! use syn::{parse_macro_input, DeriveInput};
+@@ -242,35 +250,48 @@
+ //! dynamic library libproc_macro from rustc toolchain.
+
+ // Syn types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
+ #![deny(clippy::all, clippy::pedantic)]
+ // Ignored clippy lints.
+ #![allow(
+- clippy::block_in_if_condition_stmt,
++ clippy::blocks_in_if_conditions,
+ clippy::cognitive_complexity,
+ clippy::doc_markdown,
+ clippy::eval_order_dependence,
+ clippy::inherent_to_string,
+ clippy::large_enum_variant,
++ clippy::manual_non_exhaustive,
++ clippy::match_like_matches_macro,
++ clippy::match_on_vec_items,
++ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::suspicious_op_assign_impl,
+ clippy::too_many_arguments,
+- clippy::trivially_copy_pass_by_ref
++ clippy::trivially_copy_pass_by_ref,
++ clippy::unnecessary_unwrap
+ )]
+ // Ignored clippy_pedantic lints.
+ #![allow(
+ clippy::cast_possible_truncation,
++ clippy::default_trait_access,
+ clippy::empty_enum,
++ clippy::expl_impl_clone_on_copy,
+ clippy::if_not_else,
+ clippy::items_after_statements,
++ clippy::match_same_arms,
++ clippy::missing_errors_doc,
+ clippy::module_name_repetitions,
++ clippy::must_use_candidate,
++ clippy::option_if_let_else,
+ clippy::shadow_unrelated,
+ clippy::similar_names,
+ clippy::single_match_else,
++ clippy::too_many_lines,
+ clippy::unseparated_literal_suffix,
+ clippy::use_self,
+- clippy::used_underscore_binding
++ clippy::used_underscore_binding,
++ clippy::wildcard_imports
+ )]
+
+ #[cfg(all(
+@@ -284,7 +305,6 @@ extern crate unicode_xid;
+ #[cfg(feature = "printing")]
+ extern crate quote;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[macro_use]
+ mod macros;
+
+@@ -307,7 +327,6 @@ pub use crate::attr::{
+ AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
+ };
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod bigint;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -364,9 +383,7 @@ pub use crate::file::File;
+ mod lifetime;
+ pub use crate::lifetime::Lifetime;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod lit;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
+ };
+@@ -441,6 +458,9 @@ pub mod parse_macro_input;
+ #[cfg(all(feature = "parsing", feature = "printing"))]
+ pub mod spanned;
+
++#[cfg(all(feature = "parsing", feature = "full"))]
++mod whitespace;
++
+ mod gen {
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+@@ -482,7 +502,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit"` feature.*
++ /// *This module is available only if Syn is built with the `"visit"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -603,7 +623,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit-mut"`
++ /// *This module is available only if Syn is built with the `"visit-mut"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -702,7 +722,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"fold"` feature.*
++ /// *This module is available only if Syn is built with the `"fold"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -744,6 +764,22 @@ mod gen {
+ #[rustfmt::skip]
+ pub mod fold;
+
++ #[cfg(feature = "clone-impls")]
++ #[rustfmt::skip]
++ mod clone;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod eq;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod hash;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod debug;
++
+ #[cfg(any(feature = "full", feature = "derive"))]
+ #[path = "../gen_helper.rs"]
+ mod helper;
+@@ -757,6 +793,8 @@ pub mod export;
+ mod custom_keyword;
+ mod custom_punctuation;
+ mod sealed;
++mod span;
++mod thread;
+
+ #[cfg(feature = "parsing")]
+ mod lookahead;
+@@ -764,13 +802,15 @@ mod lookahead;
+ #[cfg(feature = "parsing")]
+ pub mod parse;
+
+-mod span;
++#[cfg(feature = "full")]
++mod reserved;
++
++#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
++mod verbatim;
+
+ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+ mod print;
+
+-mod thread;
+-
+ ////////////////////////////////////////////////////////////////////////////////
+
+ #[allow(dead_code, non_camel_case_types)]
+@@ -800,14 +840,14 @@ pub use crate::error::{Error, Result};
+ ///
+ /// [`syn::parse2`]: parse2
+ ///
+-/// *This function is available if Syn is built with both the `"parsing"` and
++/// *This function is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use quote::quote;
+ /// use syn::DeriveInput;
+@@ -847,7 +887,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
+ ///
+ /// [`syn::parse`]: parse()
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+@@ -855,7 +895,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ ///
+ /// # Hygiene
+ ///
+@@ -874,9 +914,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(feature = "parsing")]
+ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+@@ -894,7 +932,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ ///
+ /// If present, either of these would be an error using `from_str`.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` and
++/// *This function is available only if Syn is built with the `"parsing"` and
+ /// `"full"` features.*
+ ///
+ /// # Examples
+@@ -918,9 +956,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub fn parse_file(mut content: &str) -> Result<File> {
+@@ -931,13 +967,16 @@ pub fn parse_file(mut content: &str) -> Result<File> {
+ }
+
+ let mut shebang = None;
+- if content.starts_with("#!") && !content.starts_with("#![") {
+- if let Some(idx) = content.find('\n') {
+- shebang = Some(content[..idx].to_string());
+- content = &content[idx..];
+- } else {
+- shebang = Some(content.to_string());
+- content = "";
++ if content.starts_with("#!") {
++ let rest = whitespace::skip(&content[2..]);
++ if !rest.starts_with('[') {
++ if let Some(idx) = content.find('\n') {
++ shebang = Some(content[..idx].to_string());
++ content = &content[idx..];
++ } else {
++ shebang = Some(content.to_string());
++ content = "";
++ }
+ }
+ }
+
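The reworked shebang check above only treats `#!` as a shebang when the next non-whitespace character is not `[`, so an inner attribute at the top of a file is no longer mistaken for one. A small sketch of the observable behaviour, assuming syn 1.0.40 built with the "full" feature (the sketch is not part of the vendored diff):

    fn main() -> Result<(), syn::Error> {
        // A real shebang is captured in File::shebang and skipped before parsing.
        let script = "#!/usr/bin/env run-cargo-script\nfn main() {}\n";
        let file = syn::parse_file(script)?;
        assert_eq!(file.shebang.as_deref(), Some("#!/usr/bin/env run-cargo-script"));
        assert_eq!(file.items.len(), 1);

        // `#! [allow(dead_code)]` is an inner attribute, not a shebang, even
        // with whitespace between `#!` and `[`.
        let module = "#! [allow(dead_code)]\nfn main() {}\n";
        assert!(syn::parse_file(module)?.shebang.is_none());
        Ok(())
    }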
+diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
+index d51c48e827..959cc5f9c6 100644
+--- third_party/rust/syn/src/lifetime.rs
++++ third_party/rust/syn/src/lifetime.rs
+@@ -18,10 +18,8 @@ use crate::lookahead;
+ /// - All following characters must be Unicode code points with the XID_Continue
+ /// property.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+-#[cfg_attr(feature = "extra-traits", derive(Debug))]
+-#[derive(Clone)]
+ pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+@@ -72,6 +70,15 @@ impl Display for Lifetime {
+ }
+ }
+
++impl Clone for Lifetime {
++ fn clone(&self) -> Self {
++ Lifetime {
++ apostrophe: self.apostrophe,
++ ident: self.ident.clone(),
++ }
++ }
++}
++
+ impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
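A short sketch of the Lifetime API this hunk touches (the construction rules from the doc comment, the new manual Clone impl, equality by identifier), assuming syn 1.0.40 and proc-macro2 (the sketch is not part of the vendored diff):

    use proc_macro2::Span;

    fn main() {
        // Lifetime::new expects an apostrophe, then `_` or an XID_Start
        // character, then XID_Continue characters.
        let a = syn::Lifetime::new("'a", Span::call_site());
        let b = a.clone(); // the manual Clone impl added above
        assert!(a == b);   // PartialEq compares only the identifier
        assert_eq!(a.ident.to_string(), "a");
    }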
+diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
+index f2209a2980..ee77e75bec 100644
+--- third_party/rust/syn/src/lit.rs
++++ third_party/rust/syn/src/lit.rs
+@@ -22,9 +22,6 @@ use crate::{Error, Result};
+ ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+- ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+@@ -33,7 +30,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Lit #manual_extra_traits {
++ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+@@ -64,61 +61,44 @@ ast_enum_of_structs! {
+
+ ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitStr #manual_extra_traits_debug {
+- repr: Box<LitStrRepr>,
++ pub struct LitStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+-struct LitStrRepr {
+- token: Literal,
+- suffix: Box<str>,
+-}
+-
+ ast_struct! {
+ /// A byte string literal: `b"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByteStr #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByteStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A byte literal: `b'f'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByte #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByte {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A character literal: `'a'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitChar #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitChar {
++ repr: Box<LitRepr>,
+ }
+ }
+
++struct LitRepr {
++ token: Literal,
++ suffix: Box<str>,
++}
++
+ ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitInt #manual_extra_traits_debug {
++ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -129,15 +109,11 @@ ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinite or NaN.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitFloat #manual_extra_traits_debug {
++ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -146,92 +122,27 @@ struct LitFloatRepr {
+
+ ast_struct! {
+ /// A boolean literal: `true` or `false`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitBool #manual_extra_traits_debug {
++ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Lit {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Lit {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Lit::Str(this), Lit::Str(other)) => this == other,
+- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
+- (Lit::Byte(this), Lit::Byte(other)) => this == other,
+- (Lit::Char(this), Lit::Char(other)) => this == other,
+- (Lit::Int(this), Lit::Int(other)) => this == other,
+- (Lit::Float(this), Lit::Float(other)) => this == other,
+- (Lit::Bool(this), Lit::Bool(other)) => this == other,
+- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Lit {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Lit::Str(lit) => {
+- hash.write_u8(0);
+- lit.hash(hash);
+- }
+- Lit::ByteStr(lit) => {
+- hash.write_u8(1);
+- lit.hash(hash);
+- }
+- Lit::Byte(lit) => {
+- hash.write_u8(2);
+- lit.hash(hash);
+- }
+- Lit::Char(lit) => {
+- hash.write_u8(3);
+- lit.hash(hash);
+- }
+- Lit::Int(lit) => {
+- hash.write_u8(4);
+- lit.hash(hash);
+- }
+- Lit::Float(lit) => {
+- hash.write_u8(5);
+- lit.hash(hash);
+- }
+- Lit::Bool(lit) => {
+- hash.write_u8(6);
+- lit.hash(hash);
+- }
+- Lit::Verbatim(lit) => {
+- hash.write_u8(7);
+- lit.to_string().hash(hash);
+- }
+- }
+- }
+-}
+-
+ impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+- let mut lit = Literal::string(value);
+- lit.set_span(span);
++ let mut token = Literal::string(value);
++ token.set_span(span);
+ LitStr {
+- repr: Box::new(LitStrRepr {
+- token: lit,
++ repr: Box::new(LitRepr {
++ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+@@ -311,7 +222,7 @@ impl LitStr {
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+- let stream = respan_token_stream(g.stream().clone(), span);
++ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+@@ -345,19 +256,30 @@ impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+- LitByteStr { token }
++ LitByteStr {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+- value::parse_lit_byte_str(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte_str(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -365,19 +287,30 @@ impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+- LitByte { token }
++ LitByte {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> u8 {
+- value::parse_lit_byte(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -385,36 +318,52 @@ impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+- LitChar { token }
++ LitChar {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> char {
+- value::parse_lit_char(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_char(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitInt {
+- repr: Box::new(LitIntRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not an integer literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_int(repr) {
++ Some(parse) => parse,
++ None => panic!("Not an integer literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported integer literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -492,18 +441,23 @@ impl Display for LitInt {
+
+ impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitFloat {
+- repr: Box::new(LitFloatRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not a float literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_float(repr) {
++ Some(parse) => parse,
++ None => panic!("Not a float literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported float literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -575,7 +529,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByteStr")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -584,7 +538,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByte")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -593,7 +547,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitChar")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -626,15 +580,53 @@ mod debug_impls {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl Clone for LitRepr {
++ fn clone(&self) -> Self {
++ LitRepr {
++ token: self.token.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitIntRepr {
++ fn clone(&self) -> Self {
++ LitIntRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitFloatRepr {
++ fn clone(&self) -> Self {
++ LitFloatRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
+ macro_rules! lit_extra_traits {
+- ($ty:ident, $($field:ident).+) => {
+- #[cfg(feature = "extra-traits")]
+- impl Eq for $ty {}
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $ty {
++ fn clone(&self) -> Self {
++ $ty {
++ repr: self.repr.clone(),
++ }
++ }
++ }
+
+ #[cfg(feature = "extra-traits")]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+- self.$($field).+.to_string() == other.$($field).+.to_string()
++ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+@@ -644,7 +636,7 @@ macro_rules! lit_extra_traits {
+ where
+ H: Hasher,
+ {
+- self.$($field).+.to_string().hash(state);
++ self.repr.token.to_string().hash(state);
+ }
+ }
+
+@@ -657,20 +649,23 @@ macro_rules! lit_extra_traits {
+ };
+ }
+
+-lit_extra_traits!(LitStr, repr.token);
+-lit_extra_traits!(LitByteStr, token);
+-lit_extra_traits!(LitByte, token);
+-lit_extra_traits!(LitChar, token);
+-lit_extra_traits!(LitInt, repr.token);
+-lit_extra_traits!(LitFloat, repr.token);
+-lit_extra_traits!(LitBool, value);
++lit_extra_traits!(LitStr);
++lit_extra_traits!(LitByteStr);
++lit_extra_traits!(LitByte);
++lit_extra_traits!(LitChar);
++lit_extra_traits!(LitInt);
++lit_extra_traits!(LitFloat);
++
++#[cfg(feature = "parsing")]
++#[doc(hidden)]
++#[allow(non_snake_case)]
++pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
++ match marker {}
++}
+
+ ast_enum! {
+ /// The style of a string literal, either plain quoted or a raw string like
+ /// `r##"data"##`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+ pub enum StrStyle #no_visit {
+ /// An ordinary string like `"data"`.
+ Cooked,
+@@ -691,7 +686,9 @@ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
++ use crate::buffer::Cursor;
+ use crate::parse::{Parse, ParseStream, Result};
++ use proc_macro2::Punct;
+
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -699,25 +696,73 @@ pub mod parsing {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+- while let Some((ident, rest)) = cursor.ident() {
+- let value = if ident == "true" {
+- true
+- } else if ident == "false" {
+- false
+- } else {
+- break;
+- };
+- let lit_bool = LitBool {
+- value,
+- span: ident.span(),
+- };
+- return Ok((Lit::Bool(lit_bool), rest));
++
++ if let Some((ident, rest)) = cursor.ident() {
++ let value = ident == "true";
++ if value || ident == "false" {
++ let lit_bool = LitBool {
++ value,
++ span: ident.span(),
++ };
++ return Ok((Lit::Bool(lit_bool), rest));
++ }
+ }
++
++ if let Some((punct, rest)) = cursor.punct() {
++ if punct.as_char() == '-' {
++ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
++ return Ok((lit, rest));
++ }
++ }
++ }
++
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
++ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
++ let (lit, rest) = cursor.literal()?;
++
++ let mut span = neg.span();
++ span = span.join(lit.span()).unwrap_or(span);
++
++ let mut repr = lit.to_string();
++ repr.insert(0, '-');
++
++ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
++ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
++ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
++ token.set_span(span);
++ return Some((
++ Lit::Int(LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ));
++ }
++ }
++ }
++
++ let (digits, suffix) = value::parse_lit_float(&repr)?;
++ let mut token = value::to_literal(&repr, &digits, &suffix)?;
++ token.set_span(span);
++ Some((
++ Lit::Float(LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ))
++ }
++
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+@@ -803,19 +848,19 @@ mod printing {
+
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+@@ -855,20 +900,29 @@ mod value {
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+- repr: Box::new(LitStrRepr { token, suffix }),
++ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+ b'"' | b'r' => {
+- return Lit::ByteStr(LitByteStr { token });
++ let (_, suffix) = parse_lit_byte_str(&repr);
++ return Lit::ByteStr(LitByteStr {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'\'' => {
+- return Lit::Byte(LitByte { token });
++ let (_, suffix) = parse_lit_byte(&repr);
++ return Lit::Byte(LitByte {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ _ => {}
+ },
+ b'\'' => {
+- return Lit::Char(LitChar { token });
++ let (_, suffix) = parse_lit_char(&repr);
++ return Lit::Char(LitChar {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'0'..=b'9' | b'-' => {
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
+@@ -905,6 +959,44 @@ mod value {
+
+ panic!("Unrecognized literal: `{}`", repr);
+ }
++
++ pub fn suffix(&self) -> &str {
++ match self {
++ Lit::Str(lit) => lit.suffix(),
++ Lit::ByteStr(lit) => lit.suffix(),
++ Lit::Byte(lit) => lit.suffix(),
++ Lit::Char(lit) => lit.suffix(),
++ Lit::Int(lit) => lit.suffix(),
++ Lit::Float(lit) => lit.suffix(),
++ Lit::Bool(_) | Lit::Verbatim(_) => "",
++ }
++ }
++
++ pub fn span(&self) -> Span {
++ match self {
++ Lit::Str(lit) => lit.span(),
++ Lit::ByteStr(lit) => lit.span(),
++ Lit::Byte(lit) => lit.span(),
++ Lit::Char(lit) => lit.span(),
++ Lit::Int(lit) => lit.span(),
++ Lit::Float(lit) => lit.span(),
++ Lit::Bool(lit) => lit.span,
++ Lit::Verbatim(lit) => lit.span(),
++ }
++ }
++
++ pub fn set_span(&mut self, span: Span) {
++ match self {
++ Lit::Str(lit) => lit.set_span(span),
++ Lit::ByteStr(lit) => lit.set_span(span),
++ Lit::Byte(lit) => lit.set_span(span),
++ Lit::Char(lit) => lit.set_span(span),
++ Lit::Int(lit) => lit.set_span(span),
++ Lit::Float(lit) => lit.set_span(span),
++ Lit::Bool(lit) => lit.span = span,
++ Lit::Verbatim(lit) => lit.set_span(span),
++ }
++ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+@@ -1004,19 +1096,18 @@ mod value {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
+- for end in s[s.len() - pounds..].bytes() {
++ let close = s.rfind('"').unwrap();
++ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+- let content = s[pounds + 1..s.len() - pounds - 1]
+- .to_owned()
+- .into_boxed_str();
+- let suffix = Box::<str>::default(); // todo
++ let content = s[pounds + 1..close].to_owned().into_boxed_str();
++ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
++ // Returns (content, suffix).
++ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+@@ -1028,25 +1119,25 @@ mod value {
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s.as_bytes();
++ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+- let byte = match byte(s, 0) {
++ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1057,10 +1148,10 @@ mod value {
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+- let byte = byte(s, 0);
++ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).unwrap();
+ if ch.is_whitespace() {
+- s = &s[1..];
++ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+@@ -1069,42 +1160,45 @@ mod value {
+ }
+ }
+ b'\r' => {
+- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
+- s = &s[2..];
++ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
++ v = &v[2..];
+ b'\n'
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+- assert_eq!(s, b"\"");
+- out
++ assert_eq!(byte(v, 0), b'"');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (out, suffix)
+ }
+
+- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
++ let (value, suffix) = parse_lit_str_raw(&s[1..]);
++ (String::from(value).into_bytes(), suffix)
+ }
+
+- pub fn parse_lit_byte(s: &str) -> u8 {
++ // Returns (value, suffix).
++ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s[2..].as_bytes();
++ let mut v = s[2..].as_bytes();
+
+- let b = match byte(s, 0) {
++ let b = match byte(v, 0) {
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1118,16 +1212,18 @@ mod value {
+ }
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+
+- assert_eq!(byte(s, 0), b'\'');
+- b
++ assert_eq!(byte(v, 0), b'\'');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (b, suffix)
+ }
+
+- pub fn parse_lit_char(mut s: &str) -> char {
++ // Returns (value, suffix).
++ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+@@ -1163,8 +1259,9 @@ mod value {
+ ch
+ }
+ };
+- assert_eq!(s, "\'", "Expected end of char literal");
+- ch
++ assert_eq!(byte(s, 0), b'\'');
++ let suffix = s[1..].to_owned().into_boxed_str();
++ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+@@ -1334,7 +1431,11 @@ mod value {
+ }
+ b'e' | b'E' => {
+ if has_e {
+- return None;
++ if has_exponent {
++ break;
++ } else {
++ return None;
++ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+@@ -1372,11 +1473,33 @@ mod value {
+ }
+ }
+
+- pub fn to_literal(s: &str) -> Literal {
+- let stream = s.parse::<TokenStream>().unwrap();
+- match stream.into_iter().next().unwrap() {
+- TokenTree::Literal(l) => l,
+- _ => unreachable!(),
++ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
++ if repr.starts_with('-') {
++ if suffix == "f64" {
++ digits.parse().ok().map(Literal::f64_suffixed)
++ } else if suffix == "f32" {
++ digits.parse().ok().map(Literal::f32_suffixed)
++ } else if suffix == "i64" {
++ digits.parse().ok().map(Literal::i64_suffixed)
++ } else if suffix == "i32" {
++ digits.parse().ok().map(Literal::i32_suffixed)
++ } else if suffix == "i16" {
++ digits.parse().ok().map(Literal::i16_suffixed)
++ } else if suffix == "i8" {
++ digits.parse().ok().map(Literal::i8_suffixed)
++ } else if !suffix.is_empty() {
++ None
++ } else if digits.contains('.') {
++ digits.parse().ok().map(Literal::f64_unsuffixed)
++ } else {
++ digits.parse().ok().map(Literal::i64_unsuffixed)
++ }
++ } else {
++ let stream = repr.parse::<TokenStream>().unwrap();
++ match stream.into_iter().next().unwrap() {
++ TokenTree::Literal(l) => Some(l),
++ _ => unreachable!(),
++ }
+ }
+ }
+ }
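Editor's note (not part of the patch): the lit.rs hunks above thread literal suffixes such as `u8` or `f32` through the raw-string, byte-string, byte, and char parsers, and teach `to_literal` to rebuild negative suffixed literals instead of panicking. A minimal sketch of the resulting behavior, assuming the updated syn 1.0 with default features and the `Lit::suffix` accessor shown above:

use syn::{Lit, LitInt};

fn main() {
    // A literal with a type suffix; the parsers above now preserve "u8".
    let int: LitInt = syn::parse_str("42u8").unwrap();
    assert_eq!(int.base10_digits(), "42");
    assert_eq!(int.suffix(), "u8");

    // The same suffix is reachable through the Lit enum's accessor.
    let lit: Lit = syn::parse_str("2.5f32").unwrap();
    assert_eq!(lit.suffix(), "f32");
}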
+diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
+index 6c3dcae92a..de288a34e1 100644
+--- third_party/rust/syn/src/mac.rs
++++ third_party/rust/syn/src/mac.rs
+@@ -2,21 +2,17 @@ use super::*;
+ use crate::token::{Brace, Bracket, Paren};
+ use proc_macro2::TokenStream;
+ #[cfg(feature = "parsing")]
+-use proc_macro2::{Delimiter, Span, TokenTree};
++use proc_macro2::{Delimiter, Group, Span, TokenTree};
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Macro #manual_extra_traits {
++ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+@@ -27,7 +23,7 @@ ast_struct! {
+ ast_enum! {
+ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum MacroDelimiter {
+ Paren(Paren),
+@@ -36,39 +32,20 @@ ast_enum! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Macro {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Macro {
+- fn eq(&self, other: &Self) -> bool {
+- self.path == other.path
+- && self.bang_token == other.bang_token
+- && self.delimiter == other.delimiter
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Macro {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.path.hash(state);
+- self.bang_token.hash(state);
+- self.delimiter.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
+- match delimiter {
++fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
++ let delimiter = match macro_delimiter {
++ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
++ MacroDelimiter::Brace(_) => Delimiter::Brace,
++ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
++ };
++ let mut group = Group::new(delimiter, TokenStream::new());
++ group.set_span(match macro_delimiter {
+ MacroDelimiter::Paren(token) => token.span,
+ MacroDelimiter::Brace(token) => token.span,
+ MacroDelimiter::Bracket(token) => token.span,
+- }
++ });
++ group.span_close()
+ }
+
+ impl Macro {
+@@ -163,9 +140,7 @@ impl Macro {
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+- // TODO: see if we can get a group.span_close() span in here as the
+- // scope, rather than the span of the whole group.
+- let scope = delimiter_span(&self.delimiter);
++ let scope = delimiter_span_close(&self.delimiter);
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+ }
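Editor's note (not part of the patch): `delimiter_span_close` above makes `Macro::parse_body_with` report errors at the closing delimiter of the macro body rather than at the whole delimited group. A hedged sketch of typical usage, assuming syn 1.0 with the "parsing" feature:

use syn::punctuated::Punctuated;
use syn::{Ident, Macro, Token};

fn main() {
    let mac: Macro = syn::parse_str("my_macro!(a, b, c)").unwrap();
    // An "unexpected end of input" inside the body would now point at the
    // span of the closing `)` instead of the span of the entire group.
    let args = mac
        .parse_body_with(Punctuated::<Ident, Token![,]>::parse_terminated)
        .unwrap();
    assert_eq!(args.len(), 3);
}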
+diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
+index 9cac5c15df..8060224381 100644
+--- third_party/rust/syn/src/macros.rs
++++ third_party/rust/syn/src/macros.rs
+@@ -4,15 +4,11 @@ macro_rules! ast_struct {
+ struct $name:ident #full $($rest:tt)*
+ ) => {
+ #[cfg(feature = "full")]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+
+ #[cfg(not(feature = "full"))]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name {
+- _noconstruct: (),
++ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+@@ -23,29 +19,10 @@ macro_rules! ast_struct {
+ }
+ };
+
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+ (
+ [$($attrs_pub:tt)*]
+ struct $name:ident $($rest:tt)*
+ ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+ };
+
+@@ -63,21 +40,10 @@ macro_rules! ast_enum {
+ ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
+ );
+
+- (
+- [$($attrs_pub:tt)*]
+- enum $name:ident #manual_extra_traits $($rest:tt)*
+- ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* enum $name $($rest)*
+- );
+-
+ (
+ [$($attrs_pub:tt)*]
+ enum $name:ident $($rest:tt)*
+ ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* enum $name $($rest)*
+ );
+
+@@ -120,15 +86,9 @@ macro_rules! ast_enum_of_structs_impl {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+- $(
+- $(
+- impl From<$member> for $name {
+- fn from(e: $member) -> $name {
+- $name::$variant(e)
+- }
+- }
+- )*
+- )*
++ $($(
++ ast_enum_from_struct!($name::$variant, $member);
++ )*)*
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens! {
+@@ -140,6 +100,19 @@ macro_rules! ast_enum_of_structs_impl {
+ };
+ }
+
++macro_rules! ast_enum_from_struct {
++ // No From<TokenStream> for verbatim variants.
++ ($name:ident::Verbatim, $member:ident) => {};
++
++ ($name:ident::$variant:ident, $member:ident) => {
++ impl From<$member> for $name {
++ fn from(e: $member) -> $name {
++ $name::$variant(e)
++ }
++ }
++ };
++}
++
+ #[cfg(feature = "printing")]
+ macro_rules! generate_to_tokens {
+ (do_not_generate_to_tokens $($foo:tt)*) => ();
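Editor's note (not part of the patch): the new `ast_enum_from_struct!` helper above keeps the generated `From` conversions for ordinary struct variants and only skips `Verbatim` variants, which wrap a raw `TokenStream`. A small illustration, assuming syn 1.0 built with the "full" feature:

use syn::{Pat, PatWild};

fn main() {
    let wild = PatWild {
        attrs: Vec::new(),
        underscore_token: Default::default(),
    };
    // From<PatWild> for Pat is still generated; From<TokenStream> is not.
    let pat = Pat::from(wild);
    assert!(matches!(pat, Pat::Wild(_)));
}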
+diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
+index 49fb853c79..d254673b40 100644
+--- third_party/rust/syn/src/op.rs
++++ third_party/rust/syn/src/op.rs
+@@ -1,9 +1,8 @@
+ ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+@@ -67,9 +66,8 @@ ast_enum! {
+ ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
+index 7c7b194308..abb4c4c14f 100644
+--- third_party/rust/syn/src/parse.rs
++++ third_party/rust/syn/src/parse.rs
+@@ -26,8 +26,8 @@
+ //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+ //! use syn::parse::{Parse, ParseStream};
+@@ -109,9 +109,7 @@
+ //! # Ok(())
+ //! # }
+ //! #
+-//! # fn main() {
+-//! # run_parser().unwrap();
+-//! # }
++//! # run_parser().unwrap();
+ //! ```
+ //!
+ //! The [`parse_quote!`] macro also uses this approach.
+@@ -155,8 +153,8 @@
+ //! [`Parser`]: trait.Parser.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::parse::Parser;
+ //! use syn::punctuated::Punctuated;
+@@ -186,7 +184,7 @@
+ //!
+ //! ---
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ #[path = "discouraged.rs"]
+ pub mod discouraged;
+@@ -217,6 +215,11 @@ pub use crate::lookahead::{Lookahead1, Peek};
+
+ /// Parsing interface implemented by all types that can be parsed in a default
+ /// way from a token stream.
++///
++/// Refer to the [module documentation] for details about implementing and using
++/// the `Parse` trait.
++///
++/// [module documentation]: self
+ pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+ }
+@@ -263,13 +266,16 @@ pub struct ParseBuffer<'a> {
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+ }
+
+ impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+- if !self.is_empty() && self.unexpected.get().is_none() {
+- self.unexpected.set(Some(self.cursor().span()));
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
++ let (inner, old_span) = inner_unexpected(self);
++ if old_span.is_none() {
++ inner.set(Unexpected::Some(unexpected_span));
++ }
+ }
+ }
+ }
+@@ -324,15 +330,12 @@ impl<'a> Debug for ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+-/// # fn main() {
+-/// # use syn::parse::Parser;
+-/// # let remainder = remainder_after_skipping_past_next_at
+-/// # .parse_str("a @ b c")
+-/// # .unwrap();
+-/// # assert_eq!(remainder.to_string(), "b c");
+-/// # }
++/// # use syn::parse::Parser;
++/// # let remainder = remainder_after_skipping_past_next_at
++/// # .parse_str("a @ b c")
++/// # .unwrap();
++/// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+-#[derive(Copy, Clone)]
+ pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+@@ -356,6 +359,14 @@ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ }
+ }
+
++impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
++
++impl<'c, 'a> Clone for StepCursor<'c, 'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
+ impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+@@ -375,36 +386,81 @@ pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+ }
+
+-fn skip(input: ParseStream) -> bool {
+- input
+- .step(|cursor| {
+- if let Some((_lifetime, rest)) = cursor.lifetime() {
+- Ok((true, rest))
+- } else if let Some((_token, rest)) = cursor.token_tree() {
+- Ok((true, rest))
+- } else {
+- Ok((false, *cursor))
+- }
+- })
+- .unwrap()
+-}
+-
+ pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Rc<Cell<Unexpected>>,
+ ) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+- unexpected,
++ unexpected: Cell::new(Some(unexpected)),
++ }
++}
++
++pub(crate) enum Unexpected {
++ None,
++ Some(Span),
++ Chain(Rc<Cell<Unexpected>>),
++}
++
++impl Default for Unexpected {
++ fn default() -> Self {
++ Unexpected::None
+ }
+ }
+
+-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
+- buffer.unexpected.clone()
++impl Clone for Unexpected {
++ fn clone(&self) -> Self {
++ match self {
++ Unexpected::None => Unexpected::None,
++ Unexpected::Some(span) => Unexpected::Some(*span),
++ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
++ }
++ }
++}
++
++// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
++// swapping in a None is cheap.
++fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
++ let prev = cell.take();
++ let ret = prev.clone();
++ cell.set(prev);
++ ret
++}
++
++fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
++ let mut unexpected = get_unexpected(buffer);
++ loop {
++ match cell_clone(&unexpected) {
++ Unexpected::None => return (unexpected, None),
++ Unexpected::Some(span) => return (unexpected, Some(span)),
++ Unexpected::Chain(next) => unexpected = next,
++ }
++ }
++}
++
++pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
++ cell_clone(&buffer.unexpected).unwrap()
++}
++
++fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
++ if cursor.eof() {
++ return None;
++ }
++ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
++ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
++ return Some(unexpected);
++ }
++ cursor = rest;
++ }
++ if cursor.eof() {
++ None
++ } else {
++ Some(cursor.span())
++ }
+ }
+
+ impl<'a> ParseBuffer<'a> {
+@@ -566,14 +622,17 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor().skip().map_or(false, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor()
++ .skip()
++ .and_then(Cursor::skip)
++ .map_or(false, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+@@ -615,12 +674,10 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// }
+ /// #
+- /// # fn main() {
+- /// # let input = quote! {
+- /// # struct S(A, B);
+- /// # };
+- /// # syn::parse2::<TupleStruct>(input).unwrap();
+- /// # }
++ /// # let input = quote! {
++ /// # struct S(A, B);
++ /// # };
++ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ pub fn parse_terminated<T, P: Parse>(
+ &self,
+@@ -847,8 +904,8 @@ impl<'a> ParseBuffer<'a> {
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+- // parses all the way.
+- unexpected: Rc::new(Cell::new(None)),
++ // parses all the way unless we `advance_to`.
++ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+@@ -923,13 +980,11 @@ impl<'a> ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+- /// # fn main() {
+- /// # use syn::parse::Parser;
+- /// # let remainder = remainder_after_skipping_past_next_at
+- /// # .parse_str("a @ b c")
+- /// # .unwrap();
+- /// # assert_eq!(remainder.to_string(), "b c");
+- /// # }
++ /// # use syn::parse::Parser;
++ /// # let remainder = remainder_after_skipping_past_next_at
++ /// # .parse_str("a @ b c")
++ /// # .unwrap();
++ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+@@ -961,6 +1016,18 @@ impl<'a> ParseBuffer<'a> {
+ Ok(node)
+ }
+
++ /// Returns the `Span` of the next token in the parse stream, or
++ /// `Span::call_site()` if this parse stream has completely exhausted its
++ /// input `TokenStream`.
++ pub fn span(&self) -> Span {
++ let cursor = self.cursor();
++ if cursor.eof() {
++ self.scope
++ } else {
++ crate::buffer::open_span_of_group(cursor)
++ }
++ }
++
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+@@ -971,7 +1038,7 @@ impl<'a> ParseBuffer<'a> {
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+- match self.unexpected.get() {
++ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+@@ -1048,7 +1115,7 @@ impl Parse for Literal {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait Parser: Sized {
+ type Output;
+
+@@ -1063,7 +1130,7 @@ pub trait Parser: Sized {
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -1088,6 +1155,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+@@ -1095,6 +1163,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ input.parse().and_then(|tokens| self.parse2(tokens))
+ }
+@@ -1103,7 +1172,7 @@ pub trait Parser: Sized {
+ fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+ }
+
+@@ -1118,38 +1187,42 @@ where
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ self(input)
+ }
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
+ f.__parse_stream(input)
+ }
+@@ -1160,8 +1233,8 @@ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Out
+ /// provided any attribute args.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::parse_macro_input;
+ /// use syn::parse::Nothing;
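Editor's note (not part of the patch): the parse.rs changes above rework `peek2`/`peek3` to walk the token cursor instead of forking the buffer, chain "unexpected token" state across forked `ParseBuffer`s, and add `ParseBuffer::span`. A minimal sketch of a parser relying on `peek2`, assuming syn 1.0 with default features; `KeyValue` is a hypothetical example type:

use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, Token};

// Hypothetical `name = value` pair, both sides plain identifiers.
struct KeyValue {
    key: Ident,
    eq_token: Token![=],
    value: Ident,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> Result<Self> {
        // peek2 now skips one token via Cursor::skip; observable behavior is unchanged.
        if !(input.peek(Ident) && input.peek2(Token![=])) {
            return Err(input.error("expected `name = value`"));
        }
        Ok(KeyValue {
            key: input.parse()?,
            eq_token: input.parse()?,
            value: input.parse()?,
        })
    }
}

fn main() {
    let kv: KeyValue = syn::parse_str("answer = forty_two").unwrap();
    assert_eq!(kv.key, "answer");
    assert_eq!(kv.value, "forty_two");
    let _ = kv.eq_token;
}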
+diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
+index d6e0725c17..c8fc1cea37 100644
+--- third_party/rust/syn/src/parse_macro_input.rs
++++ third_party/rust/syn/src/parse_macro_input.rs
+@@ -16,8 +16,8 @@
+ /// #\[proc_macro_attribute\] attribute.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, Result};
+ /// use syn::parse::{Parse, ParseStream};
+@@ -43,7 +43,31 @@
+ /// # "".parse().unwrap()
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++///
++/// <br>
++///
++/// # Expansion
++///
++/// `parse_macro_input!($variable as $Type)` expands to something like:
++///
++/// ```no_run
++/// # extern crate proc_macro;
++/// #
++/// # macro_rules! doc_test {
++/// # ($variable:ident as $Type:ty) => {
++/// match syn::parse::<$Type>($variable) {
++/// Ok(syntax_tree) => syntax_tree,
++/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
++/// }
++/// # };
++/// # }
++/// #
++/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
++/// # let _ = doc_test!(input as syn::Ident);
++/// # proc_macro::TokenStream::new()
++/// # }
++/// ```
++#[macro_export]
+ macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+@@ -54,7 +78,7 @@ macro_rules! parse_macro_input {
+ }
+ };
+ ($tokenstream:ident) => {
+- parse_macro_input!($tokenstream as _)
++ $crate::parse_macro_input!($tokenstream as _)
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
+index 18a47b95c7..66aa818cd0 100644
+--- third_party/rust/syn/src/parse_quote.rs
++++ third_party/rust/syn/src/parse_quote.rs
+@@ -24,7 +24,7 @@
+ /// }
+ /// ```
+ ///
+-/// *This macro is available if Syn is built with the `"parsing"` feature,
++/// *This macro is available only if Syn is built with the `"parsing"` feature,
+ /// although interpolation of syntax tree nodes into the quoted tokens is only
+ /// supported if Syn is built with the `"printing"` feature as well.*
+ ///
+@@ -56,8 +56,10 @@
+ /// or inner like `#![...]`
+ /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+ /// `P` with optional trailing punctuation
++/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+ ///
+ /// [`Punctuated<T, P>`]: punctuated::Punctuated
++/// [`Vec<Stmt>`]: Block::parse_within
+ ///
+ /// # Panics
+ ///
+@@ -67,7 +69,7 @@
+ //
+ // TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
+ // https://github.com/rust-lang/rust/issues/62834
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::parse_quote::parse(
+@@ -112,6 +114,8 @@ impl<T: Parse> ParseQuote for T {
+ use crate::punctuated::Punctuated;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::{attr, Attribute};
++#[cfg(feature = "full")]
++use crate::{Block, Stmt};
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl ParseQuote for Attribute {
+@@ -129,3 +133,10 @@ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ Self::parse_terminated(input)
+ }
+ }
++
++#[cfg(feature = "full")]
++impl ParseQuote for Vec<Stmt> {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Block::parse_within(input)
++ }
++}
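Editor's note (not part of the patch): the `ParseQuote for Vec<Stmt>` impl above lets `parse_quote!` produce a statement list parsed the same way as the interior of a block (`Block::parse_within`). A short sketch, assuming syn 1.0 built with the "full" feature:

use syn::{parse_quote, Stmt};

fn main() {
    let stmts: Vec<Stmt> = parse_quote! {
        let x = 1;
        x + 1
    };
    // One Stmt::Local plus one trailing Stmt::Expr.
    assert_eq!(stmts.len(), 2);
}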
+diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
+index 9371e05493..e9576a2361 100644
+--- third_party/rust/syn/src/pat.rs
++++ third_party/rust/syn/src/pat.rs
+@@ -1,16 +1,12 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Pat #manual_extra_traits {
++ pub enum Pat {
+ /// A box pattern: `box v`.
+ Box(PatBox),
+
+@@ -86,7 +82,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A box pattern: `box v`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatBox {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -97,7 +93,10 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// It may also be a unit struct or struct variant (e.g. `None`), or a
++ /// constant; these cannot be distinguished syntactically.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+@@ -113,7 +112,7 @@ ast_struct! {
+ /// This holds an `Expr` rather than a `Lit` because negative numbers
+ /// are represented as an `Expr::Unary`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatLit {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -123,7 +122,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in pattern position.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -133,7 +132,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+@@ -150,7 +149,7 @@ ast_struct! {
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+@@ -161,7 +160,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range pattern: `1..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRange {
+ pub attrs: Vec<Attribute>,
+ pub lo: Box<Expr>,
+@@ -173,7 +172,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference pattern: `&mut var`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -185,7 +184,7 @@ ast_struct! {
+ ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+@@ -195,7 +194,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -206,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -219,7 +218,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -230,7 +229,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -241,7 +240,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+@@ -253,7 +252,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any value: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+@@ -266,7 +265,7 @@ ast_struct! {
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+@@ -275,122 +274,17 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Pat {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Pat {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Pat::Box(this), Pat::Box(other)) => this == other,
+- (Pat::Ident(this), Pat::Ident(other)) => this == other,
+- (Pat::Lit(this), Pat::Lit(other)) => this == other,
+- (Pat::Macro(this), Pat::Macro(other)) => this == other,
+- (Pat::Or(this), Pat::Or(other)) => this == other,
+- (Pat::Path(this), Pat::Path(other)) => this == other,
+- (Pat::Range(this), Pat::Range(other)) => this == other,
+- (Pat::Reference(this), Pat::Reference(other)) => this == other,
+- (Pat::Rest(this), Pat::Rest(other)) => this == other,
+- (Pat::Slice(this), Pat::Slice(other)) => this == other,
+- (Pat::Struct(this), Pat::Struct(other)) => this == other,
+- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
+- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
+- (Pat::Type(this), Pat::Type(other)) => this == other,
+- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Pat::Wild(this), Pat::Wild(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Pat {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Pat::Box(pat) => {
+- hash.write_u8(0);
+- pat.hash(hash);
+- }
+- Pat::Ident(pat) => {
+- hash.write_u8(1);
+- pat.hash(hash);
+- }
+- Pat::Lit(pat) => {
+- hash.write_u8(2);
+- pat.hash(hash);
+- }
+- Pat::Macro(pat) => {
+- hash.write_u8(3);
+- pat.hash(hash);
+- }
+- Pat::Or(pat) => {
+- hash.write_u8(4);
+- pat.hash(hash);
+- }
+- Pat::Path(pat) => {
+- hash.write_u8(5);
+- pat.hash(hash);
+- }
+- Pat::Range(pat) => {
+- hash.write_u8(6);
+- pat.hash(hash);
+- }
+- Pat::Reference(pat) => {
+- hash.write_u8(7);
+- pat.hash(hash);
+- }
+- Pat::Rest(pat) => {
+- hash.write_u8(8);
+- pat.hash(hash);
+- }
+- Pat::Slice(pat) => {
+- hash.write_u8(9);
+- pat.hash(hash);
+- }
+- Pat::Struct(pat) => {
+- hash.write_u8(10);
+- pat.hash(hash);
+- }
+- Pat::Tuple(pat) => {
+- hash.write_u8(11);
+- pat.hash(hash);
+- }
+- Pat::TupleStruct(pat) => {
+- hash.write_u8(12);
+- pat.hash(hash);
+- }
+- Pat::Type(pat) => {
+- hash.write_u8(13);
+- pat.hash(hash);
+- }
+- Pat::Verbatim(pat) => {
+- hash.write_u8(14);
+- TokenStreamHelper(pat).hash(hash);
+- }
+- Pat::Wild(pat) => {
+- hash.write_u8(15);
+- pat.hash(hash);
+- }
+- Pat::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-mod parsing {
++pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
+ use crate::path;
+
+ impl Parse for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && ({
+@@ -411,7 +305,6 @@ mod parsing {
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+@@ -434,7 +327,7 @@ mod parsing {
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+- input.call(pat_rest).map(Pat::Rest)
++ pat_range_half_open(input, begin)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -442,10 +335,11 @@ mod parsing {
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
++ let begin = input.fork();
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if input.peek(Token![..]) {
+- return pat_range(input, qself, path).map(Pat::Range);
++ return pat_range(input, begin, qself, path);
+ }
+
+ if qself.is_some() {
+@@ -487,7 +381,7 @@ mod parsing {
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+- pat_range(input, qself, path).map(Pat::Range)
++ pat_range(input, begin, qself, path)
+ } else {
+ Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+@@ -546,7 +440,7 @@ mod parsing {
+ while !content.is_empty() && !content.peek(Token![..]) {
+ let value = content.call(field_pat)?;
+ fields.push_value(value);
+- if !content.peek(Token![,]) {
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+@@ -578,6 +472,7 @@ mod parsing {
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+@@ -587,10 +482,10 @@ mod parsing {
+ || member.is_unnamed()
+ {
+ return Ok(FieldPat {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token: input.parse()?,
+- pat: input.parse()?,
++ pat: Box::new(multi_pat(input)?),
+ });
+ }
+
+@@ -610,30 +505,57 @@ mod parsing {
+ if let Some(boxed) = boxed {
+ pat = Pat::Box(PatBox {
+ attrs: Vec::new(),
+- pat: Box::new(pat),
+ box_token: boxed,
++ pat: Box::new(pat),
+ });
+ }
+
+ Ok(FieldPat {
++ attrs,
+ member: Member::Named(ident),
+- pat: Box::new(pat),
+- attrs: Vec::new(),
+ colon_token: None,
++ pat: Box::new(pat),
+ })
+ }
+
+- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
+- Ok(PatRange {
+- attrs: Vec::new(),
+- lo: Box::new(Expr::Path(ExprPath {
++ fn pat_range(
++ input: ParseStream,
++ begin: ParseBuffer,
++ qself: Option<QSelf>,
++ path: Path,
++ ) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
+ attrs: Vec::new(),
+- qself,
+- path,
+- })),
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- })
++ lo: Box::new(Expr::Path(ExprPath {
++ attrs: Vec::new(),
++ qself,
++ path,
++ })),
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
++ }
++
++ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if hi.is_some() {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ } else {
++ match limits {
++ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
++ attrs: Vec::new(),
++ dot2_token,
++ })),
++ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
++ }
++ }
+ }
+
+ fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
+@@ -642,7 +564,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -668,14 +590,21 @@ mod parsing {
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+- let lo = input.call(pat_lit_expr)?;
++ let begin = input.fork();
++ let lo = input.call(pat_lit_expr)?.unwrap();
+ if input.peek(Token![..]) {
+- Ok(Pat::Range(PatRange {
+- attrs: Vec::new(),
+- lo,
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- }))
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
++ attrs: Vec::new(),
++ lo,
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Ok(Pat::Lit(PatLit {
+ attrs: Vec::new(),
+@@ -684,7 +613,17 @@ mod parsing {
+ }
+ }
+
+- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
++ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
++ if input.is_empty()
++ || input.peek(Token![|])
++ || input.peek(Token![=>])
++ || input.peek(Token![:]) && !input.peek(Token![::])
++ || input.peek(Token![,])
++ || input.peek(Token![;])
++ {
++ return Ok(None);
++ }
++
+ let neg: Option<Token![-]> = input.parse()?;
+
+ let lookahead = input.lookahead1();
+@@ -696,7 +635,6 @@ mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ {
+ Expr::Path(input.parse()?)
+@@ -704,7 +642,7 @@ mod parsing {
+ return Err(lookahead.error());
+ };
+
+- Ok(Box::new(if let Some(neg) = neg {
++ Ok(Some(Box::new(if let Some(neg) = neg {
+ Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: UnOp::Neg(neg),
+@@ -712,7 +650,7 @@ mod parsing {
+ })
+ } else {
+ expr
+- }))
++ })))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+@@ -721,7 +659,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -737,11 +675,35 @@ mod parsing {
+ })
+ }
+
+- fn pat_rest(input: ParseStream) -> Result<PatRest> {
+- Ok(PatRest {
+- attrs: Vec::new(),
+- dot2_token: input.parse()?,
+- })
++ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
++ multi_pat_impl(input, None)
++ }
++
++ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
++ let leading_vert: Option<Token![|]> = input.parse()?;
++ multi_pat_impl(input, leading_vert)
++ }
++
++ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
++ let mut pat: Pat = input.parse()?;
++ if leading_vert.is_some()
++ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
++ {
++ let mut cases = Punctuated::new();
++ cases.push_value(pat);
++ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
++ let punct = input.parse()?;
++ cases.push_punct(punct);
++ let pat: Pat = input.parse()?;
++ cases.push_value(pat);
++ }
++ pat = Pat::Or(PatOr {
++ attrs: Vec::new(),
++ leading_vert,
++ cases,
++ });
++ }
++ Ok(pat)
+ }
+ }
+
+@@ -756,12 +718,14 @@ mod printing {
+
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+@@ -774,6 +738,7 @@ mod printing {
+
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+@@ -788,6 +753,7 @@ mod printing {
+
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -804,12 +770,14 @@ mod printing {
+
+ impl ToTokens for PatPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ private::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -818,6 +786,7 @@ mod printing {
+
+ impl ToTokens for PatBox {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.box_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -825,6 +794,7 @@ mod printing {
+
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+@@ -833,18 +803,21 @@ mod printing {
+
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.expr.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.lo.to_tokens(tokens);
+ match &self.limits {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+@@ -856,6 +829,7 @@ mod printing {
+
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -864,12 +838,14 @@ mod printing {
+
+ impl ToTokens for PatMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+@@ -877,6 +853,7 @@ mod printing {
+
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
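Editor's note (not part of the patch): the pat.rs changes above fold `A | B` alternatives into a single `Pat::Or` via the new `multi_pat` helpers, accept attributes on field patterns, and downgrade half-open or unparseable range patterns to `Pat::Rest`/`Pat::Verbatim`. A sketch of the or-pattern behavior, assuming syn 1.0 with the "full" feature:

use syn::{Arm, Pat};

fn main() {
    let arm: Arm = syn::parse_str("Some(0) | None => 0,").unwrap();
    // The alternatives are grouped into one Pat::Or with two cases.
    if let Pat::Or(or) = &arm.pat {
        assert_eq!(or.cases.len(), 2);
    } else {
        panic!("expected an or-pattern");
    }
}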
+diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
+index 8dda43ee67..15c0fcc664 100644
+--- third_party/rust/syn/src/path.rs
++++ third_party/rust/syn/src/path.rs
+@@ -2,9 +2,9 @@ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+- /// A path at which a named item is exported: `std::collections::HashMap`.
++ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+@@ -29,7 +29,7 @@ where
+ ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct PathSegment {
+ pub ident: Ident,
+@@ -52,7 +52,7 @@ where
+ ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Angle bracketed
+@@ -98,7 +98,7 @@ impl PathArguments {
+ ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum GenericArgument {
+ /// A lifetime argument.
+@@ -122,7 +122,7 @@ ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+@@ -135,7 +135,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binding (equality constraint) on an associated type: `Item = u8`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Binding {
+ pub ident: Ident,
+@@ -147,7 +147,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Constraint {
+ pub ident: Ident,
+@@ -160,7 +160,7 @@ ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+@@ -189,7 +189,7 @@ ast_struct! {
+ /// ty position = 0
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct QSelf {
+ pub lt_token: Token![<],
+@@ -291,11 +291,7 @@ pub mod parsing {
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+- if input.peek(Token![super])
+- || input.peek(Token![self])
+- || input.peek(Token![crate])
+- || input.peek(Token![extern])
+- {
++ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+@@ -358,7 +354,7 @@ pub mod parsing {
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -400,7 +396,6 @@ pub mod parsing {
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+- && !input.peek(Token![extern])
+ {
+ break;
+ }
+@@ -433,7 +428,7 @@ pub mod parsing {
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -472,7 +467,7 @@ pub mod parsing {
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
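Editor's note (not part of the patch): aside from the doc-wording tightening, path.rs above stops treating `extern` as a path-segment keyword. The `is_ident`/`get_ident` helpers documented in this hunk behave as before; a brief sketch, assuming syn 1.0 with default features:

use syn::Path;

fn main() {
    let path: Path = syn::parse_str("std::collections::HashMap").unwrap();
    assert_eq!(path.segments.len(), 3);
    assert!(path.get_ident().is_none());

    let single: Path = syn::parse_str("usize").unwrap();
    assert!(single.is_ident("usize"));
    assert_eq!(single.get_ident().unwrap(), "usize");
}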
+diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
+index 38c7bf4e82..46c82a65b1 100644
+--- third_party/rust/syn/src/punctuated.rs
++++ third_party/rust/syn/src/punctuated.rs
+@@ -22,6 +22,8 @@
+
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
++#[cfg(feature = "extra-traits")]
++use std::hash::{Hash, Hasher};
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use std::iter;
+ use std::iter::FromIterator;
+@@ -41,8 +43,6 @@ use crate::token::Token;
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+@@ -76,22 +76,19 @@ impl<T, P> Punctuated<T, P> {
+ self.iter().next()
+ }
+
++ /// Mutably borrows the first element in this sequence.
++ pub fn first_mut(&mut self) -> Option<&mut T> {
++ self.iter_mut().next()
++ }
++
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+- if self.last.is_some() {
+- self.last.as_ref().map(Box::as_ref)
+- } else {
+- self.inner.last().map(|pair| &pair.0)
+- }
++ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+- if self.last.is_some() {
+- self.last.as_mut().map(Box::as_mut)
+- } else {
+- self.inner.last_mut().map(|pair| &mut pair.0)
+- }
++ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+@@ -230,13 +227,19 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++ /// Clears the sequence of all values and punctuation, making it empty.
++ pub fn clear(&mut self) {
++ self.inner.clear();
++ self.last = None;
++ }
++
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+@@ -256,7 +259,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated_with(
+@@ -292,7 +295,7 @@ impl<T, P> Punctuated<T, P> {
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+@@ -312,7 +315,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty_with(
+@@ -338,6 +341,53 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Punctuated<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ Punctuated {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Eq for Punctuated<T, P>
++where
++ T: Eq,
++ P: Eq,
++{
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> PartialEq for Punctuated<T, P>
++where
++ T: PartialEq,
++ P: PartialEq,
++{
++ fn eq(&self, other: &Self) -> bool {
++ let Punctuated { inner, last } = self;
++ *inner == other.inner && *last == other.last
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Hash for Punctuated<T, P>
++where
++ T: Hash,
++ P: Hash,
++{
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ let Punctuated { inner, last } = self;
++ inner.hash(state);
++ last.hash(state);
++ }
++}
++
+ #[cfg(feature = "extra-traits")]
+ impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -536,7 +586,6 @@ impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+@@ -572,12 +621,24 @@ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ }
+ }
+
++impl<T, P> Clone for IntoPairs<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoPairs {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
+ /// An iterator over owned values of type `T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+ }
+@@ -606,6 +667,17 @@ impl<T> ExactSizeIterator for IntoIter<T> {
+ }
+ }
+
++impl<T> Clone for IntoIter<T>
++where
++ T: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoIter {
++ inner: self.inner.clone(),
++ }
++ }
++}
++
+ /// An iterator over borrowed values of type `&T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+@@ -799,7 +871,6 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+@@ -856,6 +927,20 @@ impl<T, P> Pair<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Pair<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ match self {
++ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
++ Pair::End(t) => Pair::End(t.clone()),
++ }
++ }
++}
++
+ impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
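Reviewer note, not part of the patch: the hunks above replace `#[derive(Clone)]` and `#[cfg_attr(...)]` on `Punctuated`, `IntoPairs`, `IntoIter` and `Pair` with hand-written, feature-gated impls. A minimal sketch of what a user of the crate sees, assuming syn 1.x with its default "parsing" and "clone-impls" features:

    use syn::punctuated::Punctuated;
    use syn::{Token, Type};

    fn main() {
        // Parse a tuple type and clone its comma-separated element list; the
        // clone goes through the manual Clone impls added in the hunks above.
        let tuple: syn::TypeTuple = syn::parse_str("(u8, String, Vec<u32>)").unwrap();
        let elems: Punctuated<Type, Token![,]> = tuple.elems;
        let copy = elems.clone();
        assert_eq!(copy.len(), 3);
        println!("cloned {} elements", copy.len());
    }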
+diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
+new file mode 100644
+index 0000000000..ccfb8b5ad0
+--- /dev/null
++++ third_party/rust/syn/src/reserved.rs
+@@ -0,0 +1,42 @@
++// Type for a syntax tree node that is reserved for future use.
++//
++// For example ExprReference contains a field `raw` of type Reserved. If `&raw
++// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
++// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
++// the possibility of breaking any code.
++
++use proc_macro2::Span;
++use std::marker::PhantomData;
++
++#[cfg(feature = "extra-traits")]
++use std::fmt::{self, Debug};
++
++ast_struct! {
++ pub struct Reserved {
++ _private: PhantomData<Span>,
++ }
++}
++
++impl Default for Reserved {
++ fn default() -> Self {
++ Reserved {
++ _private: PhantomData,
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for Reserved {
++ fn clone(&self) -> Self {
++ Reserved {
++ _private: self._private,
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl Debug for Reserved {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter.debug_struct("Reserved").finish()
++ }
++}
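Aside, not part of the patch: `Reserved` is a forward-compatibility placeholder, a private field whose type can change later without breaking callers. A self-contained sketch of the same idea; the names here are made up for illustration:

    use std::marker::PhantomData;

    // Version 1 of a public node: `raw` is reserved for future use. Because
    // the field is private to the defining crate, downstream code cannot
    // depend on its current type, so it can later become e.g. Option<RawToken>.
    pub struct ExprReferenceLike {
        pub mutability: bool,
        raw: PhantomData<()>,
    }

    impl ExprReferenceLike {
        pub fn new(mutability: bool) -> Self {
            ExprReferenceLike { mutability, raw: PhantomData }
        }
    }

    fn main() {
        let e = ExprReferenceLike::new(true);
        println!("mutable: {}", e.mutability);
    }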
+diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
+index 71ffe26b81..01591cedcb 100644
+--- third_party/rust/syn/src/spanned.rs
++++ third_party/rust/syn/src/spanned.rs
+@@ -1,7 +1,7 @@
+ //! A trait that can provide the `Span` of the complete contents of a syntax
+ //! tree node.
+ //!
+-//! *This module is available if Syn is built with both the `"parsing"` and
++//! *This module is available only if Syn is built with both the `"parsing"` and
+ //! `"printing"` features.*
+ //!
+ //! <br>
+@@ -97,7 +97,7 @@ use quote::spanned::Spanned as ToTokens;
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with both the `"parsing"` and
++/// *This trait is available only if Syn is built with both the `"parsing"` and
+ /// `"printing"` features.*
+ pub trait Spanned {
+ /// Returns a `Span` covering the complete contents of this syntax tree
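Aside, not part of the patch: the `Spanned` trait touched above is what procedural macros use to attach diagnostics to the right tokens. A minimal sketch, assuming syn 1.x with its default "parsing", "printing" and "derive" features:

    use syn::spanned::Spanned;

    fn main() {
        let ty: syn::Type = syn::parse_str("Vec<String>").unwrap();
        // In a real proc macro this span would point the compiler error at the
        // tokens the type was parsed from.
        let err = syn::Error::new(ty.span(), "this type is not supported here");
        println!("{}", err.to_compile_error());
    }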
+diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
+index e4277fdbaa..b06e843d75 100644
+--- third_party/rust/syn/src/stmt.rs
++++ third_party/rust/syn/src/stmt.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A braced block containing Rust statements.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+@@ -14,7 +14,7 @@ ast_struct! {
+ ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+@@ -33,7 +33,7 @@ ast_enum! {
+ ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -47,14 +47,15 @@ ast_struct! {
+ pub mod parsing {
+ use super::*;
+
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+- use crate::punctuated::Punctuated;
++ use proc_macro2::TokenStream;
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -106,8 +107,8 @@ pub mod parsing {
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+- while input.peek(Token![;]) {
+- input.parse::<Token![;]>()?;
++ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+@@ -146,55 +147,55 @@ pub mod parsing {
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- // TODO: optimize using advance_to
++ let mut attrs = input.call(Attribute::parse_outer)?;
++
++ // brace-style macros; paren and bracket macros get parsed as
++ // expression statements.
+ let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
++ if let Ok(path) = ahead.call(Path::parse_mod_style) {
++ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
++ input.advance_to(&ahead);
++ return stmt_mac(input, attrs, path);
++ }
++ }
+
+- if {
+- let ahead = ahead.fork();
+- // Only parse braces here; paren and bracket will get parsed as
+- // expression statements
+- ahead.call(Path::parse_mod_style).is_ok()
+- && ahead.parse::<Token![!]>().is_ok()
+- && (ahead.peek(token::Brace) || ahead.peek(Ident))
+- } {
+- stmt_mac(input)
+- } else if ahead.peek(Token![let]) {
+- stmt_local(input).map(Stmt::Local)
+- } else if ahead.peek(Token![pub])
+- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![use])
+- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
+- || ahead.peek(Token![const])
+- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
+- || ahead.peek(Token![async])
+- && (ahead.peek2(Token![unsafe])
+- || ahead.peek2(Token![extern])
+- || ahead.peek2(Token![fn]))
+- || ahead.peek(Token![fn])
+- || ahead.peek(Token![mod])
+- || ahead.peek(Token![type])
+- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
+- || ahead.peek(Token![struct])
+- || ahead.peek(Token![enum])
+- || ahead.peek(Token![union]) && ahead.peek2(Ident)
+- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+- || ahead.peek(Token![trait])
+- || ahead.peek(Token![default])
+- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
+- || ahead.peek(Token![impl])
+- || ahead.peek(Token![macro])
++ if input.peek(Token![let]) {
++ stmt_local(input, attrs).map(Stmt::Local)
++ } else if input.peek(Token![pub])
++ || input.peek(Token![crate]) && !input.peek2(Token![::])
++ || input.peek(Token![extern])
++ || input.peek(Token![use])
++ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
++ || input.peek(Token![const])
++ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
++ || input.peek(Token![async])
++ && (input.peek2(Token![unsafe])
++ || input.peek2(Token![extern])
++ || input.peek2(Token![fn]))
++ || input.peek(Token![fn])
++ || input.peek(Token![mod])
++ || input.peek(Token![type])
++ || input.peek(item::parsing::existential) && input.peek2(Token![type])
++ || input.peek(Token![struct])
++ || input.peek(Token![enum])
++ || input.peek(Token![union]) && input.peek2(Ident)
++ || input.peek(Token![auto]) && input.peek2(Token![trait])
++ || input.peek(Token![trait])
++ || input.peek(Token![default])
++ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
++ || input.peek(Token![impl])
++ || input.peek(Token![macro])
+ {
+- input.parse().map(Stmt::Item)
++ let mut item: Item = input.parse()?;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(Stmt::Item(item))
+ } else {
+- stmt_expr(input, allow_nosemi)
++ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
+- let attrs = input.call(Attribute::parse_outer)?;
+- let path = input.call(Path::parse_mod_style)?;
++ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+@@ -213,33 +214,12 @@ pub mod parsing {
+ })))
+ }
+
+- fn stmt_local(input: ParseStream) -> Result<Local> {
++ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ Ok(Local {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ let_token: input.parse()?,
+ pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+@@ -265,12 +245,19 @@ pub mod parsing {
+ })
+ }
+
+- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ fn stmt_expr(
++ input: ParseStream,
++ allow_nosemi: bool,
++ mut attrs: Vec<Attribute>,
++ ) -> Result<Stmt> {
+ let mut e = expr::parsing::expr_early(input)?;
+
+- attrs.extend(e.replace_attrs(Vec::new()));
+- e.replace_attrs(attrs);
++ let mut attr_target = &mut e;
++ while let Expr::Binary(e) = attr_target {
++ attr_target = &mut e.left;
++ }
++ attrs.extend(attr_target.replace_attrs(Vec::new()));
++ attr_target.replace_attrs(attrs);
+
+ if input.peek(Token![;]) {
+ return Ok(Stmt::Semi(e, input.parse()?));
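Reviewer note, not part of the patch: `Block::parse_within` above parses statements outside of braces, and the reworked loop now keeps stray `;` tokens as empty `Stmt::Semi` statements instead of dropping them. A hedged sketch, assuming syn 1.x with the non-default "full" feature enabled:

    use syn::parse::Parser;
    use syn::{Block, Stmt};

    fn main() {
        // `Block::parse_within` is a free parser function, so it is driven
        // through the `Parser` trait rather than `syn::parse_str`.
        let stmts: Vec<Stmt> = Block::parse_within
            .parse_str("let x = 1; ; x + 1")
            .unwrap();
        // With the change above, the lone `;` is preserved as an empty
        // `Stmt::Semi(Expr::Verbatim(..), ..)` rather than silently skipped.
        println!("parsed {} statements", stmts.len());
    }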
+diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
+index 0b8c18192f..8539378c5e 100644
+--- third_party/rust/syn/src/token.rs
++++ third_party/rust/syn/src/token.rs
+@@ -88,7 +88,6 @@
+ //! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+ //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+-use std;
+ #[cfg(feature = "extra-traits")]
+ use std::cmp;
+ #[cfg(feature = "extra-traits")]
+@@ -97,13 +96,13 @@ use std::fmt::{self, Debug};
+ use std::hash::{Hash, Hasher};
+ use std::ops::{Deref, DerefMut};
+
+-#[cfg(feature = "parsing")]
+-use proc_macro2::Delimiter;
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ use proc_macro2::Ident;
+ use proc_macro2::Span;
+ #[cfg(feature = "printing")]
+ use proc_macro2::TokenStream;
++#[cfg(feature = "parsing")]
++use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
+ #[cfg(feature = "printing")]
+ use quote::{ToTokens, TokenStreamExt};
+
+@@ -112,10 +111,8 @@ use self::private::WithSpan;
+ use crate::buffer::Cursor;
+ #[cfg(feature = "parsing")]
+ use crate::error::Result;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lifetime::Lifetime;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+ #[cfg(feature = "parsing")]
+@@ -155,21 +152,20 @@ mod private {
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for Ident {}
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
++ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! impl_token {
+- ($name:ident $display:expr) => {
++ ($display:tt $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+@@ -189,24 +185,38 @@ macro_rules! impl_token {
+ };
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lifetime "lifetime");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lit "literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitStr "string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByteStr "byte string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByte "byte literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitChar "character literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitInt "integer literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitFloat "floating point literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitBool "boolean literal");
++impl_token!("lifetime" Lifetime);
++impl_token!("literal" Lit);
++impl_token!("string literal" LitStr);
++impl_token!("byte string literal" LitByteStr);
++impl_token!("byte literal" LitByte);
++impl_token!("character literal" LitChar);
++impl_token!("integer literal" LitInt);
++impl_token!("floating point literal" LitFloat);
++impl_token!("boolean literal" LitBool);
++impl_token!("group token" proc_macro2::Group);
++
++macro_rules! impl_low_level_token {
++ ($display:tt $ty:ident $get:ident) => {
++ #[cfg(feature = "parsing")]
++ impl Token for $ty {
++ fn peek(cursor: Cursor) -> bool {
++ cursor.$get().is_some()
++ }
++
++ fn display() -> &'static str {
++ $display
++ }
++ }
++
++ #[cfg(feature = "parsing")]
++ impl private::Sealed for $ty {}
++ };
++}
++
++impl_low_level_token!("punctuation token" Punct punct);
++impl_low_level_token!("literal" Literal literal);
++impl_low_level_token!("token" TokenTree token_tree);
+
+ // Not public API.
+ #[doc(hidden)]
+@@ -233,7 +243,6 @@ impl<T: CustomToken> Token for T {
+ macro_rules! define_keywords {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+@@ -260,6 +269,16 @@ macro_rules! define_keywords {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -338,7 +357,6 @@ macro_rules! impl_deref_if_len_is_1 {
+ macro_rules! define_punctuation_structs {
+ ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[repr(C)]
+ #[$doc]
+ ///
+@@ -366,6 +384,16 @@ macro_rules! define_punctuation_structs {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -436,7 +464,6 @@ macro_rules! define_punctuation {
+ macro_rules! define_delimiters {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ pub struct $name {
+ pub span: Span,
+@@ -458,6 +485,16 @@ macro_rules! define_delimiters {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -855,7 +892,7 @@ pub mod parsing {
+ }
+
+ pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
+- let mut spans = [input.cursor().span(); 3];
++ let mut spans = [input.span(); 3];
+ punct_helper(input, token, &mut spans)?;
+ Ok(S::from_spans(&spans))
+ }
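Aside, not part of the patch: the `define_keywords!` machinery patched above is mirrored by syn's public `custom_keyword!` macro, which generates the same kind of token struct, including the hand-rolled Copy/Clone and peek support. A sketch assuming syn 1.x with default features; the `owned` keyword and `Binding` type are invented for the example:

    use syn::parse::{Parse, ParseStream, Result};

    syn::custom_keyword!(owned);

    struct Binding {
        owned: Option<owned>,
        name: syn::Ident,
    }

    impl Parse for Binding {
        fn parse(input: ParseStream) -> Result<Self> {
            // Peek for the optional keyword, then parse the identifier.
            let owned = if input.peek(owned) {
                Some(input.parse()?)
            } else {
                None
            };
            Ok(Binding { owned, name: input.parse()? })
        }
    }

    fn main() {
        let b: Binding = syn::parse_str("owned foo").unwrap();
        assert!(b.owned.is_some());
        assert_eq!(b.name.to_string(), "foo");
    }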
+diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
+index f860eebb4f..8dba0627cd 100644
+--- third_party/rust/syn/src/tt.rs
++++ third_party/rust/syn/src/tt.rs
+@@ -18,8 +18,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
+ _ => return false,
+ }
+
+- let s1 = g1.stream().clone().into_iter();
+- let mut s2 = g2.stream().clone().into_iter();
++ let s1 = g1.stream().into_iter();
++ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+@@ -60,7 +60,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
+ Delimiter::None => 3u8.hash(h),
+ }
+
+- for item in g.stream().clone() {
++ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
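Reviewer note, not part of the patch: the two hunks above only drop redundant `.clone()` calls, since `proc_macro2::Group::stream` already returns an owned `TokenStream`. A small sketch of that:

    use proc_macro2::{Delimiter, Group, TokenStream};

    fn main() {
        let inner: TokenStream = "1 + 2".parse().unwrap();
        let group = Group::new(Delimiter::Parenthesis, inner);
        // Each call to stream() hands back an owned TokenStream, so there is
        // nothing to clone before iterating over or hashing it.
        let a = group.stream();
        let b = group.stream();
        assert_eq!(a.to_string(), b.to_string());
        println!("{}", group);
    }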
+diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
+index 4ee59bda2a..fd7c97eab7 100644
+--- third_party/rust/syn/src/ty.rs
++++ third_party/rust/syn/src/ty.rs
+@@ -1,15 +1,11 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Type #manual_extra_traits {
++ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+@@ -77,7 +73,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+@@ -90,7 +86,7 @@ ast_struct! {
+ ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+@@ -107,7 +103,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type contained within invisible delimiters.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+@@ -119,7 +115,7 @@ ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+@@ -130,7 +126,7 @@ ast_struct! {
+ ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+@@ -140,7 +136,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in the type position.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeMacro {
+ pub mac: Macro,
+@@ -150,7 +146,7 @@ ast_struct! {
+ ast_struct! {
+ /// The never type: `!`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+@@ -160,7 +156,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+@@ -172,7 +168,7 @@ ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+@@ -183,7 +179,7 @@ ast_struct! {
+ ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePtr {
+ pub star_token: Token![*],
+@@ -196,7 +192,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeReference {
+ pub and_token: Token![&],
+@@ -209,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+@@ -221,7 +217,7 @@ ast_struct! {
+ /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+@@ -232,7 +228,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+@@ -240,111 +236,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Type {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Type {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Type::Array(this), Type::Array(other)) => this == other,
+- (Type::BareFn(this), Type::BareFn(other)) => this == other,
+- (Type::Group(this), Type::Group(other)) => this == other,
+- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
+- (Type::Infer(this), Type::Infer(other)) => this == other,
+- (Type::Macro(this), Type::Macro(other)) => this == other,
+- (Type::Never(this), Type::Never(other)) => this == other,
+- (Type::Paren(this), Type::Paren(other)) => this == other,
+- (Type::Path(this), Type::Path(other)) => this == other,
+- (Type::Ptr(this), Type::Ptr(other)) => this == other,
+- (Type::Reference(this), Type::Reference(other)) => this == other,
+- (Type::Slice(this), Type::Slice(other)) => this == other,
+- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
+- (Type::Tuple(this), Type::Tuple(other)) => this == other,
+- (Type::Verbatim(this), Type::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Type {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Type::Array(ty) => {
+- hash.write_u8(0);
+- ty.hash(hash);
+- }
+- Type::BareFn(ty) => {
+- hash.write_u8(1);
+- ty.hash(hash);
+- }
+- Type::Group(ty) => {
+- hash.write_u8(2);
+- ty.hash(hash);
+- }
+- Type::ImplTrait(ty) => {
+- hash.write_u8(3);
+- ty.hash(hash);
+- }
+- Type::Infer(ty) => {
+- hash.write_u8(4);
+- ty.hash(hash);
+- }
+- Type::Macro(ty) => {
+- hash.write_u8(5);
+- ty.hash(hash);
+- }
+- Type::Never(ty) => {
+- hash.write_u8(6);
+- ty.hash(hash);
+- }
+- Type::Paren(ty) => {
+- hash.write_u8(7);
+- ty.hash(hash);
+- }
+- Type::Path(ty) => {
+- hash.write_u8(8);
+- ty.hash(hash);
+- }
+- Type::Ptr(ty) => {
+- hash.write_u8(9);
+- ty.hash(hash);
+- }
+- Type::Reference(ty) => {
+- hash.write_u8(10);
+- ty.hash(hash);
+- }
+- Type::Slice(ty) => {
+- hash.write_u8(11);
+- ty.hash(hash);
+- }
+- Type::TraitObject(ty) => {
+- hash.write_u8(12);
+- ty.hash(hash);
+- }
+- Type::Tuple(ty) => {
+- hash.write_u8(13);
+- ty.hash(hash);
+- }
+- Type::Verbatim(ty) => {
+- hash.write_u8(14);
+- TokenStreamHelper(ty).hash(hash);
+- }
+- Type::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Abi {
+ pub extern_token: Token![extern],
+@@ -355,7 +250,7 @@ ast_struct! {
+ ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+@@ -377,7 +272,7 @@ ast_struct! {
+ /// }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+@@ -388,7 +283,7 @@ ast_struct! {
+ ast_enum! {
+ /// Return type of a function signature.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum ReturnType {
+ /// Return type is not specified.
+@@ -407,10 +302,13 @@ pub mod parsing {
+ use crate::ext::IdentExt;
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use proc_macro2::{Punct, Spacing, TokenTree};
++ use std::iter::FromIterator;
+
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, true)
++ let allow_plus = true;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+@@ -421,15 +319,17 @@ pub mod parsing {
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, false)
++ let allow_plus = false;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
+ return input.parse().map(Type::Group);
+ }
+
++ let begin = input.fork();
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+@@ -524,7 +424,7 @@ pub mod parsing {
+ ..trait_bound
+ })
+ }
+- other => other,
++ other @ TypeParamBound::Lifetime(_) => other,
+ }
+ }
+ _ => break,
+@@ -549,17 +449,20 @@ pub mod parsing {
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
++ || lookahead.peek(Token![extern])
+ {
+- let mut bare_fn: TypeBareFn = input.parse()?;
+- bare_fn.lifetimes = lifetimes;
+- Ok(Type::BareFn(bare_fn))
++ let allow_mut_self = true;
++ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
++ bare_fn.lifetimes = lifetimes;
++ Ok(Type::BareFn(bare_fn))
++ } else {
++ Ok(Type::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+- || input.peek(Token![extern])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+@@ -722,38 +625,58 @@ pub mod parsing {
+
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let args;
+- let allow_variadic;
+- Ok(TypeBareFn {
+- lifetimes: input.parse()?,
+- unsafety: input.parse()?,
+- abi: input.parse()?,
+- fn_token: input.parse()?,
+- paren_token: parenthesized!(args in input),
+- inputs: {
+- let mut inputs = Punctuated::new();
+- while !args.is_empty() && !args.peek(Token![...]) {
+- inputs.push_value(args.parse()?);
+- if args.is_empty() {
+- break;
+- }
+- inputs.push_punct(args.parse()?);
+- }
+- allow_variadic = inputs.empty_or_trailing();
+- inputs
+- },
+- variadic: {
+- if allow_variadic && args.peek(Token![...]) {
+- Some(Variadic {
+- attrs: Vec::new(),
++ let allow_mut_self = false;
++ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
++ let args;
++ let mut variadic = None;
++ let mut has_mut_self = false;
++
++ let bare_fn = TypeBareFn {
++ lifetimes: input.parse()?,
++ unsafety: input.parse()?,
++ abi: input.parse()?,
++ fn_token: input.parse()?,
++ paren_token: parenthesized!(args in input),
++ inputs: {
++ let mut inputs = Punctuated::new();
++
++ while !args.is_empty() {
++ let attrs = args.call(Attribute::parse_outer)?;
++
++ if inputs.empty_or_trailing() && args.peek(Token![...]) {
++ variadic = Some(Variadic {
++ attrs,
+ dots: args.parse()?,
+- })
++ });
++ break;
++ }
++
++ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
++ inputs.push_value(BareFnArg { attrs, ..arg });
+ } else {
+- None
++ has_mut_self = true;
+ }
+- },
+- output: input.call(ReturnType::without_plus)?,
+- })
++ if args.is_empty() {
++ break;
++ }
++
++ inputs.push_punct(args.parse()?);
++ }
++
++ inputs
++ },
++ variadic,
++ output: input.call(ReturnType::without_plus)?,
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(bare_fn))
+ }
+ }
+
+@@ -776,9 +699,27 @@ pub mod parsing {
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
++ let paren_token = parenthesized!(content in input);
++
++ if content.is_empty() {
++ return Ok(TypeTuple {
++ paren_token,
++ elems: Punctuated::new(),
++ });
++ }
++
++ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+- paren_token: parenthesized!(content in input),
+- elems: content.parse_terminated(Type::parse)?,
++ paren_token,
++ elems: {
++ let mut elems = Punctuated::new();
++ elems.push_value(first);
++ elems.push_punct(content.parse()?);
++ let rest: Punctuated<Type, Token![,]> =
++ content.parse_terminated(Parse::parse)?;
++ elems.extend(rest);
++ elems
++ },
+ })
+ }
+ }
+@@ -807,9 +748,11 @@ pub mod parsing {
+
+ impl ReturnType {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+@@ -844,10 +787,12 @@ pub mod parsing {
+
+ impl TypeTraitObject {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ Ok(TypeTraitObject {
+ dyn_token: input.parse()?,
+@@ -910,7 +855,8 @@ pub mod parsing {
+
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+ }
+
+@@ -926,22 +872,72 @@ pub mod parsing {
+
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Ok(BareFnArg {
+- attrs: input.call(Attribute::parse_outer)?,
+- name: {
+- if (input.peek(Ident) || input.peek(Token![_]))
+- && input.peek2(Token![:])
+- && !input.peek2(Token![::])
+- {
+- let name = input.call(Ident::parse_any)?;
+- let colon: Token![:] = input.parse()?;
+- Some((name, colon))
+- } else {
+- None
+- }
+- },
+- ty: input.parse()?,
+- })
++ let allow_mut_self = false;
++ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn_arg(
++ input: ParseStream,
++ mut allow_mut_self: bool,
++ ) -> Result<Option<BareFnArg>> {
++ let mut has_mut_self = false;
++ let arg = BareFnArg {
++ attrs: input.call(Attribute::parse_outer)?,
++ name: {
++ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
++ && input.peek2(Token![:])
++ && !input.peek2(Token![::])
++ {
++ let name = input.call(Ident::parse_any)?;
++ let colon: Token![:] = input.parse()?;
++ Some((name, colon))
++ } else if allow_mut_self
++ && input.peek(Token![mut])
++ && input.peek2(Token![self])
++ && input.peek3(Token![:])
++ && !input.peek3(Token![::])
++ {
++ has_mut_self = true;
++ allow_mut_self = false;
++ input.parse::<Token![mut]>()?;
++ input.parse::<Token![self]>()?;
++ input.parse::<Token![:]>()?;
++ None
++ } else {
++ None
++ }
++ },
++ ty: if !has_mut_self && input.peek(Token![...]) {
++ let dot3 = input.parse::<Token![...]>()?;
++ let args = vec![
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
++ ];
++ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
++ |(mut arg, span)| {
++ arg.set_span(*span);
++ arg
++ },
++ ));
++ Type::Verbatim(tokens)
++ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
++ has_mut_self = true;
++ input.parse::<Token![mut]>()?;
++ Type::Path(TypePath {
++ qself: None,
++ path: input.parse::<Token![self]>()?.into(),
++ })
++ } else {
++ input.parse()?
++ },
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(arg))
+ }
+ }
+
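Reviewer note, not part of the patch: the reworked `TypeTuple` parser above requires at least one comma inside the parentheses, so a single parenthesized type stays a `Type::Paren`. A sketch of the observable behaviour, assuming syn 1.x with default features:

    use syn::Type;

    fn main() {
        // `(u8)` is a parenthesized type; only `(u8,)` is a one-element tuple.
        match syn::parse_str::<Type>("(u8)").unwrap() {
            Type::Paren(_) => println!("(u8) is Type::Paren"),
            _ => println!("(u8) parsed as something else"),
        }
        match syn::parse_str::<Type>("(u8,)").unwrap() {
            Type::Tuple(t) => println!("(u8,) is Type::Tuple with {} element(s)", t.elems.len()),
            _ => println!("(u8,) parsed as something else"),
        }
    }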
+diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
+new file mode 100644
+index 0000000000..0686352f7a
+--- /dev/null
++++ third_party/rust/syn/src/verbatim.rs
+@@ -0,0 +1,15 @@
++use crate::parse::{ParseBuffer, ParseStream};
++use proc_macro2::TokenStream;
++use std::iter;
++
++pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
++ let end = end.cursor();
++ let mut cursor = begin.cursor();
++ let mut tokens = TokenStream::new();
++ while cursor != end {
++ let (tt, next) = cursor.token_tree().unwrap();
++ tokens.extend(iter::once(tt));
++ cursor = next;
++ }
++ tokens
++}
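Aside, not part of the patch: `verbatim::between` collects the raw tokens between a forked cursor and the current position; the type parser above uses it to fall back to `Type::Verbatim` for bare fn types it cannot model, such as a `mut self` receiver. A sketch of the visible effect, assuming this patched version of syn's parser:

    use syn::Type;

    fn main() {
        // `TypeBareFn` cannot represent a `mut self` receiver, so the parser
        // keeps the raw tokens via Type::Verbatim instead of erroring out.
        let ty: Type = syn::parse_str("fn(mut self: Box<Self>)").unwrap();
        match ty {
            Type::Verbatim(tokens) => println!("kept verbatim: {}", tokens),
            _ => println!("parsed structurally"),
        }
    }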
+diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
+new file mode 100644
+index 0000000000..7be082e1a2
+--- /dev/null
++++ third_party/rust/syn/src/whitespace.rs
+@@ -0,0 +1,65 @@
++pub fn skip(mut s: &str) -> &str {
++ 'skip: while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ if let Some(i) = s.find('\n') {
++ s = &s[i + 1..];
++ continue;
++ } else {
++ return "";
++ }
++ } else if s.starts_with("/**/") {
++ s = &s[4..];
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ let mut depth = 0;
++ let bytes = s.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ s = &s[i + 2..];
++ continue 'skip;
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++ return s;
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = &s[1..];
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = &s[ch.len_utf8()..];
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
+diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
+deleted file mode 100755
+index 8e0863cba6..0000000000
+--- third_party/rust/syn/tests/clone.sh
++++ /dev/null
+@@ -1,16 +0,0 @@
+-#!/bin/bash
+-
+-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
+-
+-set -euo pipefail
+-cd "$(dirname "${BASH_SOURCE[0]}")"
+-mkdir -p rust
+-touch rust/COMMIT
+-
+-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
+- rm -rf rust
+- mkdir rust
+- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
+- | tar xz --directory rust --strip-components 1
+- echo "$REV" > rust/COMMIT
+-fi
+diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
+index 13a6c36ae5..7589a07573 100644
+--- third_party/rust/syn/tests/common/eq.rs
++++ third_party/rust/syn/tests/common/eq.rs
+@@ -1,36 +1,35 @@
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
++extern crate rustc_span;
+ extern crate rustc_target;
+-extern crate syntax;
+-extern crate syntax_pos;
+
+ use std::mem;
+
+-use self::rustc_data_structures::sync::Lrc;
+-use self::rustc_data_structures::thin_vec::ThinVec;
+-use self::rustc_target::abi::FloatTy;
+-use self::rustc_target::spec::abi::Abi;
+-use self::syntax::ast::{
+- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
+- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
+- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
+- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
+- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
+- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
+- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
+- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
+- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
+- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
+- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
+- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
+- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
++use rustc_ast::ast::{
++ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
++ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
++ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
++ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
++ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
++ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
++ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
++ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
++ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
++ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
++ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
++ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
++ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
++ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
++ WhereRegionPredicate,
+ };
+-use self::syntax::parse::lexer::comments;
+-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::Spanned;
+-use self::syntax::symbol::{sym, Symbol};
+-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
+-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
++use rustc_ast::ptr::P;
++use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
++use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
++use rustc_data_structures::sync::Lrc;
++use rustc_data_structures::thin_vec::ThinVec;
++use rustc_span::source_map::Spanned;
++use rustc_span::symbol::Ident;
++use rustc_span::{Span, Symbol, SyntaxContext};
+
+ pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+@@ -86,14 +85,6 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ }
+ }
+
+-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+- fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&self.0, &other.0)
+- && SpanlessEq::eq(&self.1, &other.1)
+- && SpanlessEq::eq(&self.2, &other.2)
+- }
+-}
+-
+ macro_rules! spanless_eq_true {
+ ($name:ident) => {
+ impl SpanlessEq for $name {
+@@ -126,17 +117,19 @@ spanless_eq_partial_eq!(u16);
+ spanless_eq_partial_eq!(u128);
+ spanless_eq_partial_eq!(usize);
+ spanless_eq_partial_eq!(char);
++spanless_eq_partial_eq!(String);
+ spanless_eq_partial_eq!(Symbol);
+-spanless_eq_partial_eq!(Abi);
++spanless_eq_partial_eq!(CommentKind);
+ spanless_eq_partial_eq!(DelimToken);
++spanless_eq_partial_eq!(InlineAsmOptions);
+
+ macro_rules! spanless_eq_struct {
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ } => {
+- impl SpanlessEq for $name {
++ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $name { $($field,)* $($ignore: _,)* } = self;
+ let $name { $($field: $other,)* $($ignore: _,)* } = other;
+@@ -146,14 +139,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $next:ident
+ $($rest:ident)*
+ $(!$ignore:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ [$next other]
+ $($rest)*
+@@ -162,14 +155,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ !$next:ident
+ $(!$rest:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ $(![$ignore])*
+ ![$next]
+@@ -263,119 +256,131 @@ macro_rules! spanless_eq_enum {
+ };
+ }
+
+-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
++spanless_eq_struct!(AngleBracketedArgs; span args);
+ spanless_eq_struct!(AnonConst; id value);
+-spanless_eq_struct!(Arg; attrs ty pat id span);
+-spanless_eq_struct!(Arm; attrs pats guard body span id);
++spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+ spanless_eq_struct!(AssocTyConstraint; id ident kind span);
+-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
+-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
++spanless_eq_struct!(AttrItem; path args);
++spanless_eq_struct!(Attribute; kind id style span);
++spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+ spanless_eq_struct!(Block; stmts id rules span);
+-spanless_eq_struct!(Crate; module attrs span);
++spanless_eq_struct!(Crate; module attrs span proc_macros);
+ spanless_eq_struct!(EnumDef; variants);
+-spanless_eq_struct!(Expr; id node span attrs);
+-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
+-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
+-spanless_eq_struct!(FnDecl; inputs output c_variadic);
+-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
+-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
++spanless_eq_struct!(Expr; id kind span attrs !tokens);
++spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
++spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
++spanless_eq_struct!(FnDecl; inputs output);
++spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
++spanless_eq_struct!(FnSig; header decl span);
+ spanless_eq_struct!(ForeignMod; abi items);
+-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
++spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+ spanless_eq_struct!(Generics; params where_clause span);
+ spanless_eq_struct!(GlobalAsm; asm);
+-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
+-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
+-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
+-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
++spanless_eq_struct!(InlineAsm; template operands options line_spans);
++spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+ spanless_eq_struct!(Label; ident);
+ spanless_eq_struct!(Lifetime; id ident);
+-spanless_eq_struct!(Lit; token node span);
++spanless_eq_struct!(Lit; token kind span);
++spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
++spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
+ spanless_eq_struct!(Local; pat ty init id span attrs);
+-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
+-spanless_eq_struct!(MacroDef; tokens legacy);
+-spanless_eq_struct!(MethodSig; header decl);
++spanless_eq_struct!(MacCall; path args prior_type_ascription);
++spanless_eq_struct!(MacCallStmt; mac style attrs);
++spanless_eq_struct!(MacroDef; body macro_rules);
+ spanless_eq_struct!(Mod; inner items inline);
+ spanless_eq_struct!(MutTy; ty mutbl);
++spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
+ spanless_eq_struct!(ParenthesizedArgs; span inputs output);
+-spanless_eq_struct!(Pat; id node span);
++spanless_eq_struct!(Pat; id kind span tokens);
+ spanless_eq_struct!(Path; span segments);
+ spanless_eq_struct!(PathSegment; ident id args);
+ spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+ spanless_eq_struct!(QSelf; ty path_span position);
+-spanless_eq_struct!(Stmt; id node span);
+-spanless_eq_struct!(StructField; span ident vis id ty attrs);
++spanless_eq_struct!(Stmt; id kind span);
++spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
++spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
+ spanless_eq_struct!(Token; kind span);
+-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
+ spanless_eq_struct!(TraitRef; path ref_id);
+-spanless_eq_struct!(Ty; id node span);
++spanless_eq_struct!(Ty; id kind span);
+ spanless_eq_struct!(UseTree; prefix kind span);
+-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
++spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
+ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+-spanless_eq_struct!(WhereClause; predicates span);
++spanless_eq_struct!(WhereClause; has_where_token predicates span);
+ spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+ spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+-spanless_eq_enum!(AsmDialect; Att Intel);
++spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
++spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
++spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
++spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
+ spanless_eq_enum!(AttrStyle; Outer Inner);
+ spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
+ spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
++spanless_eq_enum!(BorrowKind; Ref Raw);
+ spanless_eq_enum!(CaptureBy; Value Ref);
+-spanless_eq_enum!(Constness; Const NotConst);
++spanless_eq_enum!(Const; Yes(0) No);
+ spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
+-spanless_eq_enum!(Defaultness; Default Final);
++spanless_eq_enum!(Defaultness; Default(0) Final);
++spanless_eq_enum!(Extern; None Implicit Explicit(0));
+ spanless_eq_enum!(FloatTy; F32 F64);
+-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
+-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
++spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
++spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+ spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+ spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
+-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
+-spanless_eq_enum!(ImplPolarity; Positive Negative);
++spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
++spanless_eq_enum!(ImplPolarity; Positive Negative(0));
++spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
++spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+ spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
+ spanless_eq_enum!(IsAuto; Yes No);
++spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
++spanless_eq_enum!(LlvmAsmDialect; Att Intel);
++spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
+ spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
+ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+ spanless_eq_enum!(Movability; Static Movable);
+-spanless_eq_enum!(Mutability; Mutable Immutable);
++spanless_eq_enum!(Mutability; Mut Not);
+ spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+ spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
++spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+ spanless_eq_enum!(StrStyle; Cooked Raw(0));
+ spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+-spanless_eq_enum!(TraitBoundModifier; None Maybe);
+-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
++spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
+ spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+ spanless_eq_enum!(UnOp; Deref Not Neg);
++spanless_eq_enum!(Unsafe; Yes(0) No);
+ spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+-spanless_eq_enum!(Unsafety; Unsafe Normal);
+ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
+ spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
++spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
+ Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
+ While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
+- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
+- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
+- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
+- Try(0) Yield(0) Err);
+-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
+- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
+- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
+- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
++ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
++ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
++ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
++ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
++spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
++ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
++ Sym(expr));
++spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
++ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
++ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
++ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
++ MacCall(0) MacroDef(0));
+ spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
++ Float(0 1) Bool(0) Err(0));
+ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
+ Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+- Paren(0) Mac(0));
++ Paren(0) MacCall(0));
+ spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+ Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+- ImplicitSelf Mac(0) Err CVarArgs);
++ ImplicitSelf MacCall(0) Err CVarArgs);
+
+ impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+@@ -414,44 +419,20 @@ impl SpanlessEq for TokenKind {
+
+ impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
+- }
+-}
+-
+-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
+- let mut tokens = Vec::new();
+- for tt in tts.clone().into_trees() {
+- let c = match tt {
+- TokenTree::Token(Token {
+- kind: TokenKind::DocComment(c),
+- ..
+- }) => c,
+- _ => {
+- tokens.push(tt);
+- continue;
++ let mut this = self.clone().into_trees();
++ let mut other = other.clone().into_trees();
++ loop {
++ let this = match this.next() {
++ None => return other.next().is_none(),
++ Some(val) => val,
++ };
++ let other = match other.next() {
++ None => return false,
++ Some(val) => val,
++ };
++ if !SpanlessEq::eq(&this, &other) {
++ return false;
+ }
+- };
+- let contents = comments::strip_doc_comment_decoration(&c.as_str());
+- let style = comments::doc_comment_style(&c.as_str());
+- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
+- if style == AttrStyle::Inner {
+- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
+ }
+- let lit = token::Lit {
+- kind: token::LitKind::Str,
+- symbol: Symbol::intern(&contents),
+- suffix: None,
+- };
+- let tts = vec![
+- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
+- TokenTree::token(TokenKind::Eq, DUMMY_SP),
+- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
+- ];
+- tokens.push(TokenTree::Delimited(
+- DelimSpan::dummy(),
+- DelimToken::Bracket,
+- tts.into_iter().collect::<TokenStream>().into(),
+- ));
+ }
+- tokens
+ }
+diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
+index 8b784beed7..a1cc80a16f 100644
+--- third_party/rust/syn/tests/common/mod.rs
++++ third_party/rust/syn/tests/common/mod.rs
+@@ -1,5 +1,6 @@
+ #![allow(dead_code)]
+
++use rayon::ThreadPoolBuilder;
+ use std::env;
+
+ pub mod eq;
+@@ -12,3 +13,15 @@ pub fn abort_after() -> usize {
+ Err(_) => usize::max_value(),
+ }
+ }
++
++/// Configure Rayon threadpool.
++pub fn rayon_init() {
++ let stack_size = match env::var("RUST_MIN_STACK") {
++ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
++ Err(_) => 20 * 1024 * 1024,
++ };
++ ThreadPoolBuilder::new()
++ .stack_size(stack_size)
++ .build_global()
++ .unwrap();
++}
+diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
+index 41d192f6fb..192828fedd 100644
+--- third_party/rust/syn/tests/common/parse.rs
++++ third_party/rust/syn/tests/common/parse.rs
+@@ -1,20 +1,20 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
++extern crate rustc_ast;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+-use self::syntax::ast;
+-use self::syntax::parse::{self, ParseSess};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::FilePathMapping;
+-use self::syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+
+ use std::panic;
+
+-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
++pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- sess.span_diagnostic.set_continue_after_error(false);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+@@ -32,7 +32,7 @@ pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+- errorf!("libsyntax panicked\n");
++ errorf!("librustc panicked\n");
+ None
+ }
+ }
+diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
+index 8450c09ecf..85a1a39079 100644
+--- third_party/rust/syn/tests/debug/gen.rs
++++ third_party/rust/syn/tests/debug/gen.rs
+@@ -2,7 +2,7 @@
+ // It is not intended for manual editing.
+
+ use super::{Lite, RefCast};
+-use std::fmt::{self, Debug};
++use std::fmt::{self, Debug, Display};
+ impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+@@ -1039,9 +1039,9 @@ impl Debug for Lite<syn::Expr> {
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+@@ -2116,9 +2116,9 @@ impl Debug for Lite<syn::ForeignItem> {
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2432,9 +2432,9 @@ impl Debug for Lite<syn::ImplItem> {
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2940,9 +2940,9 @@ impl Debug for Lite<syn::Item> {
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -3437,9 +3437,9 @@ impl Debug for Lite<syn::Lit> {
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ }
+@@ -3878,9 +3878,9 @@ impl Debug for Lite<syn::Pat> {
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+@@ -4674,9 +4674,9 @@ impl Debug for Lite<syn::TraitItem> {
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -5040,9 +5040,9 @@ impl Debug for Lite<syn::Type> {
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
+index c1180532ec..cefebacef7 100644
+--- third_party/rust/syn/tests/debug/mod.rs
++++ third_party/rust/syn/tests/debug/mod.rs
+@@ -1,10 +1,7 @@
+-extern crate proc_macro2;
+-extern crate ref_cast;
+-
+ mod gen;
+
+-use self::proc_macro2::{Ident, Literal, TokenStream};
+-use self::ref_cast::RefCast;
++use proc_macro2::{Ident, Literal, TokenStream};
++use ref_cast::RefCast;
+ use std::fmt::{self, Debug};
+ use std::ops::Deref;
+ use syn::punctuated::Punctuated;
+@@ -66,7 +63,15 @@ impl Debug for Lite<Literal> {
+
+ impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- write!(formatter, "`{}`", self.value)
++ let string = self.value.to_string();
++ if string.len() <= 80 {
++ write!(formatter, "TokenStream(`{}`)", self.value)
++ } else {
++ formatter
++ .debug_tuple("TokenStream")
++ .field(&format_args!("`{}`", string))
++ .finish()
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
+deleted file mode 100644
+index 10ac88965d..0000000000
+--- third_party/rust/syn/tests/features/error.rs
++++ /dev/null
+@@ -1 +0,0 @@
+-"Hello! You want: cargo test --release --all-features"
+diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
+deleted file mode 100644
+index 83fbe13e7e..0000000000
+--- third_party/rust/syn/tests/features/mod.rs
++++ /dev/null
+@@ -1,22 +0,0 @@
+-#[allow(unused_macros)]
+-macro_rules! hide_from_rustfmt {
+- ($mod:item) => {
+- $mod
+- };
+-}
+-
+-#[cfg(not(all(
+- feature = "derive",
+- feature = "full",
+- feature = "parsing",
+- feature = "printing",
+- feature = "visit",
+- feature = "visit-mut",
+- feature = "fold",
+- feature = "clone-impls",
+- feature = "extra-traits",
+- feature = "proc-macro",
+-)))]
+-hide_from_rustfmt! {
+- mod error;
+-}
+diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
+index c72fd01058..3994615fc4 100644
+--- third_party/rust/syn/tests/macros/mod.rs
++++ third_party/rust/syn/tests/macros/mod.rs
+@@ -1,5 +1,3 @@
+-extern crate proc_macro2;
+-
+ #[path = "../debug/mod.rs"]
+ pub mod debug;
+
+@@ -42,18 +40,18 @@ macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
+ let debug = crate::macros::debug::Lite(&$expr);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
+index c22cb03758..1d3e1f0e74 100644
+--- third_party/rust/syn/tests/repo/mod.rs
++++ third_party/rust/syn/tests/repo/mod.rs
+@@ -1,8 +1,37 @@
+-extern crate walkdir;
++mod progress;
+
+-use std::process::Command;
++use self::progress::Progress;
++use anyhow::Result;
++use flate2::read::GzDecoder;
++use std::fs;
++use std::path::Path;
++use tar::Archive;
++use walkdir::DirEntry;
+
+-use self::walkdir::DirEntry;
++const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
++
++#[rustfmt::skip]
++static EXCLUDE: &[&str] = &[
++ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
++ "test/ui/const-generics/const-expression-parameter.rs",
++
++ // Deprecated anonymous parameter syntax in traits
++ "test/ui/issues/issue-13105.rs",
++ "test/ui/issues/issue-13775.rs",
++ "test/ui/issues/issue-34074.rs",
++ "test/ui/proc-macro/trait-fn-args-2015.rs",
++
++ // Not actually test cases
++ "test/rustdoc-ui/test-compile-fail2.rs",
++ "test/rustdoc-ui/test-compile-fail3.rs",
++ "test/ui/include-single-expr-helper.rs",
++ "test/ui/include-single-expr-helper-1.rs",
++ "test/ui/issues/auxiliary/issue-21146-inc.rs",
++ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
++ "test/ui/lint/expansion-time-include.rs",
++ "test/ui/macros/auxiliary/macro-comma-support.rs",
++ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
++];
+
+ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+@@ -12,49 +41,95 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ if path.extension().map(|e| e != "rs").unwrap_or(true) {
+ return false;
+ }
+- let path_string = path.to_string_lossy();
+- let path_string = if cfg!(windows) {
+- path_string.replace('\\', "/").into()
++
++ let mut path_string = path.to_string_lossy();
++ if cfg!(windows) {
++ path_string = path_string.replace('\\', "/").into();
++ }
++ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
++ path
++ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
++ path
+ } else {
+- path_string
++ panic!("unexpected path in Rust dist: {}", path_string);
+ };
++
+ // TODO assert that parsing fails on the parse-fail cases
+- if path_string.starts_with("tests/rust/src/test/parse-fail")
+- || path_string.starts_with("tests/rust/src/test/compile-fail")
+- || path_string.starts_with("tests/rust/src/test/rustfix")
++ if path.starts_with("test/parse-fail")
++ || path.starts_with("test/compile-fail")
++ || path.starts_with("test/rustfix")
+ {
+ return false;
+ }
+
+- if path_string.starts_with("tests/rust/src/test/ui") {
+- let stderr_path = path.with_extension("stderr");
++ if path.starts_with("test/ui") {
++ let stderr_path = entry.path().with_extension("stderr");
+ if stderr_path.exists() {
+ // Expected to fail in some way
+ return false;
+ }
+ }
+
+- match path_string.as_ref() {
+- // Deprecated placement syntax
+- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
+- // Deprecated anonymous parameter syntax in traits
+- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
+- "tests/rust/src/test/ui/issues/issue-13105.rs" |
+- "tests/rust/src/test/ui/issues/issue-13775.rs" |
+- "tests/rust/src/test/ui/issues/issue-34074.rs" |
+- // Deprecated await macro syntax
+- "tests/rust/src/test/ui/async-await/await-macro.rs" |
+- // 2015-style dyn that libsyntax rejects
+- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
+- // not actually test cases
+- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
+- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
+- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
+- _ => true,
++ !EXCLUDE.contains(&path)
++}
++
++#[allow(dead_code)]
++pub fn edition(path: &Path) -> &'static str {
++ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
++ "2015"
++ } else {
++ "2018"
+ }
+ }
+
+ pub fn clone_rust() {
+- let result = Command::new("tests/clone.sh").status().unwrap();
+- assert!(result.success());
++ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
++ Err(_) => true,
++ Ok(contents) => contents.trim() != REVISION,
++ };
++ if needs_clone {
++ download_and_unpack().unwrap();
++ }
++ let mut missing = String::new();
++ let test_src = Path::new("tests/rust/src");
++ for exclude in EXCLUDE {
++ if !test_src.join(exclude).exists() {
++ missing += "\ntests/rust/src/";
++ missing += exclude;
++ }
++ }
++ if !missing.is_empty() {
++ panic!("excluded test file does not exist:{}\n", missing);
++ }
++}
++
++fn download_and_unpack() -> Result<()> {
++ let url = format!(
++ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
++ REVISION
++ );
++ let response = reqwest::blocking::get(&url)?.error_for_status()?;
++ let progress = Progress::new(response);
++ let decoder = GzDecoder::new(progress);
++ let mut archive = Archive::new(decoder);
++ let prefix = format!("rust-{}", REVISION);
++
++ let tests_rust = Path::new("tests/rust");
++ if tests_rust.exists() {
++ fs::remove_dir_all(tests_rust)?;
++ }
++
++ for entry in archive.entries()? {
++ let mut entry = entry?;
++ let path = entry.path()?;
++ if path == Path::new("pax_global_header") {
++ continue;
++ }
++ let relative = path.strip_prefix(&prefix)?;
++ let out = tests_rust.join(relative);
++ entry.unpack(&out)?;
++ }
++
++ fs::write("tests/rust/COMMIT", REVISION)?;
++ Ok(())
+ }
+diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
+new file mode 100644
+index 0000000000..28c8a44b12
+--- /dev/null
++++ third_party/rust/syn/tests/repo/progress.rs
+@@ -0,0 +1,37 @@
++use std::io::{Read, Result};
++use std::time::{Duration, Instant};
++
++pub struct Progress<R> {
++ bytes: usize,
++ tick: Instant,
++ stream: R,
++}
++
++impl<R> Progress<R> {
++ pub fn new(stream: R) -> Self {
++ Progress {
++ bytes: 0,
++ tick: Instant::now() + Duration::from_millis(2000),
++ stream,
++ }
++ }
++}
++
++impl<R: Read> Read for Progress<R> {
++ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
++ let num = self.stream.read(buf)?;
++ self.bytes += num;
++ let now = Instant::now();
++ if now > self.tick {
++ self.tick = now + Duration::from_millis(500);
++ errorf!("downloading... {} bytes\n", self.bytes);
++ }
++ Ok(num)
++ }
++}
++
++impl<R> Drop for Progress<R> {
++ fn drop(&mut self) {
++ errorf!("done ({} bytes)\n", self.bytes);
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
+index f868fbcc20..0efef5976f 100644
+--- third_party/rust/syn/tests/test_asyncness.rs
++++ third_party/rust/syn/tests/test_asyncness.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,16 +8,16 @@ fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+- ⋮Item::Fn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ asyncness: Some,
+- ⋮ ident: "process",
+- ⋮ generics: Generics,
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ asyncness: Some,
++ ident: "process",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+ }
+
+@@ -30,12 +26,12 @@ fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+- ⋮Expr::Closure {
+- ⋮ asyncness: Some,
+- ⋮ output: Default,
+- ⋮ body: Expr::Block {
+- ⋮ block: Block,
+- ⋮ },
+- ⋮}
++ Expr::Closure {
++ asyncness: Some,
++ output: Default,
++ body: Expr::Block {
++ block: Block,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
+index aff6294fc3..c26bd090ec 100644
+--- third_party/rust/syn/tests/test_attribute.rs
++++ third_party/rust/syn/tests/test_attribute.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -13,14 +9,14 @@ fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,17 +25,17 @@ fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+ }
+
+@@ -48,37 +44,37 @@ fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: false,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: false,
++ },
++ }
+ "###);
+ }
+
+@@ -87,19 +83,19 @@ fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+ }
+
+@@ -108,26 +104,26 @@ fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -136,29 +132,29 @@ fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -167,31 +163,31 @@ fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -200,68 +196,68 @@ fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -270,21 +266,63 @@ fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(Lit::Bool {
+- ⋮ value: true,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(Lit::Bool {
++ value: true,
++ }),
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_negative_lit() {
++ let meta = test("#[form(min = -1, max = 200)]");
++
++ snapshot!(meta, @r###"
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "form",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "min",
++ arguments: None,
++ },
++ ],
++ },
++ lit: -1,
++ }),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "max",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 200,
++ }),
++ ],
++ }
+ "###);
+ }
+
+diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
+index de68240166..bf1ebdb67d 100644
+--- third_party/rust/syn/tests/test_derive_input.rs
++++ third_party/rust/syn/tests/test_derive_input.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,15 +11,15 @@ fn test_unit() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "Unit",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "Unit",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -39,105 +34,105 @@ fn test_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `( Debug , Clone )`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Item",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("ident"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Ident",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("attrs"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Vec",
+- ⋮ arguments: PathArguments::AngleBracketed {
+- ⋮ args: [
+- ⋮ Type(Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Attribute",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(Debug , Clone)`),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Item",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Visibility::Public,
++ ident: Some("ident"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Ident",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("attrs"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Vec",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Attribute",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Clone",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Clone",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -151,46 +146,46 @@ fn test_union() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "MaybeUninit",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Union {
+- ⋮ fields: FieldsNamed {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("uninit"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Tuple,
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("value"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "MaybeUninit",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Union {
++ fields: FieldsNamed {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("uninit"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ Field {
++ vis: Inherited,
++ ident: Some("value"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
+
+@@ -212,118 +207,118 @@ fn test_enum() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `= r" See the std::result module documentation for details."`,
+- ⋮ },
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Result",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ ident: "E",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Enum {
+- ⋮ variants: [
+- ⋮ Variant {
+- ⋮ ident: "Ok",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Err",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "E",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Surprise",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Lit {
+- ⋮ lit: 0isize,
+- ⋮ }),
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "ProcMacroHack",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Field {
+- ⋮ base: Expr::Tuple {
+- ⋮ elems: [
+- ⋮ Expr::Lit {
+- ⋮ lit: 0,
+- ⋮ },
+- ⋮ Expr::Lit {
+- ⋮ lit: "data",
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ member: Unnamed(Index {
+- ⋮ index: 0,
+- ⋮ }),
+- ⋮ }),
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
++ },
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Result",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ Type(TypeParam {
++ ident: "E",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Enum {
++ variants: [
++ Variant {
++ ident: "Ok",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Err",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "E",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Surprise",
++ fields: Unit,
++ discriminant: Some(Expr::Lit {
++ lit: 0isize,
++ }),
++ },
++ Variant {
++ ident: "ProcMacroHack",
++ fields: Unit,
++ discriminant: Some(Expr::Field {
++ base: Expr::Tuple {
++ elems: [
++ Expr::Lit {
++ lit: 0,
++ },
++ Expr::Lit {
++ lit: "data",
++ },
++ ],
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }),
++ },
++ ],
++ },
++ }
+ "###);
+
+ let meta_items: Vec<_> = input
+@@ -333,27 +328,27 @@ fn test_enum() {
+ .collect();
+
+ snapshot!(meta_items, @r###"
+- ⋮[
+- ⋮ Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: " See the std::result module documentation for details.",
+- ⋮ },
+- ⋮ Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮]
++ [
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ lit: " See the std::result module documentation for details.",
++ },
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ }),
++ ]
+ "###);
+ }
+
+@@ -366,34 +361,34 @@ fn test_attr_with_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ leading_colon: Some,
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "attr_args",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "identity",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "Dummy",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ leading_colon: Some,
++ segments: [
++ PathSegment {
++ ident: "attr_args",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "identity",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
++ },
++ ],
++ vis: Inherited,
++ ident: "Dummy",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -407,29 +402,29 @@ fn test_attr_with_non_mod_style_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "inert",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `< T >`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inert",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`< T >`),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -443,48 +438,48 @@ fn test_attr_with_mod_style_path_with_self() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -496,55 +491,55 @@ fn test_pub_restricted() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "Z",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "n",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "u8",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "Z",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "n",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "u8",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -555,15 +550,15 @@ fn test_vis_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Crate,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Crate,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -574,24 +569,24 @@ fn test_pub_restricted_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -602,24 +597,24 @@ fn test_pub_restricted_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -630,25 +625,25 @@ fn test_pub_restricted_in_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -659,15 +654,15 @@ fn test_fields_on_unit_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -688,47 +683,47 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -737,38 +732,38 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -779,44 +774,44 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -825,34 +820,34 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -864,34 +859,34 @@ fn test_ambiguous_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "X",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "X",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
+index c8a11cec2c..b2b65a254f 100644
+--- third_party/rust/syn/tests/test_expr.rs
++++ third_party/rust/syn/tests/test_expr.rs
+@@ -1,40 +1,302 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+ #[macro_use]
+ mod macros;
+
+-use std::str::FromStr;
+-
+-use proc_macro2::TokenStream;
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
+ use syn::{Expr, ExprRange};
+
+ #[test]
+ fn test_expr_parse() {
+- let code = "..100u32";
+- let tt = TokenStream::from_str(code).unwrap();
+- let expr: Expr = syn::parse2(tt.clone()).unwrap();
+- let expr_range: ExprRange = syn::parse2(tt).unwrap();
+- assert_eq!(expr, Expr::Range(expr_range));
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as Expr, @r###"
++ Expr::Range {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
++
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as ExprRange, @r###"
++ ExprRange {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
+ }
+
+ #[test]
+ fn test_await() {
+ // Must not parse as Expr::Field.
+- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
+-
+- snapshot!(expr, @r###"
+- ⋮Expr::Await {
+- ⋮ base: Expr::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "fut",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ let tokens = quote!(fut.await);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Await {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "fut",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
++
++#[rustfmt::skip]
++#[test]
++fn test_tuple_multi_index() {
++ for &input in &[
++ "tuple.0.0",
++ "tuple .0.0",
++ "tuple. 0.0",
++ "tuple.0 .0",
++ "tuple.0. 0",
++ "tuple . 0 . 0",
++ ] {
++ snapshot!(input as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++
++ for tokens in vec![
++ quote!(tuple.0.0),
++ quote!(tuple .0.0),
++ quote!(tuple. 0.0),
++ quote!(tuple.0 .0),
++ quote!(tuple.0. 0),
++ quote!(tuple . 0 . 0),
++ ] {
++ snapshot!(tokens as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++}
++
++#[test]
++fn test_macro_variable_func() {
++ // mimics the token stream corresponding to `$fn()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ func: Expr::Group {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "outside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ func: Expr::Group {
++ expr: Expr::Path {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_macro() {
++ // mimics the token stream corresponding to `$macro!()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
++ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Macro {
++ mac: Macro {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ delimiter: Paren,
++ tokens: TokenStream(``),
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_struct() {
++ // mimics the token stream corresponding to `$struct {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Struct {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "S",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_match_arm() {
++ // mimics the token stream corresponding to `match v { _ => $expr }`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("match", Span::call_site())),
++ TokenTree::Ident(Ident::new("v", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
++ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Match {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "v",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ arms: [
++ Arm {
++ pat: Pat::Wild,
++ body: Expr::Group {
++ expr: Expr::Tuple {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "a",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ },
++ },
++ },
++ ],
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
+index 55c79e066b..b29434a147 100644
+--- third_party/rust/syn/tests/test_generics.rs
++++ third_party/rust/syn/tests/test_generics.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,90 +11,90 @@ fn test_split_for_impl() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ }),
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "b",
+- ⋮ },
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "may_dangle",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ ident: "T",
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮ }),
+- ⋮ ],
+- ⋮ eq_token: Some,
+- ⋮ default: Some(Type::Tuple),
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "a",
++ },
++ }),
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "b",
++ },
++ colon_token: Some,
++ bounds: [
++ Lifetime {
++ ident: "a",
++ },
++ ],
++ }),
++ Type(TypeParam {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "may_dangle",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ ident: "T",
++ colon_token: Some,
++ bounds: [
++ Lifetime(Lifetime {
++ ident: "a",
++ }),
++ ],
++ eq_token: Some,
++ default: Some(Type::Tuple),
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let generics = input.generics;
+@@ -131,46 +126,46 @@ fn test_split_for_impl() {
+ fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "a",
++ })
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "_",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "_",
++ })
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: Maybe,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Sized",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: Maybe,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Sized",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+ }
+
+@@ -187,76 +182,76 @@ fn test_fn_precedence_in_where_clause() {
+ };
+
+ snapshot!(input as ItemFn, @r###"
+- ⋮ItemFn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ ident: "f",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "G",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "G",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "FnOnce",
+- ⋮ arguments: PathArguments::Parenthesized {
+- ⋮ output: Type(
+- ⋮ Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ),
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Send",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ ItemFn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "G",
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "G",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "FnOnce",
++ arguments: PathArguments::Parenthesized {
++ output: Type(
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ),
++ },
++ },
++ ],
++ },
++ }),
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Send",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+@@ -270,7 +265,7 @@ fn test_fn_precedence_in_where_clause() {
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
++ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
+diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
+index 1558a47b4b..a0fe716390 100644
+--- third_party/rust/syn/tests/test_grouping.rs
++++ third_party/rust/syn/tests/test_grouping.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -28,31 +23,31 @@ fn test_grouping() {
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
++ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+- ⋮Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 1i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Binary {
+- ⋮ left: Expr::Group {
+- ⋮ expr: Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 2i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 3i32,
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ op: Mul,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 4i32,
+- ⋮ },
+- ⋮ },
+- ⋮}
++ Expr::Binary {
++ left: Expr::Lit {
++ lit: 1i32,
++ },
++ op: Add,
++ right: Expr::Binary {
++ left: Expr::Group {
++ expr: Expr::Binary {
++ left: Expr::Lit {
++ lit: 2i32,
++ },
++ op: Add,
++ right: Expr::Lit {
++ lit: 3i32,
++ },
++ },
++ },
++ op: Mul,
++ right: Expr::Lit {
++ lit: 4i32,
++ },
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
+index bec00a70c9..ee01bfcc9f 100644
+--- third_party/rust/syn/tests/test_ident.rs
++++ third_party/rust/syn/tests/test_ident.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ use proc_macro2::{Ident, Span, TokenStream};
+ use std::str::FromStr;
+ use syn::Result;
+diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
+new file mode 100644
+index 0000000000..74ac4baec6
+--- /dev/null
++++ third_party/rust/syn/tests/test_item.rs
+@@ -0,0 +1,45 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Item;
++
++#[test]
++fn test_macro_variable_attr() {
++ // mimics the token stream corresponding to `$attr fn f() {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
++ TokenTree::Ident(Ident::new("fn", Span::call_site())),
++ TokenTree::Ident(Ident::new("f", Span::call_site())),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Item, @r###"
++ Item::Fn {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "test",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
+index 1cf7157e6f..2c8359c157 100644
+--- third_party/rust/syn/tests/test_iterators.rs
++++ third_party/rust/syn/tests/test_iterators.rs
+@@ -1,10 +1,5 @@
+ use syn::punctuated::{Pair, Punctuated};
+-
+-extern crate quote;
+-#[macro_use]
+-extern crate syn;
+-
+-mod features;
++use syn::Token;
+
+ #[macro_use]
+ mod macros;
+diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
+index 1e8f49d19b..e995f2287f 100644
+--- third_party/rust/syn/tests/test_lit.rs
++++ third_party/rust/syn/tests/test_lit.rs
+@@ -1,13 +1,11 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
++#[macro_use]
++mod macros;
+
+-mod features;
+-
+-use proc_macro2::{TokenStream, TokenTree};
++use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+ use quote::ToTokens;
++use std::iter::FromIterator;
+ use std::str::FromStr;
+-use syn::Lit;
++use syn::{Lit, LitFloat, LitInt};
+
+ fn lit(s: &str) -> Lit {
+ match TokenStream::from_str(s)
+@@ -50,6 +48,9 @@ fn strings() {
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
++ test_string("\"...\"q", "...");
++ test_string("r\"...\"q", "...");
++ test_string("r##\"...\"##q", "...");
+ }
+
+ #[test]
+@@ -79,6 +80,9 @@ fn byte_strings() {
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
++ test_byte_string("b\"...\"q", b"...");
++ test_byte_string("br\"...\"q", b"...");
++ test_byte_string("br##\"...\"##q", b"...");
+ }
+
+ #[test]
+@@ -100,6 +104,7 @@ fn bytes() {
+ test_byte("b'\\t'", b'\t');
+ test_byte("b'\\''", b'\'');
+ test_byte("b'\"'", b'"');
++ test_byte("b'a'q", b'a');
+ }
+
+ #[test]
+@@ -125,6 +130,7 @@ fn chars() {
+ test_char("'\\''", '\'');
+ test_char("'\"'", '"');
+ test_char("'\\u{1F415}'", '\u{1F415}');
++ test_char("'a'q", 'a');
+ }
+
+ #[test]
+@@ -185,4 +191,59 @@ fn floats() {
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
++ test_float("9e99e99", 9e99, "e99");
++}
++
++#[test]
++fn negative() {
++ let span = Span::call_site();
++ assert_eq!("-1", LitInt::new("-1", span).to_string());
++ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
++ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
++ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
++ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
++ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
++ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
++ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
++}
++
++#[test]
++fn suffix() {
++ fn get_suffix(token: &str) -> String {
++ let lit = syn::parse_str::<Lit>(token).unwrap();
++ match lit {
++ Lit::Str(lit) => lit.suffix().to_owned(),
++ Lit::ByteStr(lit) => lit.suffix().to_owned(),
++ Lit::Byte(lit) => lit.suffix().to_owned(),
++ Lit::Char(lit) => lit.suffix().to_owned(),
++ Lit::Int(lit) => lit.suffix().to_owned(),
++ Lit::Float(lit) => lit.suffix().to_owned(),
++ _ => unimplemented!(),
++ }
++ }
++
++ assert_eq!(get_suffix("\"\"s"), "s");
++ assert_eq!(get_suffix("r\"\"r"), "r");
++ assert_eq!(get_suffix("b\"\"b"), "b");
++ assert_eq!(get_suffix("br\"\"br"), "br");
++ assert_eq!(get_suffix("r#\"\"#r"), "r");
++ assert_eq!(get_suffix("'c'c"), "c");
++ assert_eq!(get_suffix("b'b'b"), "b");
++ assert_eq!(get_suffix("1i32"), "i32");
++ assert_eq!(get_suffix("1_i32"), "i32");
++ assert_eq!(get_suffix("1.0f32"), "f32");
++ assert_eq!(get_suffix("1.0_f32"), "f32");
++}
++
++#[test]
++fn test_deep_group_empty() {
++ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
++ ))]),
++ ))]);
++
++ snapshot!(tokens as Lit, @r#""hi""# );
+ }
+diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
+index 547472d6f4..d37dda948a 100644
+--- third_party/rust/syn/tests/test_meta.rs
++++ third_party/rust/syn/tests/test_meta.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,14 +8,14 @@ fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "hello",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "hello",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,31 +25,31 @@ fn test_parse_meta_name_value() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -65,31 +61,31 @@ fn test_parse_meta_name_value_with_keyword() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -101,31 +97,31 @@ fn test_parse_meta_name_value_with_bool() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -137,35 +133,35 @@ fn test_parse_meta_item_list_lit() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -177,133 +173,133 @@ fn test_parse_meta_item_multiple() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -316,28 +312,28 @@ fn test_parse_nested_meta() {
+
+ let input = "list(name2 = 6)";
+ snapshot!(input as NestedMeta, @r###"
+- ⋮Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮})
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ })
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
+index f09495187f..57a3c7c38c 100644
+--- third_party/rust/syn/tests/test_parse_buffer.rs
++++ third_party/rust/syn/tests/test_parse_buffer.rs
+@@ -1,7 +1,7 @@
+-#[macro_use]
+-extern crate syn;
+-
++use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
++use std::iter::FromIterator;
+ use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
++use syn::{parenthesized, Token};
+
+ #[test]
+ #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
+@@ -53,3 +53,38 @@ fn smuggled_speculative_cursor_into_brackets() {
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+ }
++
++#[test]
++fn trailing_empty_none_group() {
++ fn parse(input: ParseStream) -> Result<()> {
++ input.parse::<Token![+]>()?;
++
++ let content;
++ parenthesized!(content in input);
++ content.parse::<Token![+]>()?;
++
++ Ok(())
++ }
++
++ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(
++ Delimiter::Parenthesis,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ ]),
++ )),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::new(),
++ ))]),
++ )),
++ ]);
++
++ parse.parse2(tokens).unwrap();
++}
+diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
+new file mode 100644
+index 0000000000..76bd065777
+--- /dev/null
++++ third_party/rust/syn/tests/test_parse_stream.rs
+@@ -0,0 +1,12 @@
++use syn::ext::IdentExt;
++use syn::parse::ParseStream;
++use syn::{Ident, Token};
++
++#[test]
++fn test_peek() {
++ let _ = |input: ParseStream| {
++ let _ = input.peek(Ident);
++ let _ = input.peek(Ident::peek_any);
++ let _ = input.peek(Token![::]);
++ };
++}
+diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
+index 1343aa646f..73388dd79d 100644
+--- third_party/rust/syn/tests/test_pat.rs
++++ third_party/rust/syn/tests/test_pat.rs
+@@ -1,10 +1,5 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ use quote::quote;
+-use syn::Pat;
++use syn::{Item, Pat, Stmt};
+
+ #[test]
+ fn test_pat_ident() {
+@@ -21,3 +16,23 @@ fn test_pat_path() {
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+ }
++
++#[test]
++fn test_leading_vert() {
++ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
++
++ syn::parse_str::<Item>("fn f() {}").unwrap();
++ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
++ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
++
++ syn::parse_str::<Stmt>("let | () = ();").unwrap();
++ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
++ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
++ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
++ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
++}
+diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
+new file mode 100644
+index 0000000000..2ce12066f5
+--- /dev/null
++++ third_party/rust/syn/tests/test_path.rs
+@@ -0,0 +1,52 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::{Expr, Type};
++
++#[test]
++fn parse_interpolated_leading_component() {
++ // mimics the token stream corresponding to `$mod::rest`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
++ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("rest", Span::call_site())),
++ ]);
++
++ snapshot!(tokens.clone() as Expr, @r###"
++ Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
+index 53ee66e372..a586b3fe48 100644
+--- third_party/rust/syn/tests/test_precedence.rs
++++ third_party/rust/syn/tests/test_precedence.rs
+@@ -4,35 +4,26 @@
+
+ //! The tests in this module do the following:
+ //!
+-//! 1. Parse a given expression in both `syn` and `libsyntax`.
++//! 1. Parse a given expression in both `syn` and `librustc`.
+ //! 2. Fold over the expression adding brackets around each subexpression (with
+-//! some complications - see the `syn_brackets` and `libsyntax_brackets`
++//! some complications - see the `syn_brackets` and `librustc_brackets`
+ //! methods).
+ //! 3. Serialize the `syn` expression back into a string, and re-parse it with
+-//! `libsyntax`.
++//! `librustc`.
+ //! 4. Respan all of the expressions, replacing the spans with the default
+ //! spans.
+ //! 5. Compare the expressions with one another, if they are not equal fail.
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate regex;
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
+-extern crate smallvec;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+ use regex::Regex;
+-use smallvec::smallvec;
+-use syntax::ast;
+-use syntax::ptr::P;
+-use syntax_pos::edition::Edition;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_span::edition::Edition;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -73,7 +64,7 @@ fn test_simple_precedence() {
+ continue;
+ };
+
+- let pf = match test_expressions(vec![expr]) {
++ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
+ (1, 0) => "passed",
+ (0, 1) => {
+ failed += 1;
+@@ -91,8 +82,8 @@ fn test_simple_precedence() {
+
+ /// Test expressions from rustc, like in `test_round_trip`.
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_rustc_precedence() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -118,15 +109,6 @@ fn test_rustc_precedence() {
+ return;
+ }
+
+- // Our version of `libsyntax` can't parse this tests
+- if path
+- .to_str()
+- .unwrap()
+- .ends_with("optional_comma_in_match_arm.rs")
+- {
+- return;
+- }
+-
+ let mut file = File::open(path).unwrap();
+ let mut content = String::new();
+ file.read_to_string(&mut content).unwrap();
+@@ -134,8 +116,9 @@ fn test_rustc_precedence() {
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
++ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+- test_expressions(exprs)
++ test_expressions(edition, exprs)
+ }
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+@@ -169,36 +152,36 @@ fn test_rustc_precedence() {
+ }
+ }
+
+-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
++fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ for expr in exprs {
+ let raw = quote!(#expr).to_string();
+
+- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
++ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse raw\n");
++ errorf!("\nFAIL - librustc failed to parse raw\n");
+ continue;
+ };
+
+ let syn_expr = syn_brackets(expr);
+- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
++ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
++ errorf!("\nFAIL - librustc failed to parse bracketed\n");
+ continue;
+ };
+
+- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
++ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ passed += 1;
+ } else {
+ failed += 1;
+- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
++ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
+ }
+ }
+ });
+@@ -206,54 +189,106 @@ fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
+ (passed, failed)
+ }
+
+-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
++fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
++ parse::librustc_expr(input).and_then(librustc_brackets)
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precidence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ ///
+-/// This method operates on libsyntax objects.
+-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++/// This method operates on librustc objects.
++fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++ use rustc_ast::ast::{
++ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
++ };
++ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
++ use rustc_data_structures::map_in_place::MapInPlace;
+ use rustc_data_structures::thin_vec::ThinVec;
+- use smallvec::SmallVec;
++ use rustc_span::DUMMY_SP;
+ use std::mem;
+- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
+- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
+- use syntax_pos::DUMMY_SP;
+
+ struct BracketsVisitor {
+ failed: bool,
+ };
+
++ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
++ if f.is_shorthand {
++ noop_visit_expr(&mut f.expr, vis);
++ } else {
++ vis.visit_expr(&mut f.expr);
++ }
++ vec![f]
++ }
++
++ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
++ let kind = match stmt.kind {
++ // Don't wrap toplevel expressions in statements.
++ StmtKind::Expr(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Expr(e)
++ }
++ StmtKind::Semi(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Semi(e)
++ }
++ s => s,
++ };
++
++ vec![Stmt { kind, ..stmt }]
++ }
++
++ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
++ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
++ match &mut e.kind {
++ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
++ ExprKind::Struct(path, fields, expr) => {
++ vis.visit_path(path);
++ fields.flat_map_in_place(|field| flat_map_field(field, vis));
++ visit_opt(expr, |expr| vis.visit_expr(expr));
++ vis.visit_id(&mut e.id);
++ vis.visit_span(&mut e.span);
++ visit_thin_attrs(&mut e.attrs, vis);
++ }
++ _ => noop_visit_expr(e, vis),
++ }
++ }
++
+ impl MutVisitor for BracketsVisitor {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+- match e.node {
++ match e.kind {
+ ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+- node: ExprKind::Err,
++ kind: ExprKind::Err,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
++ tokens: None,
+ }),
+ );
+- e.node = ExprKind::Paren(inner);
++ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
+- if f.is_shorthand {
+- noop_visit_expr(&mut f.expr, self);
+- } else {
+- self.visit_expr(&mut f.expr);
++ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
++ _ => noop_visit_generic_arg(arg, self),
+ }
+- SmallVec::from([f])
++ }
++
++ fn visit_block(&mut self, block: &mut P<Block>) {
++ self.visit_id(&mut block.id);
++ block
++ .stmts
++ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
++ self.visit_span(&mut block.span);
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+@@ -267,25 +302,8 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ let _ = ty;
+ }
+
+- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
+- let node = match stmt.node {
+- // Don't wrap toplevel expressions in statements.
+- StmtKind::Expr(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Expr(e)
+- }
+- StmtKind::Semi(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Semi(e)
+- }
+- s => s,
+- };
+-
+- smallvec![Stmt { node, ..stmt }]
+- }
+-
+- fn visit_mac(&mut self, mac: &mut Mac) {
+- // By default when folding over macros, libsyntax panics. This is
++ fn visit_mac(&mut self, mac: &mut MacCall) {
++ // By default when folding over macros, librustc panics. This is
+ // because it's usually not what you want, you want to run after
+ // macro expansion. We do want to do that (syn doesn't do macro
+ // expansion), so we implement visit_mac to just return the macro
+@@ -295,11 +313,11 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ }
+
+ let mut folder = BracketsVisitor { failed: false };
+- folder.visit_expr(&mut libsyntax_expr);
++ folder.visit_expr(&mut librustc_expr);
+ if folder.failed {
+ None
+ } else {
+- Some(libsyntax_expr)
++ Some(librustc_expr)
+ }
+ }
+
+@@ -318,14 +336,33 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
+ Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
+ fold_expr(self, expr)
+ }
+- node => Expr::Paren(ExprParen {
++ _ => Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+- expr: Box::new(fold_expr(self, node)),
++ expr: Box::new(fold_expr(self, expr)),
+ paren_token: token::Paren::default(),
+ }),
+ }
+ }
+
++ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_argument(self, arg),
++ }
++ }
++
++ fn fold_generic_method_argument(
++ &mut self,
++ arg: GenericMethodArgument,
++ ) -> GenericMethodArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_method_argument(self, arg),
++ }
++ }
++
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+@@ -360,7 +397,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+- self.0.push(expr);
++ match expr {
++ Expr::Verbatim(tokens) if tokens.is_empty() => {}
++ _ => self.0.push(expr),
++ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
+new file mode 100644
+index 0000000000..923df96ba9
+--- /dev/null
++++ third_party/rust/syn/tests/test_receiver.rs
+@@ -0,0 +1,127 @@
++use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
++
++#[test]
++fn test_by_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_value(self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_mut_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_mut(mut self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_ref() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_ref(self: &Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_box() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_box(self: Box<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_pin() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_pin(self: Pin<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_explicit_type() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn explicit_type(self: Pin<MyType>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn value_shorthand(self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_mut_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn mut_value_shorthand(mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_shorthand(&self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_mut_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_mut_shorthand(&mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
+index 2fc9cecd86..260dd0c3d9 100644
+--- third_party/rust/syn/tests/test_round_trip.rs
++++ third_party/rust/syn/tests/test_round_trip.rs
+@@ -2,22 +2,20 @@
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_ast;
++extern crate rustc_errors;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+-use syntax::ast;
+-use syntax::parse::{self, PResult, ParseSess};
+-use syntax::source_map::FilePathMapping;
+-use syntax_pos::edition::Edition;
+-use syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_errors::PResult;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -38,8 +36,8 @@ mod repo;
+ use common::eq::SpanlessEq;
+
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_round_trip() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -78,11 +76,12 @@ fn test_round_trip() {
+ }
+ };
+ let back = quote!(#krate).to_string();
++ let edition = repo::edition(path).parse().unwrap();
+
+ let equal = panic::catch_unwind(|| {
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- let before = match libsyntax_parse(content, &sess) {
++ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(mut diagnostic) => {
+ diagnostic.cancel();
+@@ -93,7 +92,7 @@ fn test_round_trip() {
+ errorf!("=== {}: ignore\n", path.display());
+ } else {
+ errorf!(
+- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
++ "=== {}: ignore - librustc failed to parse original content: {}\n",
+ path.display(),
+ diagnostic.message()
+ );
+@@ -101,10 +100,10 @@ fn test_round_trip() {
+ return true;
+ }
+ };
+- let after = match libsyntax_parse(back, &sess) {
++ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(mut diagnostic) => {
+- errorf!("=== {}: libsyntax failed to parse", path.display());
++ errorf!("=== {}: librustc failed to parse", path.display());
+ diagnostic.emit();
+ return false;
+ }
+@@ -130,7 +129,7 @@ fn test_round_trip() {
+ })
+ });
+ match equal {
+- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
++ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
+ Ok(true) => {}
+ Ok(false) => {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+@@ -147,7 +146,7 @@ fn test_round_trip() {
+ }
+ }
+
+-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
++fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+ let name = FileName::Custom("test_round_trip".to_string());
+ parse::parse_crate_from_source_str(name, content, sess)
+ }
+diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
+new file mode 100644
+index 0000000000..dc26b9aab3
+--- /dev/null
++++ third_party/rust/syn/tests/test_shebang.rs
+@@ -0,0 +1,59 @@
++#[macro_use]
++mod macros;
++
++#[test]
++fn test_basic() {
++ let content = "#!/usr/bin/env rustx\nfn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ shebang: Some("#!/usr/bin/env rustx"),
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_comment() {
++ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ attrs: [
++ Attribute {
++ style: Inner,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "allow",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(dead_code)`),
++ },
++ ],
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
+index aadf42e3af..180d859916 100644
+--- third_party/rust/syn/tests/test_should_parse.rs
++++ third_party/rust/syn/tests/test_should_parse.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ macro_rules! should_parse {
+ ($name:ident, { $($in:tt)* }) => {
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
+index 386d4df889..01e8401158 100644
+--- third_party/rust/syn/tests/test_size.rs
++++ third_party/rust/syn/tests/test_size.rs
+@@ -1,7 +1,5 @@
+ #![cfg(target_pointer_width = "64")]
+
+-mod features;
+-
+ use std::mem;
+ use syn::*;
+
+diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
+new file mode 100644
+index 0000000000..d68b47fd2f
+--- /dev/null
++++ third_party/rust/syn/tests/test_stmt.rs
+@@ -0,0 +1,44 @@
++#[macro_use]
++mod macros;
++
++use syn::Stmt;
++
++#[test]
++fn test_raw_operator() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Verbatim(`& raw const x`)),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_variable() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Expr::Reference {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "raw",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_invalid() {
++ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
++}
+diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
+index 70a9a72aab..5b00448af8 100644
+--- third_party/rust/syn/tests/test_token_trees.rs
++++ third_party/rust/syn/tests/test_token_trees.rs
+@@ -1,9 +1,3 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -21,7 +15,11 @@ fn test_struct() {
+ }
+ ";
+
+- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
++ snapshot!(input as TokenStream, @r###"
++ TokenStream(
++ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
++ )
++ "###);
+ }
+
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
+new file mode 100644
+index 0000000000..9cbdcd6b99
+--- /dev/null
++++ third_party/rust/syn/tests/test_ty.rs
+@@ -0,0 +1,53 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Type;
++
++#[test]
++fn test_mut_self() {
++ syn::parse_str::<Type>("fn(mut self)").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
++}
++
++#[test]
++fn test_macro_variable_type() {
++ // mimics the token stream corresponding to `$ty<T>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
++ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("T", Span::call_site())),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ ]);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "ty",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
+new file mode 100644
+index 0000000000..c3d0ac7a5b
+--- /dev/null
++++ third_party/rust/syn/tests/test_visibility.rs
+@@ -0,0 +1,145 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use std::iter::FromIterator;
++use syn::parse::{Parse, ParseStream};
++use syn::{DeriveInput, Result, Visibility};
++
++#[derive(Debug)]
++struct VisRest {
++ vis: Visibility,
++ rest: TokenStream,
++}
++
++impl Parse for VisRest {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Ok(VisRest {
++ vis: input.parse()?,
++ rest: input.parse()?,
++ })
++ }
++}
++
++macro_rules! assert_vis_parse {
++ ($input:expr, Ok($p:pat)) => {
++ assert_vis_parse!($input, Ok($p) + "");
++ };
++
++ ($input:expr, Ok($p:pat) + $rest:expr) => {
++ let expected = $rest.parse::<TokenStream>().unwrap();
++ let parse: VisRest = syn::parse_str($input).unwrap();
++
++ match parse.vis {
++ $p => {}
++ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
++ }
++
++ // NOTE: Round-trips through `to_string` to avoid potential whitespace
++ // diffs.
++ assert_eq!(parse.rest.to_string(), expected.to_string());
++ };
++
++ ($input:expr, Err) => {
++ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
++ };
++}
++
++#[test]
++fn test_pub() {
++ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
++}
++
++#[test]
++fn test_crate() {
++ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
++}
++
++#[test]
++fn test_inherited() {
++ assert_vis_parse!("", Ok(Visibility::Inherited));
++}
++
++#[test]
++fn test_in() {
++ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_crate() {
++ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_self() {
++ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_super() {
++ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_missing_in() {
++ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
++}
++
++#[test]
++fn test_missing_in_path() {
++ assert_vis_parse!("pub(in)", Err);
++}
++
++#[test]
++fn test_crate_path() {
++ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
++}
++
++#[test]
++fn test_junk_after_in() {
++ assert_vis_parse!("pub(in some::path @@garbage)", Err);
++}
++
++#[test]
++fn test_empty_group_vis() {
++ // mimics `struct S { $vis $field: () }` where $vis is empty
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("struct", Span::call_site())),
++ TokenTree::Ident(Ident::new("S", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
++ "f",
++ Span::call_site(),
++ ))]),
++ )),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as DeriveInput, @r###"
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("f"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
+index a81b3df4d0..a1a670d9ed 100644
+--- third_party/rust/syn/tests/zzz_stable.rs
++++ third_party/rust/syn/tests/zzz_stable.rs
+@@ -1,7 +1,5 @@
+ #![cfg(syn_disable_nightly_tests)]
+
+-extern crate termcolor;
+-
+ use std::io::{self, Write};
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+@@ -10,7 +8,7 @@ const MSG: &str = "\
+ ‖ WARNING:
+ ‖ This is not a nightly compiler so not all tests were able to
+ ‖ run. Syn includes tests that compare Syn's parser against the
+-‖ compiler's parser, which requires access to unstable libsyntax
++‖ compiler's parser, which requires access to unstable librustc
+ ‖ data structures and a nightly compiler.
+ ‖
+ ";
+--
+2.28.0
+
Property changes on: head/mail/thunderbird/files/patch-bug1663715
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/misc/broot/Makefile
===================================================================
--- head/misc/broot/Makefile (revision 552220)
+++ head/misc/broot/Makefile (revision 552221)
@@ -1,249 +1,250 @@
# $FreeBSD$
PORTNAME= broot
DISTVERSIONPREFIX= v
DISTVERSION= 1.0.3
+PORTREVISION= 1
CATEGORIES= misc
MAINTAINER= vulcan@wired.sh
COMMENT= Quick and easy new way to see and navigate directory trees
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libgit2.so:devel/libgit2 \
libonig.so:devel/oniguruma
RUN_DEPENDS= git:devel/git
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= canop
CARGO_CRATES= adler-0.2.3 \
adler32-1.2.0 \
aho-corasick-0.7.13 \
ansi_colours-1.0.1 \
ansi_term-0.11.0 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.1 \
base64-0.12.3 \
bet-0.3.4 \
bincode-1.3.1 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
bstr-0.2.13 \
bumpalo-3.4.0 \
bytemuck-1.4.1 \
byteorder-1.3.4 \
cast-0.2.3 \
cc-1.0.60 \
cfg-if-0.1.10 \
chrono-0.4.18 \
clap-2.33.3 \
clipboard-win-4.0.3 \
cloudabi-0.0.3 \
color_quant-1.0.1 \
constant_time_eq-0.1.5 \
crc32fast-1.2.0 \
criterion-0.3.3 \
criterion-plot-0.4.3 \
crossbeam-0.7.3 \
crossbeam-channel-0.4.4 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
csv-1.1.3 \
csv-core-0.1.10 \
custom_error-1.8.0 \
deflate-0.8.6 \
directories-2.0.2 \
dirs-2.0.2 \
dirs-sys-0.3.5 \
either-1.6.1 \
error-code-2.0.2 \
file-size-1.0.3 \
flate2-1.0.17 \
fnv-1.0.7 \
getrandom-0.1.15 \
gif-0.11.1 \
git2-0.13.11 \
glob-0.3.0 \
half-1.6.0 \
hashbrown-0.9.1 \
hermit-abi-0.1.16 \
id-arena-2.2.1 \
idna-0.2.0 \
image-0.23.10 \
indexmap-1.6.0 \
is_executable-0.1.2 \
itertools-0.9.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
jpeg-decoder-0.1.20 \
js-sys-0.3.45 \
lazy-regex-0.1.4 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
libc-0.2.77 \
libgit2-sys-0.12.13+1.0.1 \
libz-sys-1.1.2 \
line-wrap-0.1.1 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.11 \
lzw-0.10.0 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.6 \
minimad-0.6.5 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.2 \
mio-0.7.1 \
miow-0.3.5 \
ntapi-0.3.4 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
onig-6.1.0 \
onig_sys-69.5.1 \
oorandom-11.1.2 \
open-1.4.0 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
pathdiff-0.1.0 \
percent-encoding-2.1.0 \
phf-0.8.0 \
phf_generator-0.8.0 \
phf_macros-0.8.0 \
phf_shared-0.8.0 \
pkg-config-0.3.18 \
plist-1.0.0 \
plotters-0.2.15 \
png-0.16.7 \
ppv-lite86-0.2.9 \
proc-macro-hack-0.5.18 \
proc-macro2-1.0.23 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_pcg-0.2.1 \
rayon-1.4.1 \
rayon-core-1.8.1 \
redox_syscall-0.1.57 \
redox_users-0.3.5 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
rust-argon2-0.8.2 \
rustc_version-0.2.3 \
ryu-1.0.5 \
safemem-0.3.3 \
same-file-1.0.6 \
scoped_threadpool-0.1.9 \
scopeguard-1.1.0 \
secular-0.2.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.116 \
serde_cbor-0.11.1 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.1 \
simplelog-0.7.6 \
siphasher-0.3.3 \
smallvec-1.4.2 \
socket2-0.3.15 \
str-buf-1.0.4 \
strict-0.1.4 \
strsim-0.8.0 \
syn-1.0.42 \
syntect-4.4.0 \
term-0.6.1 \
termimad-0.8.27 \
terminal-clipboard-0.1.1 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
tiff-0.5.0 \
time-0.1.44 \
tinytemplate-1.1.0 \
tinyvec-0.3.4 \
toml-0.5.6 \
umask-1.0.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
users-0.10.0 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.68 \
wasm-bindgen-backend-0.2.68 \
wasm-bindgen-macro-0.2.68 \
wasm-bindgen-macro-support-0.2.68 \
wasm-bindgen-shared-0.2.68 \
web-sys-0.3.45 \
weezl-0.1.0 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
x11-clipboard-0.5.1 \
xcb-0.9.0 \
xml-rs-0.8.3 \
yaml-rust-0.4.4
CARGO_FEATURES= --no-default-features
SUB_FILES= pkg-message
PLIST_FILES= bin/${PORTNAME} \
man/man1/${PORTNAME}.1.gz
PORTDOCS= CHANGELOG.md README.md client-server.md
OPTIONS_DEFINE= CLIENT_SERVER CLIPBOARD DOCS
OPTIONS_DEFAULT= CLIENT_SERVER CLIPBOARD
CLIENT_SERVER_DESC= Enable client/server support
CLIPBOARD_DESC= Enable terminal X11 clipboard support
CLIENT_SERVER_VARS= CARGO_FEATURES+=client-server
CLIPBOARD_USES= python:3.6+,build xorg
CLIPBOARD_USE= XORG=xcb
CLIPBOARD_BINARY_ALIAS= python3=${PYTHON_CMD}
CLIPBOARD_VARS= CARGO_FEATURES+=clipboard
_BUILD_VERSION= ${DISTVERSION}
_BUILD_DATE= $$(date +'%Y/%m/%d')
pre-build:
@${MV} ${WRKSRC}/man/page ${WRKSRC}/man/${PORTNAME}.1
@${REINPLACE_CMD} -e 's|#version|${_BUILD_VERSION}|g' \
-e "s|#date|${_BUILD_DATE}|g" ${WRKSRC}/man/${PORTNAME}.1
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
${INSTALL_MAN} ${WRKSRC}/man/${PORTNAME}.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
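For context, the CLIENT_SERVER and CLIPBOARD options above route crate features through their <OPT>_VARS lines. A minimal sketch of the same wiring with a hypothetical FOO option (names illustrative, not taken from the port):
OPTIONS_DEFINE=	FOO
FOO_DESC=	Enable the foo crate feature
FOO_VARS=	CARGO_FEATURES+=foo
# With FOO enabled the build is roughly "cargo build --no-default-features
# --features foo"; with it disabled only --no-default-features remains.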
Index: head/misc/ruut/Makefile
===================================================================
--- head/misc/ruut/Makefile (revision 552220)
+++ head/misc/ruut/Makefile (revision 552221)
@@ -1,74 +1,74 @@
# $FreeBSD$
PORTNAME= ruut
DISTVERSIONPREFIX= v
DISTVERSION= 0.6.1
-PORTREVISION= 3
+PORTREVISION= 4
CATEGORIES= misc
MAINTAINER= yuri@FreeBSD.org
COMMENT= Print arbitrary trees on the command line
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= HarrisonB
CARGO_CRATES= ansi_term-0.11.0 \
atty-0.2.14 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
clap-2.33.0 \
digest-0.8.1 \
either-1.5.3 \
exitcode-1.1.2 \
fake-simd-0.1.2 \
generic-array-0.12.3 \
heck-0.3.1 \
hermit-abi-0.1.6 \
itertools-0.8.2 \
itoa-0.4.5 \
json5-0.2.5 \
libc-0.2.66 \
maplit-1.0.2 \
opaque-debug-0.2.3 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.2 \
pest_meta-2.1.3 \
proc-macro2-0.4.30 \
proc-macro2-1.0.9 \
quote-0.6.13 \
quote-1.0.3 \
ryu-1.0.2 \
serde-1.0.104 \
serde_json-1.0.47 \
sha-1-0.8.2 \
strsim-0.8.0 \
structopt-0.2.18 \
structopt-derive-0.2.18 \
syn-0.15.44 \
syn-1.0.16 \
textwrap-0.11.0 \
typenum-1.11.2 \
ucd-trie-0.1.2 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/${PORTNAME}
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/multimedia/librav1e/Makefile
===================================================================
--- head/multimedia/librav1e/Makefile (revision 552220)
+++ head/multimedia/librav1e/Makefile (revision 552221)
@@ -1,27 +1,27 @@
# $FreeBSD$
-PORTREVISION= 0
+PORTREVISION= 1
PKGNAMEPREFIX= lib
MASTERDIR= ${.CURDIR}/../rav1e
PLIST= ${.CURDIR}/pkg-plist
BUILD_DEPENDS= cargo-cbuild:devel/cargo-c
PLIST_FILES= # empty
PLIST_SUB= VERSION=${DISTVERSION:C/-.*//}
do-build:
@${CARGO_CARGO_RUN} cbuild \
${CARGO_BUILD_ARGS}
do-install:
@${CARGO_CARGO_RUN} cinstall \
--destdir "${STAGEDIR}" \
--prefix "${PREFIX}" \
${CARGO_BUILD_ARGS}
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/lib/*.so
.include "${MASTERDIR}/Makefile"
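The PLIST_SUB line above exports the stripped crate version for use in the port's separate pkg-plist. A sketch of how such an entry consumes it (the real pkg-plist ships with the port and is not part of this diff):
# pkg-plist excerpt (illustrative):
#   lib/librav1e.so.%%VERSION%%
# %%VERSION%% expands to ${DISTVERSION:C/-.*//}, i.e. 0.3.4 for this release.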
Index: head/multimedia/rav1e/Makefile
===================================================================
--- head/multimedia/rav1e/Makefile (revision 552220)
+++ head/multimedia/rav1e/Makefile (revision 552221)
@@ -1,198 +1,198 @@
# $FreeBSD$
PORTNAME= rav1e
DISTVERSIONPREFIX= v
DISTVERSION= 0.3.4
-PORTREVISION?= 0
+PORTREVISION?= 1
CATEGORIES= multimedia
MAINTAINER= jbeich@FreeBSD.org
COMMENT= Fast and safe AV1 encoder
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS+= ${BUILD_DEPENDS_${ARCH}}
BUILD_DEPENDS_amd64= nasm:devel/nasm
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= xiph
PLIST_FILES?= bin/${PORTNAME}
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
adler32-1.2.0 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
aom-sys-0.2.1 \
arbitrary-0.2.0 \
arc-swap-0.4.7 \
arg_enum_proc_macro-0.3.0 \
arrayvec-0.5.1 \
assert_cmd-1.0.1 \
atty-0.2.14 \
autocfg-1.0.1 \
backtrace-0.3.50 \
bindgen-0.54.0 \
bitflags-1.2.1 \
bitstream-io-0.8.5 \
bstr-0.2.13 \
bumpalo-3.4.0 \
bytemuck-1.4.1 \
byteorder-1.3.4 \
cast-0.2.3 \
cc-1.0.59 \
cexpr-0.4.0 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clang-sys-0.29.3 \
clap-2.33.3 \
cmake-0.1.44 \
console-0.11.3 \
crc32fast-1.2.0 \
criterion-0.3.3 \
criterion-plot-0.4.3 \
crossbeam-channel-0.4.4 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
ctor-0.1.15 \
dav1d-sys-0.3.2 \
deflate-0.8.6 \
difference-2.0.0 \
doc-comment-0.3.3 \
either-1.6.0 \
encode_unicode-0.3.6 \
env_logger-0.7.1 \
error-chain-0.10.0 \
fern-0.6.0 \
getrandom-0.1.14 \
gimli-0.22.0 \
glob-0.3.0 \
half-1.6.0 \
hermit-abi-0.1.15 \
humantime-1.3.0 \
image-0.23.9 \
interpolate_name-0.2.3 \
itertools-0.8.2 \
itertools-0.9.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
js-sys-0.3.45 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
libc-0.2.76 \
libloading-0.5.2 \
log-0.4.11 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.5 \
metadeps-1.1.2 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.1 \
nasm-rs-0.2.0 \
nom-5.1.2 \
noop_proc_macro-0.2.1 \
num-derive-0.3.2 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
object-0.20.0 \
oorandom-11.1.2 \
output_vt100-0.1.2 \
paste-0.1.18 \
paste-impl-0.1.18 \
peeking_take_while-0.1.2 \
pkg-config-0.3.18 \
plotters-0.2.15 \
png-0.16.7 \
ppv-lite86-0.2.9 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
pretty_assertions-0.6.1 \
proc-macro-hack-0.5.18 \
proc-macro2-1.0.20 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.4.0 \
rayon-core-1.8.0 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
rust_hawktracer-0.7.0 \
rust_hawktracer_normal_macro-0.4.1 \
rust_hawktracer_proc_macro-0.4.1 \
rust_hawktracer_sys-0.4.2 \
rustc-demangle-0.1.16 \
rustc-hash-1.1.0 \
rustc_version-0.2.3 \
ryu-1.0.5 \
same-file-1.0.6 \
scan_fmt-0.2.5 \
scopeguard-1.1.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.115 \
serde_cbor-0.11.1 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
shlex-0.1.1 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.1 \
simd_helpers-0.1.0 \
strsim-0.8.0 \
syn-1.0.40 \
termcolor-1.1.0 \
terminal_size-0.1.13 \
termios-0.3.2 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
time-0.1.44 \
tinytemplate-1.1.0 \
toml-0.2.1 \
toml-0.5.6 \
treeline-0.1.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
vec_map-0.8.2 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.68 \
wasm-bindgen-backend-0.2.68 \
wasm-bindgen-macro-0.2.68 \
wasm-bindgen-macro-support-0.2.68 \
wasm-bindgen-shared-0.2.68 \
web-sys-0.3.45 \
which-3.1.1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
y4m-0.5.3
post-patch:
# Extract (snapshot) version from the port instead of CARGO_PKG_VERSION
@${REINPLACE_CMD} 's/env!("VERGEN_SEMVER_LIGHTWEIGHT")/"${DISTVERSIONFULL}"/' \
${WRKSRC}/src/capi.rs ${WRKSRC}/src/lib.rs
.if !target(post-install)
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.endif
.include <bsd.port.mk>
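The substitution above stamps the port's own version into the sources in place of the value vergen would normally generate. Schematically, with DISTVERSIONFULL being the prefixed version (the surrounding Rust context in src/capi.rs and src/lib.rs is omitted):
# before:  env!("VERGEN_SEMVER_LIGHTWEIGHT")
# after:   "v0.3.4"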
Index: head/multimedia/termplay/Makefile
===================================================================
--- head/multimedia/termplay/Makefile (revision 552220)
+++ head/multimedia/termplay/Makefile (revision 552221)
@@ -1,119 +1,119 @@
# $FreeBSD$
PORTNAME= termplay
DISTVERSION= 2.0.6
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= multimedia
MASTER_SITES= CRATESIO
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= yuri@FreeBSD.org
COMMENT= Tool that converts images to ANSI sequences, plays videos in terminals
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libsixel.so:graphics/libsixel
USES= cargo gnome
USE_GNOME= glib20
USE_GSTREAMER1= yes
CARGO_FEATURES= bin
CARGO_CRATES= \
adler32-1.0.4 \
ansi_term-0.11.0 \
atty-0.2.14 \
autocfg-0.1.7 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
bitflags-1.2.1 \
byteorder-1.3.2 \
cc-1.0.49 \
cfg-if-0.1.10 \
clap-2.33.0 \
color_quant-1.0.1 \
crc32fast-1.2.0 \
crossbeam-deque-0.7.2 \
crossbeam-epoch-0.8.0 \
crossbeam-queue-0.2.1 \
crossbeam-utils-0.7.0 \
deflate-0.7.20 \
either-1.5.3 \
failure-0.1.6 \
failure_derive-0.1.6 \
futures-channel-0.3.1 \
futures-core-0.3.1 \
futures-executor-0.3.1 \
futures-macro-0.3.1 \
futures-task-0.3.1 \
futures-util-0.3.1 \
gif-0.10.3 \
glib-0.9.0 \
glib-sys-0.9.1 \
gobject-sys-0.9.1 \
gstreamer-0.15.0 \
gstreamer-app-0.15.0 \
gstreamer-app-sys-0.8.1 \
gstreamer-base-0.15.0 \
gstreamer-base-sys-0.8.1 \
gstreamer-sys-0.8.1 \
hermit-abi-0.1.6 \
image-0.22.3 \
inflate-0.4.5 \
jpeg-decoder-0.1.18 \
lazy_static-1.4.0 \
libc-0.2.66 \
lzw-0.10.0 \
memoffset-0.5.3 \
muldiv-0.2.1 \
num-derive-0.2.5 \
num-integer-0.1.41 \
num-iter-0.1.39 \
num-rational-0.2.2 \
num-traits-0.2.10 \
num_cpus-1.11.1 \
numtoa-0.1.0 \
paste-0.1.6 \
paste-impl-0.1.6 \
pin-utils-0.1.0-alpha.4 \
pkg-config-0.3.17 \
png-0.15.2 \
proc-macro-hack-0.5.11 \
proc-macro-nested-0.1.3 \
proc-macro2-0.4.30 \
proc-macro2-1.0.7 \
quote-0.6.13 \
quote-1.0.2 \
rayon-1.3.0 \
rayon-core-1.7.0 \
redox_syscall-0.1.56 \
redox_termios-0.1.1 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
scoped_threadpool-0.1.9 \
scopeguard-1.0.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
slab-0.4.2 \
strsim-0.8.0 \
syn-0.15.44 \
syn-1.0.13 \
synstructure-0.12.3 \
termion-1.5.4 \
textwrap-0.11.0 \
tiff-0.3.1 \
unicode-width-0.1.7 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/${PORTNAME}
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/net/findomain/Makefile
===================================================================
--- head/net/findomain/Makefile (revision 552220)
+++ head/net/findomain/Makefile (revision 552221)
@@ -1,266 +1,267 @@
# $FreeBSD$
PORTNAME= findomain
DISTVERSION= 2.1.4
+PORTREVISION= 1
CATEGORIES= net security
MAINTAINER= vulcan@wired.sh
COMMENT= Cross-platform subdomain enumerator
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= Findomain
GH_PROJECT= ${GH_ACCOUNT}
CARGO_CRATES= aho-corasick-0.7.10 \
ansi_term-0.11.0 \
anyhow-1.0.26 \
arrayvec-0.4.12 \
async-trait-0.1.35 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.44 \
backtrace-sys-0.1.32 \
base64-0.11.0 \
base64-0.12.1 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
bumpalo-3.1.2 \
byte-tools-0.3.1 \
byteorder-1.3.2 \
bytes-0.5.3 \
c2-chacha-0.2.3 \
cc-1.0.50 \
cfg-if-0.1.10 \
clap-2.33.3 \
cloudabi-0.1.0 \
config-0.10.1 \
core-foundation-0.6.4 \
core-foundation-sys-0.6.2 \
crossbeam-channel-0.4.3 \
crossbeam-deque-0.7.2 \
crossbeam-epoch-0.8.0 \
crossbeam-utils-0.7.0 \
crypto-mac-0.7.0 \
digest-0.8.1 \
dtoa-0.4.4 \
either-1.5.3 \
encoding_rs-0.8.22 \
enum-as-inner-0.3.0 \
failure-0.1.8 \
failure_derive-0.1.7 \
fake-simd-0.1.2 \
fallible-iterator-0.2.0 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.3.4 \
futures-channel-0.3.4 \
futures-core-0.3.4 \
futures-executor-0.3.4 \
futures-io-0.3.4 \
futures-macro-0.3.4 \
futures-sink-0.3.4 \
futures-task-0.3.4 \
futures-util-0.3.4 \
generic-array-0.12.3 \
generic-array-0.13.2 \
getrandom-0.1.14 \
h2-0.2.2 \
heck-0.3.1 \
hermit-abi-0.1.6 \
hmac-0.7.1 \
hostname-0.1.5 \
http-0.2.0 \
http-body-0.3.1 \
httparse-1.3.4 \
hyper-0.13.4 \
hyper-tls-0.4.1 \
idna-0.2.0 \
indexmap-1.3.1 \
instant-0.1.6 \
iovec-0.1.4 \
ipconfig-0.2.1 \
ipnet-2.3.0 \
itoa-0.4.4 \
js-sys-0.3.35 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
lexical-core-0.6.2 \
libc-0.2.73 \
linked-hash-map-0.3.0 \
linked-hash-map-0.5.2 \
lock_api-0.4.1 \
log-0.4.8 \
lru-cache-0.1.2 \
matches-0.1.8 \
md5-0.7.0 \
memchr-2.3.0 \
memoffset-0.5.3 \
mime-0.3.16 \
mime_guess-2.0.1 \
mio-0.6.21 \
mio-uds-0.6.7 \
miow-0.2.1 \
native-tls-0.2.3 \
net2-0.2.33 \
nodrop-0.1.14 \
nom-4.2.3 \
nom-5.1.1 \
num-traits-0.1.43 \
num-traits-0.2.11 \
num_cpus-1.12.0 \
opaque-debug-0.2.3 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
parking_lot-0.11.0 \
parking_lot_core-0.8.0 \
percent-encoding-2.1.0 \
phf-0.8.0 \
phf_shared-0.8.0 \
pin-project-0.4.7 \
pin-project-internal-0.4.7 \
pin-project-lite-0.1.4 \
pin-utils-0.1.0-alpha.4 \
pkg-config-0.3.17 \
postgres-0.17.5 \
postgres-protocol-0.5.0 \
postgres-types-0.1.2 \
ppv-lite86-0.2.6 \
proc-macro-hack-0.5.11 \
proc-macro-nested-0.1.3 \
proc-macro2-1.0.19 \
quick-error-1.2.3 \
quote-1.0.2 \
rand-0.7.3 \
rand_chacha-0.2.1 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.4.0 \
rayon-core-1.8.0 \
redox_syscall-0.1.56 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.2 \
reqwest-0.10.8 \
resolv-conf-0.6.2 \
ring-0.16.12 \
rust-ini-0.13.0 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
rustls-0.17.0 \
ryu-1.0.2 \
schannel-0.1.16 \
scopeguard-1.1.0 \
sct-0.6.0 \
security-framework-0.3.4 \
security-framework-sys-0.3.3 \
semver-0.9.0 \
semver-0.10.0 \
semver-parser-0.7.0 \
serde-0.8.23 \
serde-1.0.115 \
serde-hjson-0.9.1 \
serde_derive-1.0.115 \
serde_json-1.0.44 \
serde_test-0.8.23 \
serde_urlencoded-0.6.1 \
sha2-0.8.1 \
siphasher-0.3.1 \
slab-0.4.2 \
smallvec-1.4.1 \
socket2-0.3.11 \
sourcefile-0.1.4 \
spin-0.5.2 \
static_assertions-0.3.4 \
stringprep-0.1.2 \
strsim-0.8.0 \
subtle-1.0.0 \
syn-1.0.35 \
synstructure-0.12.3 \
tempfile-3.1.0 \
textwrap-0.11.0 \
thiserror-1.0.19 \
thiserror-impl-1.0.19 \
thread_local-1.0.1 \
time-0.1.42 \
tokio-0.2.21 \
tokio-postgres-0.5.5 \
tokio-rustls-0.13.1 \
tokio-tls-0.3.0 \
tokio-util-0.2.0 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
trust-dns-proto-0.19.5 \
trust-dns-resolver-0.19.5 \
trust-dns-rustls-0.19.5 \
try-lock-0.2.2 \
typenum-1.11.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.11 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
untrusted-0.7.0 \
url-2.1.1 \
vcpkg-0.2.8 \
vec_map-0.8.1 \
version_check-0.1.5 \
version_check-0.9.1 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.58 \
wasm-bindgen-backend-0.2.58 \
wasm-bindgen-futures-0.4.8 \
wasm-bindgen-macro-0.2.58 \
wasm-bindgen-macro-support-0.2.58 \
wasm-bindgen-shared-0.2.58 \
wasm-bindgen-webidl-0.2.58 \
web-sys-0.3.35 \
webpki-0.21.0 \
webpki-roots-0.19.0 \
weedle-0.10.0 \
widestring-0.4.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.6.2 \
winreg-0.7.0 \
winutil-0.1.1 \
ws2_32-sys-0.2.1 \
yaml-rust-0.3.5 \
yaml-rust-0.4.4
PLIST_FILES= bin/${PORTNAME} \
man/man1/${PORTNAME}.1.gz
PORTDOCS= ${DOCS:T}
OPTIONS_DEFINE= DOCS PGSQL
PGSQL_DESC= Subdomains monitoring support
PGSQL_USES= pgsql
PGSQL_VARS= WANT_PGSQL=server
DOCS= docs/create_telegram_webhook.md README.md
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
${INSTALL_MAN} ${WRKSRC}/${PORTNAME}.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC} && ${INSTALL_DATA} ${DOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/net/proby/Makefile
===================================================================
--- head/net/proby/Makefile (revision 552220)
+++ head/net/proby/Makefile (revision 552221)
@@ -1,238 +1,238 @@
# $FreeBSD$
PORTNAME= proby
DISTVERSIONPREFIX= v
DISTVERSION= 1.0.2
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= net
MAINTAINER= yuri@FreeBSD.org
COMMENT= Check whether hosts are reachable on certain ports
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= svenstaro
CARGO_CRATES= actix-codec-0.2.0 \
actix-connect-1.0.2 \
actix-http-1.0.1 \
actix-macros-0.1.2 \
actix-router-0.2.4 \
actix-rt-1.1.1 \
actix-server-1.0.3 \
actix-service-1.0.6 \
actix-testing-1.0.1 \
actix-threadpool-0.3.3 \
actix-tls-1.0.0 \
actix-utils-1.0.6 \
actix-web-2.0.0 \
actix-web-codegen-0.2.2 \
addr2line-0.13.0 \
adler-0.2.3 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
arc-swap-0.4.7 \
assert_cmd-1.0.1 \
async-trait-0.1.36 \
atty-0.2.14 \
autocfg-1.0.0 \
awc-1.0.1 \
backtrace-0.3.50 \
base64-0.11.0 \
bitflags-1.2.1 \
brotli-sys-0.3.2 \
brotli2-0.3.2 \
bumpalo-3.4.0 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.6 \
bytestring-0.1.5 \
cc-1.0.58 \
cfg-if-0.1.10 \
chrono-0.4.13 \
clap-2.33.2 \
cloudabi-0.1.0 \
copyless-0.1.5 \
crc32fast-1.2.0 \
crossbeam-channel-0.3.9 \
crossbeam-utils-0.6.6 \
curl-0.4.31 \
curl-sys-0.4.34+curl-7.71.1 \
derive_more-0.99.9 \
difference-2.0.0 \
doc-comment-0.3.3 \
dtoa-0.4.6 \
either-1.5.3 \
encoding_rs-0.8.23 \
enum-as-inner-0.3.3 \
failure-0.1.8 \
failure_derive-0.1.8 \
flate2-1.0.16 \
fnv-1.0.7 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-channel-preview-0.3.0-alpha.19 \
futures-core-0.3.5 \
futures-core-preview-0.3.0-alpha.19 \
futures-executor-0.3.5 \
futures-executor-preview-0.3.0-alpha.19 \
futures-io-0.3.5 \
futures-io-preview-0.3.0-alpha.19 \
futures-macro-0.3.5 \
futures-preview-0.3.0-alpha.19 \
futures-sink-0.3.5 \
futures-sink-preview-0.3.0-alpha.19 \
futures-task-0.3.5 \
futures-util-0.3.5 \
futures-util-preview-0.3.0-alpha.19 \
fxhash-0.2.1 \
getrandom-0.1.14 \
gimli-0.22.0 \
h2-0.2.6 \
hashbrown-0.8.2 \
heck-0.3.1 \
hermit-abi-0.1.15 \
hostname-0.3.1 \
http-0.1.21 \
http-0.2.1 \
httparse-1.3.4 \
idna-0.2.0 \
indexmap-1.5.1 \
instant-0.1.6 \
iovec-0.1.4 \
ipconfig-0.2.2 \
isahc-0.7.6 \
itoa-0.4.6 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.74 \
libnghttp2-sys-0.1.4+1.41.0 \
libz-sys-1.0.25 \
linked-hash-map-0.5.3 \
lock_api-0.4.1 \
log-0.4.11 \
lru-cache-0.1.2 \
match_cfg-0.1.0 \
matches-0.1.8 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.0 \
mio-0.6.22 \
mio-uds-0.6.8 \
miow-0.2.1 \
net2-0.2.34 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
object-0.20.0 \
once_cell-1.4.0 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
parking_lot-0.11.0 \
parking_lot_core-0.8.0 \
percent-encoding-2.1.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
port_check-0.1.5 \
ppv-lite86-0.2.8 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-1.0.19 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
resolv-conf-0.6.3 \
rustc-demangle-0.1.16 \
ryu-1.0.5 \
schannel-0.1.19 \
scopeguard-1.1.0 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
serde_with-1.4.0 \
serde_with_macros-1.1.0 \
sha1-0.6.0 \
signal-hook-registry-1.2.1 \
simplelog-0.8.0 \
slab-0.4.2 \
sluice-0.4.2 \
smallvec-1.4.1 \
socket2-0.3.12 \
strsim-0.8.0 \
structopt-0.3.16 \
structopt-derive-0.4.9 \
surf-1.0.3 \
syn-1.0.38 \
synstructure-0.12.4 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
threadpool-1.8.1 \
time-0.1.43 \
tinyvec-0.3.3 \
tokio-0.2.22 \
tokio-io-0.1.13 \
tokio-util-0.2.0 \
tokio-util-0.3.1 \
tracing-0.1.18 \
tracing-core-0.1.13 \
treeline-0.1.0 \
trust-dns-proto-0.18.0-alpha.2 \
trust-dns-resolver-0.18.0-alpha.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.3.27 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
widestring-0.4.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.6.2 \
ws2_32-sys-0.2.1
PLIST_FILES= bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/net/quiche/Makefile
===================================================================
--- head/net/quiche/Makefile (revision 552220)
+++ head/net/quiche/Makefile (revision 552221)
@@ -1,142 +1,142 @@
# $FreeBSD$
PORTNAME= quiche
DISTVERSIONPREFIX= v
DISTVERSION= ${GIT_VERSION}
DISTVERSIONSUFFIX= -g${GIT_HASH}
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= net
MAINTAINER= junho.choi@gmail.com
COMMENT= Savoury implementation of the QUIC transport protocol and HTTP/3
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/COPYING
USES= cargo
USE_LDCONFIG= yes
USE_GITHUB= yes
GH_ACCOUNT= cloudflare
GH_PROJECT= quiche
GH_TAGNAME= ${GIT_HASH}
GH_TUPLE= google:boringssl:597b810:boringssl/deps/boringssl
# git describe
GIT_VERSION= 0.5.1-6
GIT_HASH= c15dd5c
OPTIONS_DEFINE= DOCS QLOG
OPTIONS_DEFAULT=DOCS QLOG
QLOG_VARS= CARGO_FEATURES+=qlog
QLOG_DESC= Enable qlog support
PORTDOCS= README.md
CARGO_FEATURES= --no-default-features
PLIST_FILES= bin/quiche-client \
bin/quiche-server \
include/quiche.h \
lib/libquiche.a \
lib/libquiche.so
CARGO_CRATES= aho-corasick-0.7.13 \
atty-0.2.14 \
bitflags-1.2.1 \
bumpalo-3.4.0 \
cc-1.0.58 \
cfg-if-0.1.10 \
cmake-0.1.44 \
docopt-1.1.0 \
env_logger-0.6.2 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
hermit-abi-0.1.15 \
humantime-1.3.0 \
idna-0.1.5 \
iovec-0.1.4 \
itoa-0.4.6 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.74 \
libm-0.2.1 \
log-0.4.11 \
matches-0.1.8 \
memchr-2.3.3 \
mio-0.6.22 \
miow-0.2.1 \
net2-0.2.34 \
once_cell-1.4.0 \
percent-encoding-1.0.1 \
proc-macro2-1.0.19 \
quick-error-1.2.3 \
quote-1.0.7 \
regex-1.3.9 \
regex-syntax-0.6.18 \
ring-0.16.15 \
ryu-1.0.5 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.57 \
serde_with-1.4.0 \
serde_with_macros-1.1.0 \
slab-0.4.2 \
spin-0.5.2 \
strsim-0.9.3 \
syn-1.0.36 \
termcolor-1.1.0 \
thread_local-1.0.1 \
tinyvec-0.3.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
url-1.7.2 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1
LIBQUICHE_CARGOTOML= ${WRKSRC}/Cargo.toml
QUICHEAPPS_CARGOTOML= ${WRKSRC}/tools/apps/Cargo.toml
QUICHEAPPS_CARGOLOCK= ${WRKSRC}/tools/apps/Cargo.lock
CARGO_CARGOTOML= ${QUICHEAPPS_CARGOTOML}
CARGO_CARGOLOCK= ${QUICHEAPPS_CARGOLOCK}
# build libquiche and apps
do-build:
@${CARGO_CARGO_RUN} build \
--manifest-path ${LIBQUICHE_CARGOTOML} \
--verbose \
${CARGO_BUILD_ARGS}
@${CARGO_CARGO_RUN} build \
--manifest-path ${QUICHEAPPS_CARGOTOML} \
--verbose \
${CARGO_BUILD_ARGS}
# install quiche apps and libquiche
do-install:
${INSTALL_DATA} ${WRKSRC}/include/quiche.h ${STAGEDIR}${PREFIX}/include
${INSTALL_LIB} ${CARGO_TARGET_DIR}/release/libquiche.so ${STAGEDIR}${PREFIX}/lib
${INSTALL_LIB} ${CARGO_TARGET_DIR}/release/libquiche.a ${STAGEDIR}${PREFIX}/lib
${INSTALL_PROGRAM} ${CARGO_TARGET_DIR}/release/quiche-server ${STAGEDIR}${PREFIX}/bin
${INSTALL_PROGRAM} ${CARGO_TARGET_DIR}/release/quiche-client ${STAGEDIR}${PREFIX}/bin
do-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
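Because CARGO_CARGOLOCK is re-pointed at tools/apps/Cargo.lock above, the CARGO_CRATES block tracks the apps' lockfile rather than the top-level one. When it needs regenerating after an upstream update, the usual workflow is the framework's cargo-crates target (usage sketch; exact output formatting may differ):
# cd /usr/ports/net/quiche
# make cargo-crates
#   -> prints an updated CARGO_CRATES= list to paste back into this Makefile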
Index: head/net/rabbiteer/Makefile
===================================================================
--- head/net/rabbiteer/Makefile (revision 552220)
+++ head/net/rabbiteer/Makefile (revision 552221)
@@ -1,88 +1,88 @@
# $FreeBSD$
PORTNAME= rabbiteer
DISTVERSION= 1.4.1
-PORTREVISION= 23
+PORTREVISION= 24
CATEGORIES= net
MAINTAINER= dch@FreeBSD.org
COMMENT= AMQP & RabbitMQ command-line tool
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENCE-MIT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= algesten
GH_PROJECT= ${PORTNAME}-rs
GH_TAGNAME= c881238
CARGO_CRATES= aho-corasick-0.5.3 \
amq-proto-0.1.0 \
amqp-0.1.1 \
ansi_term-0.11.0 \
atty-0.2.11 \
backtrace-0.3.9 \
backtrace-sys-0.1.24 \
bit-vec-0.4.4 \
bitflags-1.0.4 \
byteorder-0.5.3 \
cc-1.0.24 \
cfg-if-0.1.5 \
clap-2.32.0 \
cloudabi-0.0.3 \
conduit-mime-types-0.7.3 \
enum_primitive-0.1.1 \
env_logger-0.3.5 \
error-chain-0.10.0 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
idna-0.1.5 \
kernel32-sys-0.2.2 \
libc-0.2.49 \
log-0.3.9 \
log-0.4.5 \
matches-0.1.8 \
memchr-0.1.11 \
num-traits-0.1.43 \
num-traits-0.2.5 \
percent-encoding-1.0.1 \
rand-0.5.5 \
rand_core-0.2.1 \
redox_syscall-0.1.40 \
redox_termios-0.1.1 \
regex-0.1.80 \
regex-syntax-0.3.9 \
rustc-demangle-0.1.9 \
rustc-serialize-0.3.24 \
strsim-0.7.0 \
termion-1.5.1 \
textwrap-0.10.0 \
thread-id-2.0.0 \
thread_local-0.2.7 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.7 \
unicode-width-0.1.5 \
url-1.7.1 \
utf8-ranges-0.1.3 \
vec_map-0.8.1 \
winapi-0.2.8 \
winapi-0.3.5 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/rabbiteer
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/rabbiteer
post-install-DOCS-on:
(cd ${WRKSRC} && ${COPYTREE_SHARE} README.md ${STAGEDIR}${DOCSDIR})
.include <bsd.port.mk>
Index: head/net/routinator/Makefile
===================================================================
--- head/net/routinator/Makefile (revision 552220)
+++ head/net/routinator/Makefile (revision 552221)
@@ -1,229 +1,229 @@
# $FreeBSD$
PORTNAME= routinator
PORTVERSION= 0.7.1
DISTVERSIONPREFIX= v
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= net
MAINTAINER= jaap@NLnetLabs.nl
COMMENT= RPKI signed route collector and validator
LICENSE= BSD3CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
ONLY_FOR_ARCHS= amd64 i386
ONLY_FOR_ARCHS_REASON= ring crate not ported to other architectures
RUN_DEPENDS= rsync:net/rsync
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= NLnetLabs
USE_RC_SUBR= routinator
SUB_FILES= pkg-message
USERS= ${PORTNAME}
GROUPS= ${PORTNAME}
SUB_LIST+= USERS=${USERS} \
GROUPS=${GROUPS}
OPTIONS_SUB= yes
CARGO_CRATES= addr2line-0.12.1 \
ansi_term-0.11.0 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.0 \
backtrace-0.3.48 \
base64-0.11.0 \
base64-0.12.1 \
bcder-0.5.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
boxfnonce-0.1.1 \
bumpalo-3.4.0 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.4 \
cc-1.0.54 \
cfg-if-0.1.10 \
chrono-0.4.11 \
clap-2.33.1 \
constant_time_eq-0.1.5 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crossbeam-queue-0.2.2 \
crossbeam-utils-0.7.2 \
daemonize-0.4.1 \
dirs-2.0.2 \
dirs-sys-0.3.5 \
dtoa-0.4.5 \
either-1.5.3 \
encoding_rs-0.8.23 \
error-chain-0.12.2 \
fern-0.6.0 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
getrandom-0.1.14 \
gimli-0.21.0 \
h2-0.2.5 \
hermit-abi-0.1.13 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
hyper-0.13.6 \
hyper-rustls-0.20.0 \
hyper-tls-0.4.1 \
idna-0.2.0 \
indexmap-1.4.0 \
iovec-0.1.4 \
itoa-0.4.5 \
js-sys-0.3.40 \
json-0.12.4 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.71 \
listenfd-0.3.3 \
log-0.4.8 \
log-reroute-0.1.5 \
matches-0.1.8 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
mio-0.6.22 \
mio-uds-0.6.8 \
miow-0.2.1 \
native-tls-0.2.4 \
net2-0.2.34 \
num-integer-0.1.42 \
num-traits-0.2.11 \
num_cpus-1.13.0 \
object-0.19.0 \
once_cell-1.4.0 \
openssl-0.10.29 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
percent-encoding-2.1.0 \
pin-project-0.4.20 \
pin-project-internal-0.4.20 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.17 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.5.16 \
proc-macro-nested-0.1.5 \
proc-macro2-1.0.18 \
quick-xml-0.18.1 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
remove_dir_all-0.5.2 \
reqwest-0.10.6 \
ring-0.16.14 \
rpki-0.9.2 \
rpki-rtr-0.1.1 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
rustls-0.17.0 \
ryu-1.0.5 \
schannel-0.1.19 \
sct-0.6.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.111 \
serde_derive-1.0.111 \
serde_json-1.0.53 \
serde_urlencoded-0.6.1 \
signal-hook-registry-1.2.0 \
slab-0.4.2 \
smallvec-1.4.0 \
socket2-0.3.12 \
spin-0.5.2 \
strsim-0.8.0 \
syn-1.0.30 \
syslog-5.0.0 \
tempfile-3.1.0 \
textwrap-0.11.0 \
thiserror-1.0.19 \
thiserror-impl-1.0.19 \
time-0.1.43 \
tokio-0.2.21 \
tokio-macros-0.2.5 \
tokio-rustls-0.13.1 \
tokio-socks-0.2.2 \
tokio-tls-0.3.1 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
try-lock-0.2.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.12 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
untrusted-0.7.1 \
unwrap-1.2.1 \
url-2.1.1 \
uuid-0.6.5 \
uuid-0.8.1 \
vcpkg-0.2.9 \
vec_map-0.8.2 \
version_check-0.9.2 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.63 \
wasm-bindgen-backend-0.2.63 \
wasm-bindgen-futures-0.4.13 \
wasm-bindgen-macro-0.2.63 \
wasm-bindgen-macro-support-0.2.63 \
wasm-bindgen-shared-0.2.63 \
web-sys-0.3.40 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1
PLIST_FILES= bin/routinator \
${ETCDIR_REL}/routinator.conf.example \
${ETCDIR_REL}/routinator.conf.system-service \
man/man1/routinator.1.gz
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/routinator
${INSTALL_MAN} ${WRKSRC}/doc/routinator.1 ${STAGEDIR}${PREFIX}/man/man1/
@${MKDIR} ${STAGEDIR}/${PREFIX}/etc/routinator
${INSTALL_DATA} ${WRKSRC}/etc/routinator.conf.system-service \
${STAGEDIR}${PREFIX}/etc/routinator/routinator.conf.system-service
${INSTALL_DATA} ${WRKSRC}/etc/routinator.conf.example \
${STAGEDIR}${PREFIX}/etc/routinator/routinator.conf.example
.include <bsd.port.mk>
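The SUB_LIST pairs above are substituted into the templates shipped with the port, i.e. the rc script named by USE_RC_SUBR and the pkg-message from SUB_FILES. A sketch of how a placeholder is consumed (the variable name is illustrative; the real template is files/routinator.in):
# files/routinator.in excerpt (illustrative):
#   routinator_user="%%USERS%%"
# %%USERS%% and %%GROUPS%% are replaced with the SUB_LIST values when the
# script is generated at build time.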
Index: head/net-im/fractal/Makefile
===================================================================
--- head/net-im/fractal/Makefile (revision 552220)
+++ head/net-im/fractal/Makefile (revision 552221)
@@ -1,46 +1,46 @@
# $FreeBSD$
PORTNAME= fractal
DISTVERSION= 4.2.2
-PORTREVISION= 10
+PORTREVISION= 11
CATEGORIES= net-im
MASTER_SITES= https://gitlab.gnome.org/World/fractal/uploads/${GL_HASH}/
MAINTAINER= greg@unrelenting.technology
COMMENT= GTK+ Matrix IM client
LICENSE= GPLv3+
LICENSE_FILE= ${WRKSRC}/LICENSE.txt
# gmake for the gettext-sys crate
BUILD_DEPENDS= cargo:lang/${RUST_DEFAULT} \
gmake:devel/gmake
LIB_DEPENDS= libdbus-1.so:devel/dbus \
libgmp.so:math/gmp \
libgspell-1.so:textproc/gspell \
libges-1.0.so:multimedia/gstreamer1-editing-services \
libhandy-0.0.so:x11-toolkits/libhandy0
USES= gettext gnome meson pkgconfig python:3.5+,build ssl tar:xz
USE_GNOME= cairo gtk30 gtksourceview4
USE_GSTREAMER1= bad
GL_HASH= 9f2b34d98cfe3c002f3afbfcbf14bc7c
BINARY_ALIAS= python3=${PYTHON_CMD}
GLIB_SCHEMAS= org.gnome.Fractal.gschema.xml
INSTALLS_ICONS= yes
# for the gettext-sys crate
MAKE_ENV+= GETTEXT_BIN_DIR=${LOCALBASE}/bin \
GETTEXT_LIB_DIR=${LOCALBASE}/lib \
GETTEXT_INCLUDE_DIR=${LOCALBASE}/include
post-patch:
# Disable vendor checksums
@${REINPLACE_CMD} -e 's/"files":{[^}]*}/"files":{}/' \
${WRKSRC}/vendor/*/.cargo-checksum.json
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/fractal
.include <bsd.port.mk>
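The post-patch reinplace above empties the per-file digest map in each vendored crate's .cargo-checksum.json so that locally modified vendor sources still pass cargo's verification. Schematically (hashes shortened and illustrative):
# vendor/<crate>/.cargo-checksum.json
#   before: {"files":{"Cargo.toml":"3fa9...","src/lib.rs":"b41c..."},"package":"77d0..."}
#   after:  {"files":{},"package":"77d0..."}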
Index: head/net-mgmt/bandwhich/Makefile
===================================================================
--- head/net-mgmt/bandwhich/Makefile (revision 552220)
+++ head/net-mgmt/bandwhich/Makefile (revision 552221)
@@ -1,277 +1,278 @@
# $FreeBSD$
PORTNAME= bandwhich
DISTVERSION= 0.19.0
+PORTREVISION= 1
CATEGORIES= net-mgmt
MAINTAINER= petteri.valkonen@iki.fi
COMMENT= Terminal bandwidth utilization tool
LICENSE= MIT
RUN_DEPENDS= lsof:sysutils/lsof
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= imsnif
GH_TUPLE= tailhook:resolv-conf:83c0f25ebcb0615550488692c5213ca1ae4acd8f:resolvconf
CARGO_CRATES= adler-0.2.3 \
adler32-1.2.0 \
aho-corasick-0.7.6 \
ansi_term-0.11.0 \
arc-swap-0.4.3 \
async-trait-0.1.21 \
atty-0.2.13 \
autocfg-0.1.7 \
autocfg-1.0.1 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
bitflags-0.5.0 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byteorder-1.3.2 \
bytes-0.5.0 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
c2-chacha-0.2.3 \
cargo-insta-0.11.0 \
cassowary-0.3.0 \
cc-1.0.47 \
cfg-if-0.1.9 \
chrono-0.4.9 \
clap-2.33.0 \
clicolors-control-1.0.1 \
cloudabi-0.0.3 \
console-0.7.7 \
console-0.8.0 \
console-0.9.1 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
derive-new-0.5.8 \
difference-2.0.0 \
digest-0.8.1 \
doc-comment-0.3.3 \
dtoa-0.4.4 \
either-1.5.3 \
encode_unicode-0.3.6 \
enum-as-inner-0.3.0 \
failure-0.1.6 \
failure_derive-0.1.6 \
fake-simd-0.1.2 \
flate2-1.0.17 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.3.1 \
futures-channel-0.3.1 \
futures-core-0.3.1 \
futures-executor-0.3.1 \
futures-io-0.3.1 \
futures-macro-0.3.1 \
futures-sink-0.3.1 \
futures-task-0.3.1 \
futures-util-0.3.1 \
generic-array-0.12.3 \
getrandom-0.1.13 \
glob-0.2.11 \
heck-0.3.1 \
hermit-abi-0.1.15 \
hex-0.4.2 \
hostname-0.3.1 \
http_req-0.7.0 \
idna-0.2.0 \
insta-0.11.0 \
insta-0.12.0 \
iovec-0.1.4 \
ipconfig-0.2.1 \
ipnetwork-0.12.8 \
ipnetwork-0.16.0 \
itoa-0.4.4 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.76 \
libflate-1.0.2 \
libflate_lz77-1.0.0 \
linked-hash-map-0.5.2 \
lock_api-0.3.4 \
log-0.3.9 \
log-0.4.8 \
lru-cache-0.1.2 \
maplit-1.0.2 \
match_cfg-0.1.0 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.2.1 \
memoffset-0.5.5 \
miniz_oxide-0.4.1 \
mio-0.6.21 \
mio-0.7.0 \
miow-0.2.1 \
miow-0.3.5 \
native-tls-0.2.4 \
net2-0.2.33 \
netstat2-0.9.0 \
ntapi-0.3.4 \
num-derive-0.3.2 \
num-integer-0.1.41 \
num-traits-0.2.8 \
num_cpus-1.13.0 \
once_cell-1.4.1 \
opaque-debug-0.2.3 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
packet-builder-0.5.0 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
percent-encoding-2.1.0 \
pest-2.1.2 \
pest_derive-2.1.0 \
pest_generator-2.1.1 \
pest_meta-2.1.2 \
pin-project-lite-0.1.1 \
pin-utils-0.1.0-alpha.4 \
pkg-config-0.3.18 \
pnet-0.26.0 \
pnet_base-0.26.0 \
pnet_datalink-0.26.0 \
pnet_macros-0.26.0 \
pnet_macros_support-0.26.0 \
pnet_packet-0.26.0 \
pnet_sys-0.26.0 \
pnet_transport-0.26.0 \
podio-0.1.7 \
ppv-lite86-0.2.6 \
proc-macro-error-0.2.6 \
proc-macro-hack-0.5.11 \
proc-macro-nested-0.1.3 \
proc-macro2-0.4.30 \
proc-macro2-1.0.6 \
procfs-0.7.9 \
quick-error-1.2.2 \
quote-0.6.13 \
quote-1.0.2 \
rand-0.6.5 \
rand-0.7.2 \
rand_chacha-0.1.1 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rayon-1.4.0 \
rayon-core-1.8.0 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
regex-1.3.1 \
regex-syntax-0.6.12 \
remove_dir_all-0.5.3 \
rle-decode-fast-1.0.1 \
rustc-demangle-0.1.16 \
rustc-serialize-0.3.24 \
ryu-1.0.2 \
same-file-1.0.5 \
schannel-0.1.19 \
scopeguard-1.0.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
serde-1.0.102 \
serde_derive-1.0.102 \
serde_json-1.0.41 \
serde_yaml-0.8.11 \
sha-1-0.8.1 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.1 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.0.0 \
socket2-0.3.11 \
strsim-0.8.0 \
structopt-0.2.18 \
structopt-0.3.4 \
structopt-derive-0.2.18 \
structopt-derive-0.3.4 \
syn-0.15.44 \
syn-1.0.11 \
synstructure-0.12.3 \
syntex-0.42.2 \
syntex_errors-0.42.0 \
syntex_pos-0.42.0 \
syntex_syntax-0.42.0 \
sysinfo-0.15.1 \
tempfile-3.1.0 \
term-0.4.6 \
termios-0.3.1 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-0.3.6 \
time-0.1.42 \
tokio-0.2.2 \
trust-dns-proto-0.18.1 \
trust-dns-resolver-0.18.1 \
tui-0.12.0 \
typenum-1.11.2 \
ucd-trie-0.1.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.9 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.0.3 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
url-2.1.0 \
uuid-0.7.4 \
uuid-0.8.1 \
vcpkg-0.2.10 \
vec_map-0.8.1 \
version_check-0.9.2 \
walkdir-2.2.9 \
wasi-0.7.0 \
widestring-0.4.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.6.2 \
ws2_32-sys-0.2.1 \
yaml-rust-0.4.3 \
zip-0.5.6
CARGO_USE_GITHUB= yes
PLIST_FILES= bin/bandwhich \
man/man1/bandwhich.1.gz
.include <bsd.port.options.mk>
.if ${ARCH} == aarch64 || ${ARCH:Marmv*} || ${ARCH:Mpowerpc*}
EXTRA_PATCHES= ${FILESDIR}/extra-patch-cargo-crates_pnet__datalink-0.26.0_src_bpf.rs
.endif
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/bandwhich
${INSTALL_MAN} ${WRKSRC}/docs/bandwhich.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
.include <bsd.port.mk>
Index: head/net-p2p/openethereum/Makefile
===================================================================
--- head/net-p2p/openethereum/Makefile (revision 552220)
+++ head/net-p2p/openethereum/Makefile (revision 552221)
@@ -1,36 +1,36 @@
# $FreeBSD$
PORTNAME= openethereum
DISTVERSIONPREFIX= v
DISTVERSION= 3.0.1
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= net-p2p
MAINTAINER= ale@FreeBSD.org
COMMENT= Fast and feature-rich multi-network Ethereum client
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/LICENSE
BROKEN_i386= fails to build
BROKEN_FreeBSD_11= fails to build
BUILD_DEPENDS= ${LOCALBASE}/bin/llvm-config90:devel/llvm90
USES= cargo
USE_GITHUB= yes
PLIST_FILES= bin/${PORTNAME}
CARGO_FEATURES= final
CARGO_TEST= yes
MAKE_ENV+= LLVM_CONFIG_PATH="${LOCALBASE}/bin/llvm-config90"
.include "${.CURDIR}/Makefile.crates"
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/security/acmed/Makefile
===================================================================
--- head/security/acmed/Makefile (revision 552220)
+++ head/security/acmed/Makefile (revision 552221)
@@ -1,148 +1,149 @@
# $FreeBSD$
PORTNAME= acmed
DISTVERSIONPREFIX= v
DISTVERSION= 0.10.0
+PORTREVISION= 1
CATEGORIES= security
MAINTAINER= greg@unrelenting.technology
COMMENT= ACME (RFC 8555) client daemon written in Rust
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE-2.0.txt
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT.txt
NOT_FOR_ARCHS= i386
NOT_FOR_ARCHS_REASON= 32-bit time-related error - see https://github.com/breard-r/acmed/issues/37
USES= cargo gmake ssl
USE_GITHUB= yes
GH_ACCOUNT= breard-r
CARGO_INSTALL_PATH= ./acmed ./tacd
CARGO_CRATES= aho-corasick-0.7.13 \
ansi_term-0.11.0 \
arrayvec-0.5.1 \
attohttpc-0.15.0 \
atty-0.2.14 \
autocfg-1.0.1 \
base64-0.12.3 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
boxfnonce-0.1.1 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytes-0.5.6 \
cc-1.0.59 \
cfg-if-0.1.10 \
clap-2.33.3 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
daemonize-0.4.1 \
digest-0.8.1 \
encoding_rs-0.8.24 \
encoding_rs_io-0.1.7 \
env_logger-0.7.1 \
error-chain-0.12.4 \
fake-simd-0.1.2 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
generic-array-0.12.3 \
getrandom-0.1.14 \
glob-0.3.0 \
handlebars-3.4.0 \
hermit-abi-0.1.15 \
http-0.2.1 \
humantime-1.3.0 \
idna-0.2.0 \
itoa-0.4.6 \
lazy_static-1.4.0 \
lexical-core-0.7.4 \
libc-0.2.76 \
log-0.4.11 \
maplit-1.0.2 \
matches-0.1.8 \
memchr-2.3.3 \
native-tls-0.2.4 \
nix-0.18.0 \
nom-5.1.2 \
opaque-debug-0.2.3 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
proc-macro2-1.0.19 \
punycode-0.4.1 \
quick-error-1.2.3 \
quick-error-2.0.0 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
ryu-1.0.5 \
schannel-0.1.19 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
sha-1-0.8.2 \
static_assertions-1.1.0 \
strsim-0.8.0 \
syn-1.0.39 \
syslog-5.0.0 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.44 \
tinyvec-0.3.4 \
toml-0.5.6 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
post-patch:
${REINPLACE_CMD} -e 's|"/etc/acmed/acmed.toml"|"${PREFIX}/etc/acmed/acmed.toml"|' \
-e 's|"/etc/acmed/accounts"|"${PREFIX}/etc/acmed/accounts"|' \
-e 's|"/etc/acmed/certs"|"${PREFIX}/etc/acmed/certs"|' \
${WRKSRC}/acmed/src/main.rs
post-install:
${INSTALL_MAN} ${WRKSRC}/man/en/acmed.8 ${STAGEDIR}${MAN8PREFIX}/man/man8/
${INSTALL_MAN} ${WRKSRC}/man/en/acmed.toml.5 ${STAGEDIR}${MAN5PREFIX}/man/man5/
${INSTALL_MAN} ${WRKSRC}/man/en/tacd.8 ${STAGEDIR}${MAN8PREFIX}/man/man8/
${MKDIR} ${STAGEDIR}${PREFIX}/etc/acmed
${INSTALL_DATA} ${WRKSRC}/acmed/config/acmed.toml ${STAGEDIR}${PREFIX}/etc/acmed/acmed.toml.sample
${INSTALL_DATA} ${WRKSRC}/acmed/config/default_hooks.toml ${STAGEDIR}${PREFIX}/etc/acmed/default_hooks.toml.sample
${MKDIR} ${STAGEDIR}${PREFIX}/etc/acmed/accounts
${MKDIR} ${STAGEDIR}${PREFIX}/etc/acmed/certs
.include <bsd.port.mk>
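CARGO_INSTALL_PATH above lists two workspace members, so the install phase handles each listed path in turn, roughly equivalent to the following (a sketch; the framework supplies its usual staging arguments):
# cargo install --path ./acmed	(installs the acmed daemon binary)
# cargo install --path ./tacd	(installs the tacd companion binary)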
Index: head/security/cargo-audit/Makefile
===================================================================
--- head/security/cargo-audit/Makefile (revision 552220)
+++ head/security/cargo-audit/Makefile (revision 552221)
@@ -1,242 +1,243 @@
# $FreeBSD$
PORTNAME= cargo-audit
DISTVERSIONPREFIX= v
DISTVERSION= 0.12.1
+PORTREVISION= 1
CATEGORIES= security
MAINTAINER= ports@FreeBSD.org
COMMENT= Audit Cargo.lock for crates with security vulnerabilities
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
LIB_DEPENDS= libgit2.so:devel/libgit2 \
libssh2.so:security/libssh2
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= RustSec
CARGO_CRATES= abscissa_core-0.5.2 \
abscissa_derive-0.5.0 \
aho-corasick-0.7.10 \
ansi_term-0.11.0 \
arc-swap-0.4.6 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
ascii-0.9.3 \
atty-0.2.14 \
autocfg-1.0.0 \
backtrace-0.3.46 \
backtrace-sys-0.1.37 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
bumpalo-3.2.1 \
byteorder-1.3.4 \
bytes-0.5.4 \
canonical-path-2.0.2 \
cargo-edit-0.6.0 \
cargo-lock-4.0.1 \
cargo_metadata-0.9.1 \
cc-1.0.52 \
cfg-if-0.1.10 \
chrono-0.4.11 \
clap-2.33.0 \
color-backtrace-0.3.0 \
combine-3.8.1 \
constant_time_eq-0.1.5 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crates-index-0.14.3 \
crossbeam-utils-0.7.2 \
cvss-1.0.0 \
darling-0.10.2 \
darling_core-0.10.2 \
darling_macro-0.10.2 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
dtoa-0.4.5 \
either-1.5.3 \
encoding_rs-0.8.22 \
env_proxy-0.4.0 \
error-chain-0.12.2 \
failure-0.1.8 \
failure_derive-0.1.8 \
fixedbitset-0.2.0 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-channel-0.3.4 \
futures-core-0.3.4 \
futures-io-0.3.4 \
futures-sink-0.3.4 \
futures-task-0.3.4 \
futures-util-0.3.4 \
generational-arena-0.2.7 \
getrandom-0.1.14 \
git2-0.13.5 \
glob-0.3.0 \
gumdrop-0.7.0 \
gumdrop_derive-0.7.0 \
h2-0.2.4 \
heck-0.3.1 \
hermit-abi-0.1.12 \
hex-0.4.2 \
home-0.5.3 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
hyper-0.13.5 \
hyper-tls-0.4.1 \
ident_case-1.0.1 \
idna-0.2.0 \
indexmap-1.3.2 \
iovec-0.1.4 \
itoa-0.4.5 \
jobserver-0.1.21 \
js-sys-0.3.39 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.69 \
libgit2-sys-0.12.5+1.0.0 \
libssh2-sys-0.2.17 \
libz-sys-1.0.25 \
linked-hash-map-0.5.2 \
log-0.4.8 \
matchers-0.0.1 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
mio-0.6.22 \
miow-0.2.1 \
native-tls-0.2.4 \
net2-0.2.34 \
num-integer-0.1.42 \
num-traits-0.2.11 \
num_cpus-1.13.0 \
once_cell-1.3.1 \
openssl-0.10.29 \
openssl-probe-0.1.2 \
openssl-sys-0.9.55 \
owning_ref-0.4.1 \
percent-encoding-2.1.0 \
petgraph-0.5.0 \
pin-project-0.4.10 \
pin-project-internal-0.4.10 \
pin-project-lite-0.1.4 \
pin-utils-0.1.0 \
pkg-config-0.3.17 \
platforms-0.2.1 \
ppv-lite86-0.2.6 \
proc-macro-error-1.0.2 \
proc-macro-error-attr-1.0.2 \
proc-macro2-1.0.12 \
quote-1.0.4 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.7 \
regex-automata-0.1.9 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
reqwest-0.10.4 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustsec-0.20.0 \
ryu-1.0.4 \
schannel-0.1.18 \
secrecy-0.6.0 \
security-framework-0.4.3 \
security-framework-sys-0.4.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
semver-parser-0.9.0 \
serde-1.0.106 \
serde_derive-1.0.106 \
serde_json-1.0.52 \
serde_urlencoded-0.6.1 \
signal-hook-0.1.14 \
signal-hook-registry-1.2.0 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.4.0 \
smol_str-0.1.16 \
stable_deref_trait-1.1.1 \
strsim-0.8.0 \
strsim-0.9.3 \
structopt-0.3.14 \
structopt-derive-0.4.7 \
subprocess-0.2.4 \
syn-1.0.18 \
syn-mid-0.5.0 \
synstructure-0.12.3 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thiserror-1.0.16 \
thiserror-impl-1.0.16 \
thread_local-1.0.1 \
time-0.1.43 \
tokio-0.2.20 \
tokio-tls-0.3.0 \
tokio-util-0.3.1 \
toml-0.5.6 \
toml_edit-0.1.5 \
tower-service-0.3.0 \
tracing-0.1.13 \
tracing-attributes-0.1.7 \
tracing-core-0.1.10 \
tracing-log-0.1.1 \
tracing-subscriber-0.1.6 \
try-lock-0.2.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.12 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
unreachable-1.0.0 \
url-2.1.1 \
vcpkg-0.2.8 \
vec_map-0.8.2 \
version_check-0.9.1 \
void-1.0.2 \
wait-timeout-0.2.0 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.62 \
wasm-bindgen-backend-0.2.62 \
wasm-bindgen-futures-0.4.12 \
wasm-bindgen-macro-0.2.62 \
wasm-bindgen-macro-support-0.2.62 \
wasm-bindgen-shared-0.2.62 \
web-sys-0.3.39 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.6.2 \
ws2_32-sys-0.2.1 \
zeroize-1.1.0
NO_TEST= yes
PLIST_FILES= bin/cargo-audit
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/cargo-audit
.include <bsd.port.mk>
Index: head/security/cloak/Makefile
===================================================================
--- head/security/cloak/Makefile (revision 552220)
+++ head/security/cloak/Makefile (revision 552221)
@@ -1,77 +1,77 @@
# $FreeBSD$
PORTNAME= cloak
DISTVERSIONPREFIX= v
DISTVERSION= 0.2.0
-PORTREVISION= 13
+PORTREVISION= 14
CATEGORIES= security
MAINTAINER= ports@FreeBSD.org
COMMENT= Command line OTP Authenticator application
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= evansmurithi
CARGO_CRATES= ansi_term-0.11.0 \
argon2rs-0.2.5 \
arrayvec-0.4.10 \
atty-0.2.11 \
autocfg-0.1.2 \
backtrace-0.3.15 \
backtrace-sys-0.1.28 \
bitflags-1.0.4 \
blake2-rfc-0.2.18 \
cc-1.0.35 \
cfg-if-0.1.7 \
clap-2.33.0 \
cloudabi-0.0.3 \
constant_time_eq-0.1.3 \
data-encoding-2.1.2 \
dirs-1.0.5 \
failure-0.1.5 \
failure_derive-0.1.5 \
fuchsia-cprng-0.1.1 \
lazy_static-1.3.0 \
libc-0.2.51 \
nodrop-0.1.13 \
numtoa-0.1.0 \
open-1.2.2 \
proc-macro2-0.4.28 \
quote-0.6.12 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rand_os-0.1.3 \
rdrand-0.4.0 \
redox_syscall-0.1.54 \
redox_termios-0.1.1 \
redox_users-0.3.0 \
ring-0.14.6 \
rustc-demangle-0.1.14 \
scoped_threadpool-0.1.9 \
serde-1.0.90 \
serde_derive-1.0.90 \
spin-0.5.0 \
strsim-0.8.0 \
syn-0.15.32 \
synstructure-0.10.1 \
termion-1.5.2 \
textwrap-0.11.0 \
toml-0.5.0 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
untrusted-0.6.2 \
vec_map-0.8.1 \
winapi-0.3.7 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/cloak
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/cloak
.include <bsd.port.mk>
Index: head/security/sequoia/Makefile
===================================================================
--- head/security/sequoia/Makefile (revision 552220)
+++ head/security/sequoia/Makefile (revision 552221)
@@ -1,338 +1,339 @@
# $FreeBSD$
PORTNAME= sequoia
PORTVERSION= 0.19.0
+PORTREVISION= 1
DISTVERSIONPREFIX= v
CATEGORIES= security
MAINTAINER= phryk-ports@wzff.de
COMMENT= Modern PGP implementation, written in rust
LICENSE= GPLv2+
LICENSE_FILE= ${WRKSRC}/LICENSE.txt
BUILD_DEPENDS+= llvm${LLVM_DEFAULT}>0:devel/llvm${LLVM_DEFAULT}
LIB_DEPENDS+= libcapnp.so:devel/capnproto
LIB_DEPENDS+= libgmp.so:math/gmp
LIB_DEPENDS+= libnettle.so:security/nettle
USES= cargo ssl
USE_LDCONFIG= yes
USE_GITLAB= yes
GL_ACCOUNT= sequoia-pgp
GL_PROJECT= sequoia
GL_COMMIT= 383133f6be990237044900a4df676488bf8dd71e
CARGO_BUILD_ARGS+= --all
SOVERS= ${PORTVERSION}
SUB_LIST= VERSION=${SOVERS}
SUB_FILES= sequoia-openpgp.pc
SUB_FILES+= sequoia.pc
# autogenerated by make cargo-crates
CARGO_CRATES= adler32-1.2.0 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
ascii-canvas-2.0.0 \
assert_cli-0.6.3 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.1 \
backtrace-0.3.46 \
backtrace-sys-0.1.37 \
base64-0.11.0 \
base64-0.12.3 \
bindgen-0.51.1 \
bit-set-0.5.2 \
bit-vec-0.6.2 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-buffer-0.9.0 \
block-padding-0.1.5 \
bstr-0.2.13 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytes-0.4.12 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
capnp-0.10.3 \
capnp-futures-0.10.1 \
capnp-rpc-0.10.0 \
capnpc-0.10.2 \
cc-1.0.59 \
cexpr-0.3.6 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clang-sys-0.28.1 \
clap-2.33.3 \
cloudabi-0.0.3 \
colored-1.9.1 \
constant_time_eq-0.1.5 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
cpuid-bool-0.1.2 \
crc32fast-1.2.0 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
crossterm-0.13.3 \
crossterm_winapi-0.4.0 \
csv-1.1.3 \
csv-core-0.1.10 \
ctor-0.1.15 \
curve25519-dalek-3.0.0 \
diff-0.1.12 \
difference-2.0.0 \
digest-0.8.1 \
digest-0.9.0 \
dirs-1.0.5 \
dirs-2.0.2 \
dirs-sys-0.3.5 \
doc-comment-0.3.3 \
docopt-1.1.0 \
dyn-clone-1.0.1 \
ed25519-1.0.1 \
ed25519-dalek-1.0.0 \
either-1.6.0 \
ena-0.13.1 \
encode_unicode-0.3.6 \
environment-0.1.1 \
failure-0.1.8 \
failure_derive-0.1.8 \
fake-simd-0.1.2 \
fallible-iterator-0.2.0 \
fallible-streaming-iterator-0.1.9 \
filetime-0.2.12 \
fixedbitset-0.1.9 \
flate2-1.0.14 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fs2-0.4.3 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-cpupool-0.1.8 \
generic-array-0.12.3 \
generic-array-0.14.4 \
getrandom-0.1.14 \
glob-0.3.0 \
h2-0.1.26 \
hashbrown-0.8.2 \
heck-0.3.1 \
hermit-abi-0.1.15 \
http-0.1.21 \
http-body-0.1.0 \
httparse-1.3.4 \
hyper-0.12.35 \
hyper-tls-0.3.2 \
idna-0.2.0 \
indexmap-1.5.1 \
iovec-0.1.4 \
itertools-0.8.2 \
itoa-0.4.6 \
kernel32-sys-0.2.2 \
lalrpop-0.17.2 \
lalrpop-util-0.17.2 \
lazy_static-1.4.0 \
libc-0.2.76 \
libloading-0.5.2 \
libm-0.2.1 \
libsqlite3-sys-0.15.0 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.11 \
lru-cache-0.1.2 \
mach_o_sys-0.1.1 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.5 \
memsec-0.5.7 \
miniz_oxide-0.3.7 \
mio-0.6.22 \
mio-named-pipes-0.1.7 \
mio-uds-0.6.8 \
miow-0.2.1 \
miow-0.3.5 \
native-tls-0.2.4 \
net2-0.2.34 \
nettle-7.0.0 \
nettle-src-3.5.1-2 \
nettle-sys-2.0.4 \
new_debug_unreachable-1.0.4 \
nom-4.2.3 \
num-bigint-dig-0.6.0 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
opaque-debug-0.2.3 \
opaque-debug-0.3.0 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
ordermap-0.3.5 \
parity-tokio-ipc-0.4.0 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
peeking_take_while-0.1.2 \
percent-encoding-2.1.0 \
petgraph-0.4.13 \
phf_generator-0.7.24 \
phf_shared-0.7.24 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
precomputed-hash-0.1.1 \
prettytable-rs-0.8.0 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro2-1.0.19 \
quickcheck-0.9.2 \
quote-1.0.7 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rpassword-4.0.3 \
rusqlite-0.19.0 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustc-hash-1.1.0 \
rustc_version-0.2.3 \
ryu-1.0.5 \
schannel-0.1.19 \
scoped-tls-0.1.2 \
scopeguard-1.1.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
sha2-0.8.2 \
sha2-0.9.1 \
shlex-0.1.1 \
signature-1.2.2 \
siphasher-0.2.3 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.4.2 \
socket2-0.3.11 \
spin-0.5.2 \
string-0.2.1 \
string_cache-0.7.5 \
string_cache_codegen-0.4.4 \
string_cache_shared-0.3.0 \
strsim-0.8.0 \
strsim-0.9.3 \
structopt-0.3.16 \
structopt-derive-0.4.9 \
subtle-2.2.3 \
syn-1.0.38 \
synstructure-0.12.4 \
tempfile-3.1.0 \
term-0.5.2 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
time-0.1.43 \
tokio-0.1.22 \
tokio-buf-0.1.1 \
tokio-codec-0.1.2 \
tokio-core-0.1.17 \
tokio-current-thread-0.1.7 \
tokio-executor-0.1.10 \
tokio-fs-0.1.7 \
tokio-io-0.1.13 \
tokio-named-pipes-0.1.0 \
tokio-reactor-0.1.12 \
tokio-sync-0.1.8 \
tokio-tcp-0.1.4 \
tokio-threadpool-0.1.18 \
tokio-timer-0.2.13 \
tokio-udp-0.1.6 \
tokio-uds-0.2.7 \
try-lock-0.2.3 \
typenum-1.12.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.9 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
url-2.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.1.5 \
version_check-0.9.2 \
want-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
win-crypto-ng-0.2.0 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1 \
zbase32-0.1.2 \
zeroize-1.1.0 \
zeroize_derive-1.0.0
do-install:
${MKDIR} ${STAGEDIR}${PREFIX}/lib/sequoia \
${STAGEDIR}${PREFIX}/etc/bash_completion.d \
${STAGEDIR}${PREFIX}/share/fish/completions \
${STAGEDIR}${PREFIX}/share/zsh/site-functions
.for f in ffi ffi_macros openpgp_ffi
${STRIP_CMD} ${CARGO_TARGET_DIR}/release/libsequoia_${f}.so
${INSTALL_LIB} ${CARGO_TARGET_DIR}/release/libsequoia_${f}.so ${STAGEDIR}${PREFIX}/lib/libsequoia_${f}.so.${SOVERS}
${LN} -sf libsequoia_${f}.so.${SOVERS} ${STAGEDIR}${PREFIX}/lib/libsequoia_${f}.so
.endfor
.for f in sq sqop sqv
${STRIP_CMD} ${CARGO_TARGET_DIR}/release/${f}
${INSTALL_PROGRAM} ${CARGO_TARGET_DIR}/release/${f} ${STAGEDIR}${PREFIX}/bin
.endfor
${STRIP_CMD} ${CARGO_TARGET_DIR}/release/sequoia-public-key-store
${INSTALL_PROGRAM} ${CARGO_TARGET_DIR}/release/sequoia-public-key-store ${STAGEDIR}${PREFIX}/lib/sequoia/sequoia-public-key-store
(cd ${WRKSRC}/ffi/include && ${COPYTREE_SHARE} . ${STAGEDIR}${PREFIX}/include)
(cd ${WRKSRC}/openpgp-ffi/include/sequoia && ${COPYTREE_SHARE} . ${STAGEDIR}${PREFIX}/include/sequoia)
${INSTALL_DATA} ${CARGO_TARGET_DIR}/sq.bash ${STAGEDIR}${PREFIX}/etc/bash_completion.d/sq
${INSTALL_DATA} ${CARGO_TARGET_DIR}/sqv.bash ${STAGEDIR}${PREFIX}/etc/bash_completion.d/sqv
${INSTALL_DATA} ${CARGO_TARGET_DIR}/sq.fish ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${CARGO_TARGET_DIR}/sqv.fish ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${CARGO_TARGET_DIR}/_sq ${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${CARGO_TARGET_DIR}/_sqv ${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${WRKDIR}/sequoia.pc ${STAGEDIR}${PREFIX}/libdata/pkgconfig/
${INSTALL_DATA} ${WRKDIR}/sequoia-openpgp.pc ${STAGEDIR}${PREFIX}/libdata/pkgconfig/
.include <bsd.port.mk>
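For one of the three FFI libraries, the install loop above yields the layout below under ${PREFIX} (derived from SOVERS=0.19.0; the other two libraries follow the same pattern):
# lib/libsequoia_openpgp_ffi.so.0.19.0	(stripped shared library)
# lib/libsequoia_openpgp_ffi.so -> libsequoia_openpgp_ffi.so.0.19.0	(symlink)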
Index: head/security/suricata/Makefile
===================================================================
--- head/security/suricata/Makefile (revision 552220)
+++ head/security/suricata/Makefile (revision 552221)
@@ -1,155 +1,155 @@
# Created by: Patrick Tracanelli <eksffa@freebsdbrasil.com.br>
# $FreeBSD$
PORTNAME= suricata
DISTVERSION= 5.0.2
-PORTREVISION= 8
+PORTREVISION= 9
CATEGORIES= security
MASTER_SITES= https://www.openinfosecfoundation.org/download/
MAINTAINER= franco@opnsense.org
COMMENT= High Performance Network IDS, IPS and Security Monitoring engine
LICENSE= GPLv2
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS= rustc:lang/${RUST_DEFAULT}
LIB_DEPENDS= libpcre.so:devel/pcre \
libnet.so:net/libnet \
liblz4.so:archivers/liblz4 \
libyaml.so:textproc/libyaml
USES= autoreconf cpe gmake iconv:translit libtool pathfix pkgconfig
CONFLICTS_INSTALL= libhtp suricata5
USE_LDCONFIG= yes
USE_RC_SUBR= ${PORTNAME}
PLIST_SUB= PORTVERSION=${DISTVERSION:C/-/_/g}
GNU_CONFIGURE= yes
CPE_VENDOR= openinfosecfoundation
INSTALL_TARGET= install-strip
TEST_TARGET= check
OPTIONS_DEFINE= GEOIP IPFW JSON NETMAP NSS PORTS_PCAP PRELUDE \
PYTHON REDIS TESTS
OPTIONS_DEFINE_amd64= HYPERSCAN
OPTIONS_DEFAULT= IPFW JSON NETMAP PYTHON
OPTIONS_SUB= yes
OPTIONS_RADIO= SCRIPTS
OPTIONS_RADIO_SCRIPTS= LUA LUAJIT
GEOIP_DESC= GeoIP support
HYPERSCAN_DESC= Hyperscan support
IPFW_DESC= IPFW and IP Divert support for inline IDP
JSON_DESC= JSON output support
LUAJIT_DESC= LuaJIT scripting support
LUA_DESC= LUA scripting support
NETMAP_DESC= Netmap support for inline IDP
NSS_DESC= File checksums and SSL/TLS fingerprinting
PORTS_PCAP_DESC= Use libpcap from ports
PRELUDE_DESC= Prelude support for NIDS alerts
PYTHON_DESC= Python-based update and control utilities
REDIS_DESC= Redis output support
SCRIPTS_DESC= Scripting
TESTS_DESC= Unit tests in suricata binary
GEOIP_LIB_DEPENDS= libmaxminddb.so:net/libmaxminddb
GEOIP_CONFIGURE_ON= --enable-geoip
HYPERSCAN_LIB_DEPENDS= libhs.so:devel/hyperscan
HYPERSCAN_CONFIGURE_ON= --with-libhs-includes=${LOCALBASE}/include \
--with-libhs-libraries=${LOCALBASE}/lib
IPFW_CONFIGURE_ON= --enable-ipfw
JSON_LIB_DEPENDS= libjansson.so:devel/jansson
JSON_CONFIGURE_ON= --with-libjansson-includes=${LOCALBASE}/include \
--with-libjansson-libraries=${LOCALBASE}/lib
LUA_USES= lua:51
LUA_CONFIGURE_ON= --enable-lua \
--with-liblua-includes=${LUA_INCDIR} \
--with-liblua-libraries=${LUA_LIBDIR}
LUAJIT_LIB_DEPENDS= libluajit-5.1.so:lang/luajit
LUAJIT_CONFIGURE_ON= --enable-luajit
NSS_LIB_DEPENDS= libnss3.so:security/nss \
libnspr4.so:devel/nspr
NSS_CONFIGURE_OFF= --disable-nss --disable-nspr
NSS_CONFIGURE_ON= --with-libnss-includes=${LOCALBASE}/include/nss/nss \
--with-libnss-libraries=${LOCALBASE}/lib \
--with-libnspr-libraries=${LOCALBASE}/lib \
--with-libnspr-includes=${LOCALBASE}/include/nspr
NETMAP_CONFIGURE_ENABLE= netmap
PORTS_PCAP_LIB_DEPENDS= libpcap.so.1:net/libpcap
PORTS_PCAP_CONFIGURE_ON= --with-libpcap-includes=${LOCALBASE}/include \
--with-libpcap-libraries=${LOCALBASE}/lib
PORTS_PCAP_CONFIGURE_OFF= --with-libpcap-includes=/usr/include \
--with-libpcap-libraries=/usr/lib
PRELUDE_LIB_DEPENDS= libprelude.so:security/libprelude \
libgnutls.so:security/gnutls \
libgcrypt.so:security/libgcrypt \
libgpg-error.so:security/libgpg-error \
libltdl.so:devel/libltdl
PRELUDE_CONFIGURE_ENABLE= prelude
PRELUDE_CONFIGURE_ON= --with-libprelude-prefix=${LOCALBASE}
PYTHON_USES= python
PYTHON_USE= PYTHON=py3kplist
PYTHON_CONFIGURE_ENABLE= python
PYTHON_RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}yaml>0:devel/py-yaml@${PY_FLAVOR}
PYTHON_BUILD_DEPENDS= ${PYTHON_RUN_DEPENDS}
REDIS_LIB_DEPENDS= libhiredis.so:databases/hiredis
REDIS_CONFIGURE_ON= --enable-hiredis \
--with-libhiredis-includes=${LOCALBASE}/include \
--with-libhiredis-libraries=${LOCALBASE}/lib
TESTS_CONFIGURE_ENABLE= unittests
SUB_FILES= pkg-message
CONFIGURE_ARGS+=--enable-gccprotect \
--enable-bundled-htp \
--with-libpcre-includes=${LOCALBASE}/include \
--with-libpcre-libraries=${LOCALBASE}/lib \
--with-libyaml-includes=${LOCALBASE}/include \
--with-libyaml-libraries=${LOCALBASE}/lib \
--with-libnet-includes=${LOCALBASE}/include \
--with-libnet-libraries=${LOCALBASE}/lib \
--with-libhtp-includes=${LOCALBASE}/include/ \
--with-libhtp-libraries=${LOCALBASE}/lib \
--disable-gccmarch-native
pre-patch:
@${CP} ${FILESDIR}/ax_check_compile_flag.m4 ${WRKSRC}/m4
post-patch-PYTHON-on:
@${REINPLACE_CMD} -e "/AC_PATH_PROGS.*HAVE_PYTHON/ s/python[^,]*,/${PYTHON_VERSION},/g" \
${WRKSRC}/configure.ac
post-install:
@${MKDIR} ${STAGEDIR}${ETCDIR} ${STAGEDIR}/var/log/suricata
.for f in classification.config reference.config
@${MV} ${STAGEDIR}${DATADIR}/${f} ${STAGEDIR}${ETCDIR}/${f}.sample
.endfor
.for f in suricata.yaml threshold.config
${INSTALL_DATA} ${WRKSRC}/${f} ${STAGEDIR}${ETCDIR}/${f}.sample
.endfor
post-install-PYTHON-on:
(cd ${STAGEDIR}${PREFIX} \
&& ${PYTHON_CMD} ${PYTHON_LIBDIR}/compileall.py \
-d ${PYTHONPREFIX_SITELIBDIR} -f ${PYTHONPREFIX_SITELIBDIR:S;${PREFIX}/;;})
.include <bsd.port.mk>
Index: head/shells/ion/Makefile
===================================================================
--- head/shells/ion/Makefile (revision 552220)
+++ head/shells/ion/Makefile (revision 552221)
@@ -1,281 +1,281 @@
# $FreeBSD$
PORTNAME= ion
DISTVERSION= 1.0.5-1355
-PORTREVISION= 14
+PORTREVISION= 15
CATEGORIES= shells
PKGNAMESUFFIX= -shell
MAINTAINER= ports@FreeBSD.org
COMMENT= Modern system shell written in Rust
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= nodefault
GH_TUPLE= alkis:decimal:972c8547a0a76c7ad9a314e28a335aa57d46a543:decimal
USE_GITLAB= yes
GL_SITE= https://gitlab.redox-os.org
GL_ACCOUNT= redox-os
GL_COMMIT= a8872014dbce730ccd00aaa722397dc394a52bf4
GL_TUPLE= redox-os:calc:afba9c5d184ddb9db5e4e71dc357da0499e212cf:calculate \
redox-os:liner:3f3d1d4e6058067e9d03a9b1510d8f2edac073c7:liner \
redox-os:small:022635fcb0b0b631b3a06c79c45fa8ebaf4f64f5:small \
redox-os:termion:c27678efc2ed14576361c7ce6d806a6bb576f1a9:termion
CARGO_CRATES= adler32-1.0.3 \
aho-corasick-0.7.4 \
andrew-0.2.1 \
android_glue-0.2.3 \
ansi_term-0.11.0 \
approx-0.3.2 \
arrayvec-0.4.11 \
atty-0.2.13 \
auto_enums-0.5.9 \
auto_enums_core-0.5.9 \
auto_enums_derive-0.5.9 \
autocfg-0.1.5 \
backtrace-0.3.33 \
backtrace-sys-0.1.31 \
bitflags-1.1.0 \
block-0.1.6 \
bstr-0.2.4 \
bytecount-0.1.7 \
bytecount-0.3.2 \
byteorder-1.3.2 \
c2-chacha-0.2.2 \
cast-0.2.2 \
cc-1.0.37 \
cfg-if-0.1.9 \
cgl-0.2.3 \
clap-2.33.0 \
cloudabi-0.0.3 \
cocoa-0.18.4 \
color_quant-1.0.1 \
core-foundation-0.6.4 \
core-foundation-sys-0.6.2 \
core-graphics-0.17.3 \
criterion-0.2.11 \
criterion-plot-0.3.1 \
crossbeam-deque-0.6.3 \
crossbeam-epoch-0.7.1 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.5 \
csv-1.1.1 \
csv-core-0.1.6 \
darling-0.9.0 \
darling_core-0.9.0 \
darling_macro-0.9.0 \
deflate-0.7.20 \
derivative-1.0.2 \
derive_utils-0.7.2 \
dlib-0.4.1 \
downcast-rs-1.0.4 \
draw_state-0.8.0 \
either-1.5.2 \
err-derive-0.1.5 \
errno-dragonfly-0.1.1 \
failure-0.1.5 \
failure_derive-0.1.5 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
gcc-0.3.55 \
getrandom-0.1.6 \
gfx-0.17.1 \
gfx_core-0.8.3 \
gfx_device_gl-0.15.5 \
gfx_gl-0.5.0 \
gif-0.10.2 \
gl-0.11.0 \
gl_generator-0.10.0 \
gl_generator-0.11.0 \
gl_generator-0.9.0 \
gleam-0.6.18 \
glob-0.3.0 \
glutin-0.21.0 \
glutin_egl_sys-0.1.3 \
glutin_emscripten_sys-0.1.0 \
glutin_gles2_sys-0.1.3 \
glutin_glx_sys-0.1.5 \
glutin_wgl_sys-0.1.3 \
hashbrown-0.5.0 \
heck-0.3.1 \
ident_case-1.0.1 \
image-0.21.2 \
inflate-0.4.5 \
interpolation-0.2.0 \
itertools-0.8.0 \
itoa-0.4.4 \
jpeg-decoder-0.1.15 \
khronos_api-2.2.0 \
khronos_api-3.1.0 \
lazy_static-1.3.0 \
lexical-2.2.1 \
lexical-core-0.4.2 \
libc-0.2.60 \
libloading-0.5.2 \
line_drawing-0.7.0 \
lock_api-0.1.5 \
log-0.4.7 \
lzw-0.10.0 \
malloc_buf-0.0.6 \
memchr-2.2.1 \
memmap-0.7.0 \
memoffset-0.2.1 \
nix-0.14.1 \
nodrop-0.1.13 \
num-0.2.0 \
num-bigint-0.2.2 \
num-complex-0.2.3 \
num-derive-0.2.5 \
num-integer-0.1.41 \
num-iter-0.1.39 \
num-rational-0.2.2 \
num-traits-0.2.8 \
num_cpus-1.10.1 \
numtoa-0.1.0 \
objc-0.2.6 \
object-pool-0.3.1 \
ordered-float-1.0.2 \
osmesa-sys-0.1.2 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot_core-0.4.0 \
percent-encoding-1.0.1 \
permutate-0.3.2 \
piston-0.49.0 \
piston-ai_behavior-0.31.0 \
piston-float-1.0.0 \
piston-gfx_texture-0.36.0 \
piston-graphics_api_version-0.2.0 \
piston-shaders_graphics2d-0.3.1 \
piston-texture-0.6.0 \
piston-viewport-1.0.0 \
piston2d-gfx_graphics-0.61.0 \
piston2d-graphics-0.32.0 \
piston2d-sprite-0.55.0 \
piston_window-0.100.0 \
pistoncore-event_loop-0.49.0 \
pistoncore-glutin_window-0.63.0 \
pistoncore-input-0.28.0 \
pistoncore-window-0.44.0 \
pkg-config-0.3.14 \
png-0.14.1 \
ppv-lite86-0.2.5 \
proc-macro2-0.4.30 \
quote-0.6.13 \
rand-0.4.6 \
rand-0.6.5 \
rand-0.7.0 \
rand_chacha-0.1.1 \
rand_chacha-0.2.0 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rand_core-0.5.0 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rand_xoshiro-0.1.0 \
rayon-1.1.0 \
rayon-core-1.5.0 \
rdrand-0.4.0 \
read_color-1.0.0 \
redox_syscall-0.1.56 \
redox_termios-0.1.1 \
regex-1.2.0 \
regex-automata-0.1.8 \
regex-syntax-0.6.10 \
rustc-demangle-0.1.15 \
rustc_version-0.2.3 \
rusttype-0.7.7 \
ryu-0.2.8 \
ryu-1.0.0 \
same-file-1.0.5 \
scoped_threadpool-0.1.9 \
scopeguard-0.3.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.97 \
serde_derive-1.0.97 \
serde_json-1.0.40 \
serial_test-0.2.0 \
serial_test_derive-0.2.0 \
shader_version-0.6.0 \
shared_library-0.1.9 \
smallvec-0.6.10 \
smithay-client-toolkit-0.4.6 \
spin-0.5.0 \
stable_deref_trait-1.1.1 \
stackvector-1.0.6 \
static_assertions-0.2.5 \
stb_truetype-0.2.6 \
strsim-0.7.0 \
strsim-0.8.0 \
structopt-0.2.18 \
structopt-derive-0.2.18 \
syn-0.15.40 \
synstructure-0.10.2 \
textwrap-0.11.0 \
thread_local-0.3.6 \
tiff-0.2.2 \
tinytemplate-1.0.2 \
ucd-util-0.1.5 \
unicode-segmentation-1.3.0 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
unreachable-1.0.0 \
users-0.9.1 \
utf8-ranges-1.0.3 \
uuid-0.6.5 \
vec_map-0.8.1 \
vecmath-1.0.0 \
version_check-0.9.1 \
void-1.0.2 \
walkdir-2.2.9 \
wayland-client-0.21.13 \
wayland-commons-0.21.13 \
wayland-protocols-0.21.13 \
wayland-scanner-0.21.13 \
wayland-sys-0.21.13 \
winapi-0.3.7 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winit-0.19.1 \
x11-dl-2.18.3 \
xdg-2.2.0 \
xml-rs-0.7.0 \
xml-rs-0.8.0
CARGO_USE_GITHUB= yes
CARGO_USE_GITLAB= yes
CONFLICTS_INSTALL= ion
OPTIONS_DEFINE= DOCS EXAMPLES
# liner-0.4.4 does not build with Rust 1.40.0. Use the newer copy
# that is also already available.
post-patch:
@${REINPLACE_CMD} 's,^liner = .*,liner = \{ path = "${WRKSRC_liner}" \},' \
${WRKSRC_calculate}/Cargo.toml
post-configure:
${ECHO_CMD} ${GL_COMMIT} > ${WRKSRC}/git_revision.txt
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/ion
post-install-DOCS-on:
@cd ${WRKSRC}/manual/src && ${COPYTREE_SHARE} . ${STAGEDIR}${DOCSDIR}
post-install-EXAMPLES-on:
@cd ${WRKSRC}/tests && ${COPYTREE_SHARE} . ${STAGEDIR}${EXAMPLESDIR}
.include <bsd.port.mk>
Index: head/shells/starship/Makefile
===================================================================
--- head/shells/starship/Makefile (revision 552220)
+++ head/shells/starship/Makefile (revision 552221)
@@ -1,189 +1,189 @@
# $FreeBSD$
PORTNAME= starship
DISTVERSION= 0.44.0
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= shells
MASTER_SITES= CRATESIO
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= yuri@FreeBSD.org
COMMENT= Cross-shell prompt for astronauts
LICENSE= ISCL
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libgit2.so:devel/libgit2 \
libssh2.so:security/libssh2
USES= cargo ssl
CARGO_CRATES= aho-corasick-0.7.13 \
ansi_term-0.11.0 \
ansi_term-0.12.1 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
attohttpc-0.15.0 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
battery-0.7.5 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byte-unit-4.0.8 \
byteorder-1.3.4 \
bytes-0.5.5 \
cc-1.0.57 \
cfg-if-0.1.10 \
chrono-0.4.13 \
clap-2.33.1 \
constant_time_eq-0.1.5 \
core-foundation-0.6.4 \
core-foundation-0.7.0 \
core-foundation-sys-0.6.2 \
core-foundation-sys-0.7.0 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
digest-0.8.1 \
dirs-next-1.0.1 \
dirs-sys-next-0.1.0 \
doc-comment-0.3.3 \
dtoa-0.4.6 \
either-1.5.3 \
env_logger-0.7.1 \
fake-simd-0.1.2 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
generator-0.6.21 \
generic-array-0.12.3 \
gethostname-0.2.1 \
getrandom-0.1.14 \
git2-0.13.6 \
hermit-abi-0.1.15 \
http-0.2.1 \
humantime-1.3.0 \
idna-0.2.0 \
indexmap-1.4.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
lexical-core-0.7.4 \
libc-0.2.71 \
libgit2-sys-0.12.7+1.0.0 \
libz-sys-1.0.25 \
linked-hash-map-0.5.3 \
log-0.4.8 \
loom-0.3.4 \
mach-0.2.3 \
maplit-1.0.2 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.5 \
native-tls-0.2.4 \
nix-0.15.0 \
nom-5.1.2 \
ntapi-0.3.4 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.0 \
opaque-debug-0.2.3 \
open-1.4.0 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-src-111.10.0+1.1.1g \
openssl-sys-0.9.58 \
os_info-2.0.6 \
path-slash-0.1.3 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
pkg-config-0.3.17 \
ppv-lite86-0.2.8 \
pretty_env_logger-0.4.0 \
proc-macro2-1.0.18 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.3.1 \
rayon-core-1.7.1 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rust-argon2-0.7.0 \
rustc_version-0.2.3 \
ryu-1.0.5 \
schannel-0.1.19 \
scoped-tls-0.1.2 \
scopeguard-1.1.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.56 \
serde_urlencoded-0.6.1 \
sha-1-0.8.2 \
starship_module_config_derive-0.1.1 \
static_assertions-1.1.0 \
strsim-0.8.0 \
syn-1.0.33 \
sysinfo-0.14.9 \
tempfile-3.1.0 \
term_size-0.3.2 \
termcolor-1.1.0 \
textwrap-0.11.0 \
textwrap-0.12.1 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.3 \
toml-0.5.6 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
uom-0.26.0 \
url-2.1.1 \
urlencoding-1.1.1 \
utf8-width-0.1.3 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
void-1.0.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
yaml-rust-0.4.4
PLIST_FILES= bin/${PORTNAME}
OPTIONS_DEFINE= FONTS
OPTIONS_DEFAULT= FONTS
FONTS_DESC= Install fonts
FONTS_RUN_DEPENDS= nerd-fonts>0:x11-fonts/nerd-fonts
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/sysutils/diskonaut/Makefile
===================================================================
--- head/sysutils/diskonaut/Makefile (revision 552220)
+++ head/sysutils/diskonaut/Makefile (revision 552221)
@@ -1,113 +1,114 @@
# $FreeBSD$
PORTNAME= diskonaut
PORTVERSION= 0.11.0
+PORTREVISION= 1
CATEGORIES= sysutils
MAINTAINER= yuri@FreeBSD.org
COMMENT= Terminal disk space navigator
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= imsnif
CARGO_CRATES= addr2line-0.12.1 \
adler32-1.1.0 \
ansi_term-0.11.0 \
arc-swap-0.4.7 \
atty-0.2.14 \
autocfg-1.0.0 \
backtrace-0.3.49 \
bitflags-1.2.1 \
cassowary-0.3.0 \
cc-1.0.55 \
cfg-if-0.1.10 \
clap-2.33.1 \
clicolors-control-1.0.1 \
cloudabi-0.0.3 \
console-0.10.3 \
crossbeam-0.7.3 \
crossbeam-channel-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
difference-2.0.0 \
dtoa-0.4.6 \
either-1.5.3 \
encode_unicode-0.3.6 \
failure-0.1.8 \
failure_derive-0.1.8 \
filesize-0.2.0 \
gimli-0.21.0 \
heck-0.3.1 \
hermit-abi-0.1.14 \
insta-0.16.0 \
itoa-0.4.6 \
jwalk-0.5.1 \
lazy_static-1.4.0 \
libc-0.2.71 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.11 \
maybe-uninit-2.0.0 \
memoffset-0.5.4 \
miniz_oxide-0.3.7 \
mio-0.7.0 \
miow-0.3.5 \
nix-0.17.0 \
ntapi-0.3.4 \
num_cpus-1.13.0 \
object-0.20.0 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
proc-macro-error-1.0.2 \
proc-macro-error-attr-1.0.2 \
proc-macro2-1.0.18 \
quote-1.0.7 \
rayon-1.3.1 \
rayon-core-1.7.1 \
redox_syscall-0.1.56 \
rustc-demangle-0.1.16 \
ryu-1.0.5 \
scopeguard-1.1.0 \
serde-1.0.112 \
serde_derive-1.0.112 \
serde_json-1.0.55 \
serde_yaml-0.8.13 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
smallvec-1.4.2 \
socket2-0.3.15 \
strsim-0.8.0 \
structopt-0.3.15 \
structopt-derive-0.4.8 \
syn-1.0.31 \
syn-mid-0.5.0 \
synstructure-0.12.4 \
terminal_size-0.1.12 \
termios-0.3.2 \
textwrap-0.11.0 \
tui-0.11.0 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.2 \
version_check-0.9.2 \
void-1.0.2 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
yaml-rust-0.4.4
PLIST_FILES= bin/${PORTNAME}
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/sysutils/dua-cli/Makefile
===================================================================
--- head/sysutils/dua-cli/Makefile (revision 552220)
+++ head/sysutils/dua-cli/Makefile (revision 552221)
@@ -1,114 +1,114 @@
# $FreeBSD$
PORTNAME= dua-cli
DISTVERSIONPREFIX= v
DISTVERSION= 2.10.2
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= sysutils
MAINTAINER= vulcan@wired.sh
COMMENT= Conveniently learn about the disk usage of directories
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= Byron
CARGO_CRATES= ansi_term-0.11.0 \
anyhow-1.0.31 \
arc-swap-0.4.7 \
atty-0.2.14 \
autocfg-1.0.0 \
bitflags-1.2.1 \
byte-unit-4.0.8 \
cassowary-0.3.0 \
cfg-if-0.1.10 \
clap-2.33.1 \
cloudabi-0.0.3 \
colored-2.0.0 \
crossbeam-0.7.3 \
crossbeam-channel-0.4.3 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
crosstermion-0.3.0 \
ctor-0.1.15 \
difference-2.0.0 \
either-1.5.3 \
filesize-0.2.0 \
fixedbitset-0.2.0 \
glob-0.3.0 \
hashbrown-0.8.1 \
heck-0.3.1 \
hermit-abi-0.1.15 \
indexmap-1.5.0 \
itertools-0.9.0 \
jwalk-0.5.1 \
lazy_static-1.4.0 \
libc-0.2.73 \
lock_api-0.3.4 \
log-0.4.11 \
maybe-uninit-2.0.0 \
memoffset-0.5.5 \
mio-0.7.0 \
miow-0.3.5 \
ntapi-0.3.4 \
num_cpus-1.13.0 \
numtoa-0.1.0 \
open-1.4.0 \
output_vt100-0.1.2 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
petgraph-0.5.1 \
pretty_assertions-0.6.1 \
proc-macro-error-1.0.3 \
proc-macro-error-attr-1.0.3 \
proc-macro2-1.0.19 \
quote-1.0.7 \
rayon-1.3.1 \
rayon-core-1.7.1 \
redox_syscall-0.1.57 \
redox_termios-0.1.1 \
scopeguard-1.1.0 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
smallvec-1.4.1 \
socket2-0.3.12 \
strsim-0.8.0 \
structopt-0.3.15 \
structopt-derive-0.4.8 \
syn-1.0.35 \
syn-mid-0.5.0 \
termion-1.5.5 \
textwrap-0.11.0 \
tui-0.10.0 \
tui-react-0.10.1 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
utf8-width-0.1.3 \
vec_map-0.8.2 \
version_check-0.9.2 \
wild-2.0.4 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/dua
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/dua
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/${PORTDOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/sysutils/exa/Makefile
===================================================================
--- head/sysutils/exa/Makefile (revision 552220)
+++ head/sysutils/exa/Makefile (revision 552221)
@@ -1,101 +1,101 @@
# $FreeBSD$
PORTNAME= exa
DISTVERSIONPREFIX= v
DISTVERSION= 0.9.0
-PORTREVISION= 15
+PORTREVISION= 16
CATEGORIES= sysutils
MAINTAINER= ports@FreeBSD.org
COMMENT= Modern replacement for ls
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENCE
LIB_DEPENDS= libgit2.so:devel/libgit2
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= ogham
CARGO_CRATES= aho-corasick-0.7.3 \
ansi_term-0.12.0 \
atty-0.2.11 \
autocfg-0.1.4 \
bitflags-1.0.4 \
byteorder-1.3.2 \
cc-1.0.35 \
cfg-if-0.1.7 \
datetime-0.4.7 \
env_logger-0.6.1 \
git2-0.9.1 \
glob-0.3.0 \
humantime-1.2.0 \
idna-0.1.5 \
iso8601-0.1.1 \
kernel32-sys-0.2.2 \
lazy_static-1.3.0 \
libc-0.2.51 \
libgit2-sys-0.8.1 \
libz-sys-1.0.25 \
locale-0.2.2 \
log-0.4.6 \
matches-0.1.8 \
memchr-2.2.0 \
natord-1.0.9 \
nom-1.2.4 \
num-traits-0.1.43 \
num-traits-0.2.6 \
num_cpus-1.10.0 \
number_prefix-0.3.0 \
openssl-src-111.3.0+1.1.1c \
openssl-sys-0.9.47 \
pad-0.1.5 \
percent-encoding-1.0.1 \
pkg-config-0.3.14 \
quick-error-1.2.2 \
redox_syscall-0.1.54 \
redox_termios-0.1.1 \
regex-1.1.6 \
regex-syntax-0.6.6 \
scoped_threadpool-0.1.9 \
smallvec-0.6.9 \
term_grid-0.1.7 \
term_size-0.3.1 \
termcolor-1.0.4 \
termion-1.5.1 \
thread_local-0.3.6 \
ucd-util-0.1.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-width-0.1.5 \
url-1.7.2 \
users-0.9.1 \
utf8-ranges-1.0.2 \
vcpkg-0.2.6 \
winapi-0.2.8 \
winapi-0.3.7 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.1 \
zoneinfo_compiled-0.4.8
PLIST_FILES= bin/exa \
man/man1/exa.1.gz \
etc/bash_completion.d/exa.bash \
share/fish/completions/exa.fish \
share/zsh/site-functions/_exa
post-install:
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d
${INSTALL_DATA} ${WRKSRC}/contrib/completions.bash ${STAGEDIR}${PREFIX}/etc/bash_completion.d/exa.bash
@${MKDIR} ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${WRKSRC}/contrib/completions.fish ${STAGEDIR}${PREFIX}/share/fish/completions/exa.fish
@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${WRKSRC}/contrib/completions.zsh ${STAGEDIR}${PREFIX}/share/zsh/site-functions/_exa
${INSTALL_MAN} ${WRKSRC}/contrib/man/exa.1 ${STAGEDIR}${PREFIX}/man/man1
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/exa
.include <bsd.port.mk>
Index: head/sysutils/fd/Makefile
===================================================================
--- head/sysutils/fd/Makefile (revision 552220)
+++ head/sysutils/fd/Makefile (revision 552221)
@@ -1,113 +1,113 @@
# Created by: Andrey Cherkashin <andoriyu@gmail.com>
# $FreeBSD$
PORTNAME= fd
DISTVERSIONPREFIX= v
DISTVERSION= 8.1.1
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= sysutils
PKGNAMESUFFIX= -find
MAINTAINER= ports@FreeBSD.org
COMMENT= Simple, fast and user-friendly alternative to find
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= sharkdp
CARGO_CRATES= aho-corasick-0.7.10 \
ansi_term-0.11.0 \
ansi_term-0.12.1 \
anyhow-1.0.31 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
bstr-0.2.13 \
cc-1.0.53 \
cfg-if-0.1.10 \
clap-2.33.1 \
constant_time_eq-0.1.5 \
crossbeam-utils-0.7.2 \
ctrlc-3.1.4 \
diff-0.1.12 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
filetime-0.2.10 \
fnv-1.0.7 \
fs_extra-1.1.0 \
fuchsia-cprng-0.1.1 \
getrandom-0.1.14 \
globset-0.4.5 \
hermit-abi-0.1.13 \
humantime-2.0.0 \
ignore-0.4.15 \
jemalloc-sys-0.3.2 \
jemallocator-0.3.2 \
lazy_static-1.4.0 \
libc-0.2.70 \
log-0.4.8 \
lscolors-0.7.0 \
memchr-2.3.3 \
nix-0.17.0 \
num_cpus-1.13.0 \
rand-0.4.6 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.7 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
rust-argon2-0.7.0 \
same-file-1.0.6 \
strsim-0.8.0 \
tempdir-0.3.7 \
term_size-0.3.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
unicode-width-0.1.7 \
users-0.10.0 \
vec_map-0.8.2 \
version_check-0.9.1 \
void-1.0.2 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
MAKE_ENV= SHELL_COMPLETIONS_DIR=${WRKDIR}/completions
CONFLICTS_INSTALL= fd
PLIST_FILES= bin/fd \
etc/bash_completion.d/fd.bash \
share/fish/completions/fd.fish \
share/man/man1/fd.1.gz \
share/zsh/site-functions/_fd
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/fd
${INSTALL_MAN} ${WRKSRC}/doc/fd.1 \
${STAGEDIR}${PREFIX}/share/man/man1
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d
${INSTALL_DATA} ${WRKDIR}/completions/fd.bash \
${STAGEDIR}${PREFIX}/etc/bash_completion.d
@${MKDIR} ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${WRKDIR}/completions/fd.fish \
${STAGEDIR}${PREFIX}/share/fish/completions
@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${WRKDIR}/completions/_fd \
${STAGEDIR}${PREFIX}/share/zsh/site-functions
.include <bsd.port.mk>
Index: head/sysutils/flowgger/Makefile
===================================================================
--- head/sysutils/flowgger/Makefile (revision 552220)
+++ head/sysutils/flowgger/Makefile (revision 552221)
@@ -1,178 +1,178 @@
# $FreeBSD$
PORTNAME= flowgger
DISTVERSION= 0.2.9
-PORTREVISION= 9
+PORTREVISION= 10
CATEGORIES= sysutils
MASTER_SITES= CRATESIO
DISTFILES= ${CARGO_DIST_SUBDIR}/${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= ports@FreeBSD.org
COMMENT= Fast data collector
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS= capnp:devel/capnproto
USES= cargo ssl
CARGO_CRATES= adler32-1.0.4 \
ansi_term-0.11.0 \
ascii-0.9.3 \
atty-0.2.13 \
autocfg-0.1.6 \
backtrace-0.3.38 \
backtrace-sys-0.1.31 \
bitflags-1.2.0 \
build_const-0.2.1 \
byteorder-0.5.3 \
byteorder-1.3.2 \
bytes-0.4.12 \
c2-chacha-0.2.2 \
capnp-0.10.3 \
capnpc-0.10.2 \
cc-1.0.45 \
cfg-if-0.1.10 \
chrono-0.4.9 \
clap-2.33.0 \
cloudabi-0.0.3 \
combine-3.8.1 \
crc-1.8.1 \
crc32fast-1.2.0 \
crossbeam-0.7.3 \
crossbeam-channel-0.4.0 \
crossbeam-deque-0.7.2 \
crossbeam-epoch-0.8.0 \
crossbeam-queue-0.2.1 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.0 \
dtoa-0.2.2 \
either-1.5.3 \
error-chain-0.10.0 \
filetime-0.2.7 \
flate2-0.2.20 \
flate2-1.0.11 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fsevent-0.4.0 \
fsevent-sys-2.0.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
generator-0.6.19 \
getrandom-0.1.12 \
glob-0.3.0 \
idna-0.1.5 \
inotify-0.6.1 \
inotify-sys-0.1.3 \
iovec-0.1.2 \
itoa-0.1.1 \
kafka-0.8.0 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.62 \
lock_api-0.1.5 \
log-0.3.9 \
log-0.4.8 \
matches-0.1.8 \
may-0.3.14 \
may_queue-0.1.7 \
memchr-2.2.1 \
memoffset-0.5.3 \
miniz-sys-0.1.12 \
miniz_oxide-0.3.2 \
mio-0.6.19 \
mio-extras-2.0.5 \
miow-0.2.1 \
miow-0.3.3 \
net2-0.2.33 \
nix-0.15.0 \
notify-4.0.13 \
num-integer-0.1.41 \
num-traits-0.1.43 \
num-traits-0.2.8 \
num_cpus-1.10.1 \
openssl-0.10.24 \
openssl-sys-0.9.49 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot_core-0.4.0 \
percent-encoding-1.0.1 \
pkg-config-0.3.16 \
ppv-lite86-0.2.5 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand-0.7.2 \
rand_chacha-0.1.1 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redis-0.10.0 \
redox_syscall-0.1.56 \
ref_slice-1.2.0 \
remove_dir_all-0.5.2 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
same-file-1.0.5 \
scopeguard-0.3.3 \
scopeguard-1.0.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-0.8.23 \
serde-1.0.101 \
serde_json-0.8.6 \
sha1-0.6.0 \
slab-0.4.2 \
smallvec-0.6.10 \
snap-0.2.5 \
socket2-0.3.11 \
stable_deref_trait-1.1.1 \
strsim-0.8.0 \
tempdir-0.3.7 \
textwrap-0.11.0 \
time-0.1.42 \
tokio-codec-0.1.1 \
tokio-executor-0.1.8 \
tokio-io-0.1.12 \
tokio-reactor-0.1.9 \
tokio-sync-0.1.6 \
tokio-tcp-0.1.3 \
toml-0.5.3 \
twox-hash-1.5.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-width-0.1.6 \
unreachable-1.0.0 \
url-1.7.2 \
vcpkg-0.2.7 \
vec_map-0.8.1 \
void-1.0.2 \
walkdir-2.2.9 \
wasi-0.7.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/flowgger
${INSTALL_DATA} ${WRKSRC}/flowgger.toml \
${STAGEDIR}${PREFIX}/etc/flowgger.toml.sample
.include <bsd.port.mk>
Index: head/sysutils/fselect/Makefile
===================================================================
--- head/sysutils/fselect/Makefile (revision 552220)
+++ head/sysutils/fselect/Makefile (revision 552221)
@@ -1,158 +1,159 @@
# $FreeBSD$
PORTNAME= fselect
DISTVERSION= 0.7.1
+PORTREVISION= 1
CATEGORIES= sysutils
MAINTAINER= vulcan@wired.sh
COMMENT= Find files with SQL-like queries
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= jhspetersson
CARGO_CRATES= adler32-1.2.0 \
ahash-0.3.8 \
aho-corasick-0.7.13 \
ansi_term-0.12.1 \
app_dirs-1.2.1 \
atty-0.2.14 \
autocfg-1.0.1 \
base64-0.13.0 \
bitflags-1.2.1 \
bitreader-0.3.2 \
bitstream-io-0.8.5 \
block-buffer-0.9.0 \
block-padding-0.2.1 \
bstr-0.2.13 \
bytecount-0.6.0 \
byteorder-1.3.4 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
cc-1.0.60 \
cfg-if-0.1.10 \
chrono-0.4.19 \
chrono-english-0.1.4 \
cloudabi-0.0.3 \
cpuid-bool-0.1.2 \
crc32fast-1.2.0 \
csv-1.1.3 \
csv-core-0.1.10 \
digest-0.9.0 \
either-1.6.1 \
fixedbitset-0.2.0 \
flate2-1.0.14 \
fnv-1.0.7 \
generic-array-0.14.4 \
getrandom-0.1.15 \
hashbrown-0.7.2 \
hashbrown-0.9.1 \
hermit-abi-0.1.17 \
humansize-1.1.0 \
imagesize-0.8.8 \
indexmap-1.6.0 \
itertools-0.8.2 \
itoa-0.4.6 \
kamadak-exif-0.5.2 \
keccak-0.1.0 \
lazy_static-1.4.0 \
libc-0.2.79 \
lock_api-0.3.4 \
log-0.4.11 \
lscolors-0.7.1 \
matroska-0.5.4 \
memchr-1.0.2 \
memchr-2.3.3 \
miniz_oxide-0.3.7 \
mp3-metadata-0.3.3 \
mp4parse-0.11.4 \
mutate_once-0.1.1 \
nom-3.2.1 \
num-integer-0.1.43 \
num-traits-0.2.12 \
ole32-sys-0.2.0 \
opaque-debug-0.3.0 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
petgraph-0.5.1 \
phf-0.8.0 \
phf_generator-0.8.0 \
phf_macros-0.8.0 \
phf_shared-0.8.0 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
proc-macro-hack-0.5.18 \
proc-macro2-1.0.24 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_pcg-0.2.1 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
ryu-1.0.5 \
scanlex-0.1.2 \
scopeguard-1.1.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.58 \
sha-1-0.9.1 \
sha2-0.9.1 \
sha3-0.9.1 \
shell32-sys-0.1.2 \
siphasher-0.3.3 \
smallvec-1.4.2 \
static_assertions-1.1.0 \
syn-1.0.42 \
text_io-0.1.8 \
thiserror-1.0.21 \
thiserror-impl-1.0.21 \
thread_local-1.0.1 \
time-0.1.44 \
toml-0.5.6 \
tree_magic-0.2.3 \
typenum-1.12.0 \
unicode-xid-0.2.1 \
users-0.11.0 \
version_check-0.9.2 \
wana_kana-2.0.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xattr-0.2.2 \
xdg-2.2.0 \
zip-0.5.8
CARGO_FEATURES= --no-default-features
PLIST_FILES= bin/${PORTNAME} \
man/man1/${PORTNAME}.1.gz
_DOCS= docs/usage.md README.md
PORTDOCS= ${_DOCS:T}
OPTIONS_DEFINE= DOCS USERS
OPTIONS_DEFAULT= USERS
USERS_DESC= Query with 'user' and 'group' fields
USERS_VARS= CARGO_FEATURES+=users
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
${INSTALL_MAN} ${WRKSRC}/docs/${PORTNAME}.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC} && ${INSTALL_DATA} ${_DOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/sysutils/fusefs-sandboxfs/Makefile
===================================================================
--- head/sysutils/fusefs-sandboxfs/Makefile (revision 552220)
+++ head/sysutils/fusefs-sandboxfs/Makefile (revision 552221)
@@ -1,93 +1,93 @@
# $FreeBSD$
PORTNAME= sandboxfs
DISTVERSIONPREFIX= sandboxfs-
DISTVERSION= 0.2.0
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= sysutils
PKGNAMEPREFIX= fusefs-
MAINTAINER= ports@FreeBSD.org
COMMENT= Virtual file system for sandboxing
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo fuse
USE_GITHUB= yes
GH_ACCOUNT= bazelbuild
CARGO_CRATES= aho-corasick-0.7.10 \
arc-swap-0.4.5 \
atty-0.2.14 \
backtrace-0.3.46 \
backtrace-sys-0.1.35 \
bitflags-1.2.1 \
cc-1.0.50 \
cfg-if-0.1.10 \
cpuprofiler-0.0.4 \
env_logger-0.5.13 \
error-chain-0.12.2 \
failure-0.1.7 \
failure_derive-0.1.7 \
fuse-0.3.1 \
getopts-0.2.21 \
getrandom-0.1.14 \
hermit-abi-0.1.11 \
humantime-1.3.0 \
itoa-0.4.5 \
lazy_static-1.4.0 \
libc-0.2.69 \
log-0.3.9 \
log-0.4.8 \
memchr-2.3.3 \
nix-0.12.1 \
num_cpus-1.13.0 \
pkg-config-0.3.17 \
ppv-lite86-0.2.6 \
proc-macro2-1.0.10 \
quick-error-1.2.3 \
quote-1.0.3 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
regex-1.3.7 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
rustc-demangle-0.1.16 \
ryu-1.0.3 \
serde-1.0.106 \
serde_derive-1.0.106 \
serde_json-1.0.51 \
signal-hook-0.1.13 \
signal-hook-registry-1.2.0 \
syn-1.0.17 \
synstructure-0.12.3 \
tempfile-3.1.0 \
termcolor-1.1.0 \
thread-scoped-1.0.2 \
thread_local-1.0.1 \
threadpool-1.7.1 \
time-0.1.43 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
users-0.9.1 \
version_check-0.9.1 \
void-1.0.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xattr-0.2.2
PLIST_FILES= bin/sandboxfs \
share/man/man1/sandboxfs.1.gz
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/sandboxfs
${INSTALL_MAN} ${WRKSRC}/man/sandboxfs.1 ${STAGEDIR}${PREFIX}/share/man/man1
.include <bsd.port.mk>
Index: head/sysutils/hexyl/Makefile
===================================================================
--- head/sysutils/hexyl/Makefile (revision 552220)
+++ head/sysutils/hexyl/Makefile (revision 552221)
@@ -1,42 +1,42 @@
# $FreeBSD$
PORTNAME= hexyl
DISTVERSIONPREFIX= v
DISTVERSION= 0.8.0
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= sysutils
MAINTAINER= ports@FreeBSD.org
COMMENT= Command-line hex viewer
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= sharkdp
CARGO_CRATES= ansi_term-0.11.0 \
ansi_term-0.12.1 \
atty-0.2.14 \
bitflags-1.2.1 \
clap-2.33.1 \
hermit-abi-0.1.13 \
libc-0.2.71 \
strsim-0.8.0 \
term_size-0.3.2 \
textwrap-0.11.0 \
unicode-width-0.1.7 \
vec_map-0.8.2 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/hexyl
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/hexyl
.include <bsd.port.mk>
Index: head/sysutils/lsd/Makefile
===================================================================
--- head/sysutils/lsd/Makefile (revision 552220)
+++ head/sysutils/lsd/Makefile (revision 552221)
@@ -1,102 +1,102 @@
# $FreeBSD$
PORTNAME= lsd
DISTVERSION= 0.17.0
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= sysutils
MAINTAINER= andoriyu@gmail.com
COMMENT= Pretty ls alternative with support for icons
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= Peltoche
CARGO_CRATES= aho-corasick-0.7.6 \
ansi_term-0.11.0 \
ansi_term-0.12.1 \
assert_cmd-0.11.1 \
assert_fs-0.11.3 \
atty-0.2.13 \
autocfg-0.1.7 \
bitflags-1.2.1 \
bstr-0.2.8 \
c2-chacha-0.2.3 \
cfg-if-0.1.10 \
chrono-0.4.9 \
chrono-humanize-0.0.11 \
clap-2.33.0 \
crossbeam-channel-0.3.9 \
crossbeam-utils-0.6.6 \
difference-2.0.0 \
escargot-0.4.0 \
float-cmp-0.4.0 \
fnv-1.0.6 \
getrandom-0.1.12 \
glob-0.3.0 \
globset-0.4.4 \
globwalk-0.5.0 \
ignore-0.4.10 \
itoa-0.4.4 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.65 \
log-0.4.8 \
lscolors-0.6.0 \
memchr-2.2.1 \
normalize-line-endings-0.2.2 \
num-integer-0.1.41 \
num-traits-0.2.8 \
ppv-lite86-0.2.6 \
predicates-1.0.1 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro2-1.0.6 \
quote-1.0.2 \
rand-0.7.2 \
rand_chacha-0.2.1 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
regex-1.3.1 \
regex-syntax-0.6.12 \
remove_dir_all-0.5.2 \
ryu-1.0.2 \
same-file-1.0.5 \
serde-1.0.101 \
serde_derive-1.0.101 \
serde_json-1.0.41 \
strsim-0.8.0 \
syn-1.0.5 \
tempfile-3.1.0 \
term_grid-0.1.7 \
term_size-0.3.1 \
terminal_size-0.1.8 \
textwrap-0.11.0 \
thread_local-0.3.6 \
time-0.1.42 \
treeline-0.1.0 \
unicode-width-0.1.6 \
unicode-xid-0.2.0 \
users-0.9.1 \
vec_map-0.8.1 \
version_check-0.9.1 \
walkdir-2.2.9 \
wasi-0.7.0 \
wild-2.0.2 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/lsd
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/lsd
.include <bsd.port.mk>
Index: head/sysutils/onefetch/Makefile
===================================================================
--- head/sysutils/onefetch/Makefile (revision 552220)
+++ head/sysutils/onefetch/Makefile (revision 552221)
@@ -1,260 +1,261 @@
# $FreeBSD$
PORTNAME= onefetch
DISTVERSIONPREFIX= v
DISTVERSION= 2.4.0
+PORTREVISION= 1
CATEGORIES= sysutils devel
MAINTAINER= vulcan@wired.sh
COMMENT= Git repository summary on your terminal
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE.md
LIB_DEPENDS= libgit2.so:devel/libgit2
RUN_DEPENDS= git:devel/git
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= o2sh
CARGO_CRATES= adler-0.2.3 \
adler32-1.0.4 \
ahash-0.3.8 \
aho-corasick-0.7.10 \
ansi_term-0.11.0 \
ansi_term-0.12.1 \
arc-swap-0.4.7 \
arrayref-0.3.5 \
arrayvec-0.4.12 \
askalono-0.4.3 \
atty-0.2.13 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
base64-0.10.1 \
base64-0.13.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.8 \
block-buffer-0.7.3 \
block-padding-0.1.4 \
bstr-0.2.8 \
byte-tools-0.3.1 \
bytecount-0.6.0 \
bytemuck-1.3.1 \
byteorder-1.3.2 \
bytes-0.5.6 \
cc-1.0.46 \
cfg-if-0.1.10 \
chrono-0.4.11 \
chrono-tz-0.5.2 \
clap-2.33.3 \
cloudabi-0.0.3 \
color_quant-1.0.1 \
colored-2.0.0 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
constant_time_eq-0.1.4 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.2 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.2 \
dashmap-3.11.4 \
deflate-0.8.6 \
deunicode-0.4.3 \
digest-0.8.1 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
either-1.5.3 \
encoding_rs-0.8.20 \
encoding_rs_io-0.1.7 \
env_logger-0.7.1 \
failure-0.1.6 \
failure_derive-0.1.6 \
fake-simd-0.1.2 \
flate2-1.0.17 \
fnv-1.0.6 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
generic-array-0.12.3 \
getrandom-0.1.13 \
gif-0.11.1 \
git2-0.13.11 \
glob-0.3.0 \
globset-0.4.5 \
globwalk-0.8.0 \
grep-matcher-0.1.3 \
grep-searcher-0.1.7 \
heck-0.3.1 \
hermit-abi-0.1.14 \
humansize-1.1.0 \
humantime-1.3.0 \
idna-0.2.0 \
ignore-0.4.16 \
image-0.23.10 \
iovec-0.1.4 \
itertools-0.9.0 \
itoa-0.4.4 \
jobserver-0.1.17 \
jpeg-decoder-0.1.20 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.78 \
libgit2-sys-0.12.13+1.0.1 \
libz-sys-1.1.0 \
lock_api-0.3.4 \
log-0.4.8 \
lzw-0.10.0 \
maplit-1.0.2 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.2.1 \
memmap-0.7.0 \
memoffset-0.5.1 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.2 \
mio-0.6.22 \
mio-named-pipes-0.1.7 \
mio-uds-0.6.8 \
miow-0.2.1 \
miow-0.3.5 \
more-asserts-0.2.1 \
net2-0.2.34 \
nodrop-0.1.14 \
num-integer-0.1.43 \
num-iter-0.1.39 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.0 \
opaque-debug-0.2.3 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
parse-zoneinfo-0.3.0 \
paste-1.0.1 \
percent-encoding-2.1.0 \
pest-2.1.2 \
pest_derive-2.1.0 \
pest_generator-2.1.1 \
pest_meta-2.1.2 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.16 \
png-0.16.7 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.5.16 \
proc-macro-nested-0.1.6 \
proc-macro2-1.0.18 \
quick-error-1.2.3 \
quote-1.0.2 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_os-0.1.3 \
rayon-1.3.1 \
rayon-core-1.7.1 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
regex-1.3.9 \
regex-syntax-0.6.18 \
rmp-0.8.8 \
rmp-serde-0.14.4 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
ryu-1.0.2 \
same-file-1.0.5 \
scoped_threadpool-0.1.9 \
scopeguard-1.0.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.55 \
sha-1-0.8.1 \
signal-hook-registry-1.2.1 \
slab-0.4.2 \
slug-0.1.4 \
smallvec-0.6.10 \
smallvec-1.4.0 \
socket2-0.3.12 \
strsim-0.8.0 \
strum-0.19.2 \
strum_macros-0.19.2 \
syn-1.0.33 \
synstructure-0.12.1 \
tera-1.3.1 \
term_size-0.3.2 \
termcolor-1.0.5 \
textwrap-0.11.0 \
thread_local-1.0.1 \
tiff-0.5.0 \
time-0.1.42 \
tokei-12.0.4 \
tokio-0.2.22 \
tokio-macros-0.2.5 \
toml-0.5.6 \
typenum-1.11.2 \
ucd-trie-0.1.2 \
unic-char-property-0.9.0 \
unic-char-range-0.9.0 \
unic-common-0.9.0 \
unic-segment-0.9.0 \
unic-ucd-segment-0.9.0 \
unic-ucd-version-0.9.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-segmentation-1.3.0 \
unicode-width-0.1.6 \
unicode-xid-0.2.0 \
url-2.1.1 \
vcpkg-0.2.7 \
vec_map-0.8.1 \
walkdir-2.2.9 \
wasi-0.7.0 \
weezl-0.1.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.2 \
ws2_32-sys-0.2.1 \
zstd-0.5.3+zstd.1.4.5 \
zstd-safe-2.0.5+zstd.1.4.5 \
zstd-sys-1.4.17+zstd.1.4.5
PLIST_FILES= bin/${PORTNAME}
PORTDOCS= CHANGELOG.md CONTRIBUTING.md README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/sysutils/potnet/Makefile
===================================================================
--- head/sysutils/potnet/Makefile (revision 552220)
+++ head/sysutils/potnet/Makefile (revision 552221)
@@ -1,110 +1,110 @@
# $FreeBSD$
PORTNAME= potnet
DISTVERSION= 0.4.4
-PORTREVISION= 6
+PORTREVISION= 7
CATEGORIES= sysutils
MAINTAINER= pizzamig@FreeBSD.org
COMMENT= Utility to help sysutils/pot to manage the internal network
LICENSE= BSD3CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= pizzamig
PLIST_FILES= bin/potcpu bin/potnet
CARGO_CRATES= ansi_term-0.11.0 \
arrayref-0.3.5 \
arrayvec-0.4.11 \
atty-0.2.13 \
autocfg-0.1.6 \
backtrace-0.3.38 \
backtrace-sys-0.1.31 \
base64-0.10.1 \
bitflags-1.1.0 \
blake2b_simd-0.5.8 \
bytecount-0.4.0 \
byteorder-1.3.2 \
cargo_metadata-0.6.4 \
cc-1.0.45 \
cfg-if-0.1.9 \
chrono-0.4.9 \
clap-2.33.0 \
cloudabi-0.0.3 \
constant_time_eq-0.1.4 \
crossbeam-utils-0.6.6 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
either-1.5.3 \
error-chain-0.12.1 \
failure-0.1.5 \
failure_derive-0.1.5 \
fuchsia-cprng-0.1.1 \
glob-0.2.11 \
heck-0.3.1 \
ipnet-2.0.0 \
itertools-0.8.0 \
itoa-0.4.4 \
lazy_static-1.4.0 \
libc-0.2.62 \
log-0.4.8 \
nodrop-0.1.13 \
num-integer-0.1.41 \
num-traits-0.2.8 \
proc-macro-error-0.2.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.4 \
pulldown-cmark-0.2.0 \
quote-0.6.13 \
quote-1.0.2 \
rand-0.4.6 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_os-0.1.3 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
remove_dir_all-0.5.2 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
ryu-1.0.0 \
same-file-1.0.5 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.101 \
serde_derive-1.0.101 \
serde_json-1.0.40 \
simplelog-0.7.3 \
skeptic-0.13.4 \
strsim-0.8.0 \
structopt-0.3.2 \
structopt-derive-0.3.2 \
structopt-flags-0.3.5 \
syn-0.15.44 \
syn-1.0.5 \
synstructure-0.10.2 \
tempdir-0.3.7 \
term-0.6.1 \
textwrap-0.11.0 \
time-0.1.42 \
unicode-segmentation-1.3.0 \
unicode-width-0.1.6 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
version_check-0.1.5 \
walkdir-2.2.9 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/potnet
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/potcpu
.include <bsd.port.mk>
Index: head/sysutils/rsfetch/Makefile
===================================================================
--- head/sysutils/rsfetch/Makefile (revision 552220)
+++ head/sysutils/rsfetch/Makefile (revision 552221)
@@ -1,221 +1,221 @@
# $FreeBSD$
PORTNAME= rsfetch
DISTVERSION= 2.0.0
-PORTREVISION= 7
+PORTREVISION= 8
CATEGORIES= sysutils
MAINTAINER= vulcan@wired.sh
COMMENT= Minimal fetch program written in Rust
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= ${PORTNAME}
CARGO_CRATES= adler32-1.0.4 \
aho-corasick-0.7.10 \
ansi_term-0.11.0 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
backtrace-0.3.45 \
backtrace-sys-0.1.34 \
base64-0.10.1 \
base64-0.11.0 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
bstr-0.2.12 \
byteorder-1.3.4 \
bytes-0.4.12 \
cc-1.0.50 \
cfg-if-0.1.10 \
chrono-0.4.11 \
clap-2.33.0 \
cloudabi-0.0.3 \
constant_time_eq-0.1.5 \
cookie-0.12.0 \
cookie_store-0.7.0 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crc32fast-1.2.0 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.1 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
dirs-1.0.5 \
dtoa-0.4.5 \
either-1.5.3 \
encode_unicode-0.3.6 \
encoding_rs-0.8.22 \
env_logger-0.6.2 \
error-chain-0.12.2 \
failure-0.1.7 \
failure_derive-0.1.7 \
flate2-1.0.13 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-cpupool-0.1.8 \
getrandom-0.1.14 \
h2-0.1.26 \
hermit-abi-0.1.8 \
http-0.1.21 \
http-body-0.1.0 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.12.35 \
hyper-tls-0.3.2 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.3.2 \
iovec-0.1.4 \
itoa-0.4.5 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.67 \
lock_api-0.3.3 \
log-0.4.8 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.3.6 \
mio-0.6.21 \
miow-0.2.1 \
native-tls-0.2.4 \
net2-0.2.33 \
num-integer-0.1.42 \
num-traits-0.2.11 \
num_cpus-1.12.0 \
openssl-0.10.28 \
openssl-probe-0.1.2 \
openssl-sys-0.9.54 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pkg-config-0.3.17 \
ppv-lite86-0.2.6 \
pretty_env_logger-0.3.1 \
prettytable-rs-0.8.0 \
proc-macro2-0.4.30 \
proc-macro2-1.0.9 \
publicsuffix-1.5.4 \
quick-error-1.2.3 \
quote-0.6.13 \
quote-1.0.3 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.5 \
regex-automata-0.1.9 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
reqwest-0.9.24 \
rust-argon2-0.7.0 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
ryu-1.0.3 \
schannel-0.1.17 \
scopeguard-1.1.0 \
security-framework-0.4.1 \
security-framework-sys-0.4.1 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.104 \
serde_derive-1.0.104 \
serde_json-1.0.48 \
serde_urlencoded-0.5.5 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.2.0 \
snafu-0.3.1 \
snafu-derive-0.3.1 \
string-0.2.1 \
strsim-0.8.0 \
syn-0.15.44 \
syn-1.0.16 \
synstructure-0.12.3 \
tempfile-3.1.0 \
term-0.5.2 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.42 \
tokio-0.1.22 \
tokio-buf-0.1.1 \
tokio-current-thread-0.1.7 \
tokio-executor-0.1.10 \
tokio-io-0.1.13 \
tokio-reactor-0.1.12 \
tokio-sync-0.1.8 \
tokio-tcp-0.1.4 \
tokio-threadpool-0.1.18 \
tokio-timer-0.2.13 \
try-lock-0.2.2 \
try_from-0.3.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.12 \
unicode-width-0.1.7 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
url-1.7.2 \
url-2.1.1 \
uuid-0.7.4 \
vcpkg-0.2.8 \
vec_map-0.8.1 \
version_check-0.9.1 \
want-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.3 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.6.2 \
ws2_32-sys-0.2.1
PLIST_FILES= bin/${PORTNAME} \
man/man1/${PORTNAME}.1.gz
PORTDOCS= CHANGELOG.md README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
${INSTALL_MAN} ${WRKSRC}/${PORTNAME}.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/sysutils/tealdeer/Makefile
===================================================================
--- head/sysutils/tealdeer/Makefile (revision 552220)
+++ head/sysutils/tealdeer/Makefile (revision 552221)
@@ -1,228 +1,229 @@
# $FreeBSD$
PORTNAME= tealdeer
DISTVERSIONPREFIX= v
DISTVERSION= 1.4.1
+PORTREVISION= 1
CATEGORIES= sysutils
MAINTAINER= vulcan@wired.sh
COMMENT= Fast tldr client written in Rust
LICENSE= APACHE20 MIT
LICENSE_COMB= dual
LICENSE_FILE_APACHE20= ${WRKSRC}/LICENSE-APACHE
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= dbrgn
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
aho-corasick-0.7.13 \
android_log-sys-0.1.2 \
ansi_term-0.12.1 \
app_dirs2-2.3.0 \
ascii-0.9.3 \
assert_cmd-1.0.1 \
atty-0.2.14 \
autocfg-1.0.1 \
backtrace-0.3.50 \
base64-0.12.3 \
bitflags-1.2.1 \
bumpalo-3.4.0 \
byteorder-1.3.4 \
bytes-0.5.6 \
cc-1.0.59 \
cesu8-1.1.0 \
cfg-if-0.1.10 \
combine-3.8.1 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crc32fast-1.2.0 \
derivative-2.1.1 \
difference-2.0.0 \
doc-comment-0.3.3 \
docopt-1.1.0 \
dtoa-0.4.6 \
either-1.6.0 \
encoding_rs-0.8.24 \
env_logger-0.7.1 \
errno-0.2.6 \
errno-dragonfly-0.1.1 \
error-chain-0.12.4 \
escargot-0.5.0 \
filetime-0.2.12 \
flate2-1.0.17 \
float-cmp-0.8.0 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-io-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
gcc-0.3.55 \
getrandom-0.1.14 \
gimli-0.22.0 \
h2-0.2.6 \
hashbrown-0.8.2 \
hermit-abi-0.1.15 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.13.7 \
hyper-tls-0.4.3 \
idna-0.2.0 \
indexmap-1.5.2 \
iovec-0.1.4 \
ipnet-2.3.0 \
itoa-0.4.6 \
jni-0.14.0 \
jni-sys-0.3.0 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.76 \
log-0.4.11 \
matches-0.1.8 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.1 \
mio-0.6.22 \
miow-0.2.1 \
native-tls-0.2.4 \
ndk-0.1.0 \
ndk-glue-0.1.0 \
ndk-sys-0.1.0 \
net2-0.2.34 \
normalize-line-endings-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
num_enum-0.4.3 \
num_enum_derive-0.4.3 \
object-0.20.0 \
once_cell-1.4.1 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
pager-0.15.0 \
percent-encoding-2.1.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro-crate-0.1.5 \
proc-macro2-1.0.19 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.2 \
reqwest-0.10.8 \
rustc-demangle-0.1.16 \
ryu-1.0.5 \
same-file-1.0.6 \
schannel-0.1.19 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
slab-0.4.2 \
socket2-0.3.12 \
strsim-0.9.3 \
syn-1.0.39 \
tar-0.4.30 \
tempfile-3.1.0 \
termcolor-1.1.0 \
thread_local-1.0.1 \
time-0.1.44 \
tinyvec-0.3.4 \
tokio-0.2.22 \
tokio-tls-0.3.1 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.19 \
tracing-core-0.1.15 \
treeline-0.1.0 \
try-lock-0.2.3 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-xid-0.2.1 \
unreachable-1.0.0 \
url-2.1.1 \
vcpkg-0.2.10 \
version_check-0.9.2 \
void-1.0.2 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.4.17 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1 \
xattr-0.2.2 \
xdg-2.2.0
CARGO_FEATURES= --no-default-features
PLIST_FILES= bin/tldr \
etc/bash_completion.d/tldr.bash \
share/fish/completions/tldr.fish \
share/zsh/site_functions/_tldr
PORTDOCS= CHANGELOG.md README.md
OPTIONS_DEFINE= DOCS LOGGER
LOGGER_DESC= Debug build with logging enabled
LOGGER_VARS= CARGO_FEATURES+=logging
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/tldr
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d
${INSTALL_DATA} ${WRKSRC}/bash_${PORTNAME} \
${STAGEDIR}${PREFIX}/etc/bash_completion.d/tldr.bash
@${MKDIR} ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${WRKSRC}/fish_${PORTNAME} \
${STAGEDIR}${PREFIX}/share/fish/completions/tldr.fish
@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site_functions
${INSTALL_DATA} ${WRKSRC}/zsh_${PORTNAME} \
${STAGEDIR}${PREFIX}/share/zsh/site_functions/_tldr
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${PORTDOCS:S|^|${WRKSRC}/|} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/sysutils/vector/Makefile
===================================================================
--- head/sysutils/vector/Makefile (revision 552220)
+++ head/sysutils/vector/Makefile (revision 552221)
@@ -1,486 +1,486 @@
# $FreeBSD$
PORTNAME= vector
DISTVERSIONPREFIX= v
DISTVERSION= 0.7.1
-PORTREVISION= 9
+PORTREVISION= 10
CATEGORIES= sysutils
MAINTAINER= greg@unrelenting.technology
COMMENT= High performance logs, metrics, and events router
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/LICENSE
BROKEN_FreeBSD_12_powerpc64= fails to link: /usr/local/bin/ld: cannot find -lc++
BUILD_DEPENDS= protoc:devel/protobuf
LIB_DEPENDS= libonig.so:devel/oniguruma \
librdkafka.so:net/librdkafka
USES= cargo lua:53 ssl
USE_GITHUB= yes
GH_ACCOUNT= timberio
PLIST_FILES= bin/vector
GH_TUPLE= timberio:leveldb:64265815bcf1b69f30e6cb35bf687fbd6dd64afb:leveldb \
timberio:leveldb-sys:0f226b0cce86aff28f255ef89082916e4fdda4c7:leveldbsys \
timberio:rlua:c41bfa06cfaf3df543796d3104ec910dd1a24c44:rlua \
tokio-rs:tracing:8720792dbdf3158c4dbfcaf879e28da7af67c3f7:tracingfutures
CARGO_CRATES= adler32-1.0.4 \
aho-corasick-0.7.6 \
ansi_term-0.11.0 \
antidote-1.0.0 \
approx-0.3.2 \
arc-swap-0.4.3 \
arrayref-0.3.5 \
arrayvec-0.4.12 \
assert_matches-1.3.0 \
atty-0.2.13 \
autocfg-0.1.7 \
backtrace-0.3.40 \
backtrace-sys-0.1.32 \
base64-0.9.3 \
base64-0.10.1 \
base64-0.11.0 \
bit-set-0.5.1 \
bit-vec-0.5.1 \
bitflags-1.2.1 \
blake2b_simd-0.5.8 \
block-buffer-0.7.3 \
block-padding-0.1.4 \
bstr-0.2.8 \
buf_redux-0.8.4 \
build_const-0.2.1 \
built-0.3.2 \
byte-tools-0.3.1 \
byteorder-1.3.2 \
bytes-0.4.12 \
bytesize-1.0.0 \
c2-chacha-0.2.3 \
cast-0.2.2 \
cc-1.0.46 \
cfg-if-0.1.10 \
chrono-0.4.9 \
clap-2.33.0 \
cloudabi-0.0.3 \
cmake-0.1.42 \
colored-1.9.0 \
constant_time_eq-0.1.4 \
cookie-0.12.0 \
cookie_store-0.7.0 \
core-foundation-0.6.4 \
core-foundation-sys-0.6.2 \
crc-1.8.1 \
crc32fast-1.2.0 \
criterion-0.2.11 \
criterion-0.3.0 \
criterion-plot-0.3.1 \
criterion-plot-0.4.0 \
crossbeam-channel-0.3.9 \
crossbeam-deque-0.7.1 \
crossbeam-epoch-0.7.2 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.6 \
crypto-mac-0.7.0 \
csv-1.1.1 \
csv-core-0.1.6 \
ctor-0.1.12 \
data-encoding-2.1.2 \
data-encoding-macro-0.1.7 \
data-encoding-macro-internal-0.1.7 \
db-key-0.0.5 \
derivative-1.0.3 \
derive_is_enum_variant-0.1.1 \
difference-2.0.0 \
digest-0.8.1 \
dirs-1.0.5 \
doc-comment-0.3.1 \
dtoa-0.4.4 \
either-1.5.3 \
elastic_responses-0.20.10 \
encoding_rs-0.8.20 \
endian-type-0.1.2 \
enum-as-inner-0.2.1 \
env_logger-0.5.13 \
env_logger-0.6.2 \
erased-serde-0.3.9 \
error-chain-0.12.1 \
evmap-7.1.2 \
exitcode-1.1.2 \
failure-0.1.6 \
failure_derive-0.1.6 \
fake-simd-0.1.2 \
fallible-iterator-0.2.0 \
fallible-streaming-iterator-0.1.9 \
filetime-0.2.7 \
fixedbitset-0.1.9 \
flate2-1.0.12 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fs_extra-1.1.0 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-0.3.1 \
futures-channel-0.3.1 \
futures-core-0.3.1 \
futures-cpupool-0.1.8 \
futures-io-0.3.1 \
futures-sink-0.3.1 \
futures-task-0.3.1 \
futures-util-0.3.1 \
futures01-0.1.29 \
generic-array-0.12.3 \
getrandom-0.1.13 \
ghost-0.1.1 \
git2-0.10.1 \
glob-0.2.11 \
glob-0.3.0 \
goauth-0.6.0 \
grok-1.0.1 \
h2-0.1.26 \
hashbrown-0.1.8 \
hdrhistogram-6.3.4 \
headers-0.2.3 \
headers-core-0.1.1 \
heck-0.3.1 \
hermit-abi-0.1.3 \
hex-0.3.2 \
hmac-0.7.1 \
hostname-0.1.5 \
hotmic-0.8.2 \
http-0.1.19 \
http-body-0.1.0 \
http-connection-0.1.0 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.12.35 \
hyper-openssl-0.7.1 \
hyper-tls-0.3.2 \
hyperlocal-0.6.0 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.3.0 \
input_buffer-0.2.0 \
inventory-0.1.4 \
inventory-impl-0.1.4 \
iovec-0.1.4 \
ipconfig-0.2.1 \
itertools-0.7.11 \
itertools-0.8.1 \
itoa-0.4.4 \
jemalloc-sys-0.3.2 \
jemallocator-0.3.2 \
jobserver-0.1.17 \
k8s-openapi-0.5.1 \
kernel32-sys-0.2.2 \
kube-0.16.1 \
lazy_static-1.4.0 \
leveldb-sys-2.0.5 \
lexical-core-0.4.6 \
libc-0.2.65 \
libgit2-sys-0.9.1 \
libsqlite3-sys-0.16.0 \
libz-sys-1.0.25 \
linked-hash-map-0.5.2 \
linked_hash_set-0.1.3 \
listenfd-0.3.3 \
lock_api-0.1.5 \
lock_api-0.3.1 \
log-0.3.9 \
log-0.4.8 \
lz4-sys-1.8.3 \
logfmt-0.0.2 \
lru-cache-0.1.2 \
matchers-0.0.1 \
matches-0.1.8 \
maxminddb-0.13.0 \
md5-0.6.1 \
memchr-2.2.1 \
memoffset-0.5.2 \
mime-0.2.6 \
mime-0.3.14 \
mime_guess-1.8.7 \
mime_guess-2.0.1 \
miniz_oxide-0.3.5 \
mio-0.6.19 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.3 \
multimap-0.4.0 \
multipart-0.16.1 \
native-tls-0.2.3 \
net2-0.2.33 \
new_debug_unreachable-1.0.3 \
nibble_vec-0.0.4 \
nodrop-0.1.14 \
nom-4.2.3 \
nom-5.0.1 \
num-integer-0.1.41 \
num-traits-0.2.8 \
num_cpus-0.2.13 \
num_cpus-1.11.0 \
onig-4.3.3 \
onig_sys-69.1.0 \
opaque-debug-0.2.3 \
openssl-0.10.26 \
openssl-probe-0.1.2 \
openssl-src-111.6.0+1.1.1d \
openssl-sys-0.9.53 \
ordered-float-1.0.2 \
output_vt100-0.1.2 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot-0.9.0 \
parking_lot_core-0.4.0 \
parking_lot_core-0.6.2 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
petgraph-0.4.13 \
phf-0.7.24 \
phf_codegen-0.7.24 \
phf_generator-0.7.24 \
phf_shared-0.7.24 \
pin-project-0.4.6 \
pin-project-internal-0.4.6 \
pin-utils-0.1.0-alpha.4 \
pkg-config-0.3.16 \
ppv-lite86-0.2.6 \
precomputed-hash-0.1.1 \
pretty_assertions-0.6.1 \
proc-macro-hack-0.5.11 \
proc-macro2-0.4.30 \
proc-macro2-1.0.6 \
proptest-0.9.4 \
prost-0.4.0 \
prost-0.5.0 \
prost-build-0.4.0 \
prost-derive-0.4.0 \
prost-derive-0.5.0 \
prost-types-0.4.0 \
prost-types-0.5.0 \
publicsuffix-1.5.3 \
rdkafka-0.22.0 \
rdkafka-sys-1.2.2 \
quanta-0.2.0 \
quick-error-1.2.2 \
quickcheck-0.6.2 \
quote-0.3.15 \
quote-0.6.13 \
quote-1.0.2 \
radix_trie-0.1.5 \
rand-0.4.6 \
rand-0.5.6 \
rand-0.6.5 \
rand-0.7.2 \
rand_chacha-0.1.1 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_os-0.2.2 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rand_xoshiro-0.1.0 \
rand_xoshiro-0.3.1 \
rayon-1.2.0 \
rayon-core-1.6.0 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_users-0.3.1 \
regex-1.3.1 \
regex-automata-0.1.8 \
regex-syntax-0.6.12 \
remove_dir_all-0.5.2 \
reqwest-0.9.22 \
resolv-conf-0.6.2 \
rusoto_cloudwatch-0.41.0 \
rusoto_core-0.41.0 \
rusoto_credential-0.41.1 \
rusoto_firehose-0.41.0 \
rusoto_kinesis-0.41.0 \
rusoto_logs-0.41.0 \
rusoto_s3-0.41.0 \
rusoto_sts-0.41.0 \
rusqlite-0.20.0 \
rust-argon2-0.5.1 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
rusty-fork-0.2.2 \
ryu-1.0.2 \
safemem-0.3.3 \
same-file-1.0.5 \
scan_fmt-0.2.4 \
schannel-0.1.16 \
scoped-tls-1.0.0 \
scopeguard-0.3.3 \
scopeguard-1.0.0 \
seahash-3.0.6 \
security-framework-0.3.1 \
security-framework-sys-0.3.1 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.102 \
serde-value-0.6.0 \
serde_derive-1.0.102 \
serde_json-1.0.41 \
serde_urlencoded-0.5.5 \
serde_urlencoded-0.6.1 \
serde_yaml-0.8.11 \
sha-1-0.8.1 \
sha2-0.8.0 \
shiplift-0.6.0 \
shlex-0.1.1 \
signal-hook-0.1.11 \
signal-hook-registry-1.1.1 \
siphasher-0.2.3 \
slab-0.4.2 \
smallvec-0.6.12 \
smallvec-1.0.0 \
smpl_jwt-0.4.0 \
snafu-0.4.4 \
snafu-derive-0.4.4 \
socket2-0.3.11 \
spin-0.5.2 \
stable_deref_trait-1.1.1 \
static_assertions-0.3.4 \
stream-cancel-0.4.4 \
string-0.2.1 \
string_cache-0.7.5 \
string_cache_codegen-0.4.4 \
string_cache_shared-0.3.0 \
strip-ansi-escapes-0.1.0 \
strsim-0.8.0 \
structopt-0.2.18 \
structopt-derive-0.2.18 \
subtle-1.0.0 \
syn-0.11.11 \
syn-0.14.9 \
syn-0.15.44 \
syn-1.0.7 \
synom-0.11.3 \
synstructure-0.12.1 \
syslog_rfc5424-0.6.1 \
tar-0.4.26 \
tempdir-0.3.7 \
tempfile-3.1.0 \
termcolor-1.0.5 \
textwrap-0.11.0 \
thread_local-0.3.6 \
time-0.1.42 \
tinytemplate-1.0.2 \
tokio-0.1.22 \
tokio-buf-0.1.1 \
tokio-codec-0.1.1 \
tokio-current-thread-0.1.6 \
tokio-executor-0.1.8 \
tokio-fs-0.1.6 \
tokio-io-0.1.12 \
tokio-openssl-0.3.0 \
tokio-process-0.2.4 \
tokio-reactor-0.1.10 \
tokio-retry-0.2.0 \
tokio-signal-0.2.7 \
tokio-sync-0.1.7 \
tokio-tcp-0.1.3 \
tokio-threadpool-0.1.16 \
tokio-timer-0.2.11 \
tokio-tls-0.2.1 \
tokio-udp-0.1.5 \
tokio-uds-0.2.5 \
tokio01-test-0.1.1 \
toml-0.4.10 \
toml-0.5.5 \
tower-0.1.1 \
tower-buffer-0.1.2 \
tower-discover-0.1.0 \
tower-http-util-0.1.0 \
tower-hyper-0.1.1 \
tower-layer-0.1.0 \
tower-limit-0.1.1 \
tower-load-shed-0.1.0 \
tower-retry-0.1.0 \
tower-service-0.2.0 \
tower-test-0.1.0 \
tower-timeout-0.1.1 \
tower-util-0.1.0 \
tracing-0.1.10 \
tracing-attributes-0.1.5 \
tracing-core-0.1.7 \
tracing-futures-0.2.0 \
tracing-log-0.1.1 \
tracing-subscriber-0.1.6 \
trust-dns-0.17.0 \
trust-dns-proto-0.8.0 \
trust-dns-resolver-0.12.0 \
trust-dns-server-0.17.0 \
try-lock-0.2.2 \
try_from-0.3.2 \
tungstenite-0.9.2 \
twoway-0.1.8 \
typenum-1.11.2 \
typetag-0.1.4 \
typetag-impl-0.1.4 \
unicase-1.4.2 \
unicase-2.5.1 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.8 \
unicode-segmentation-1.5.0 \
unicode-width-0.1.6 \
unicode-xid-0.0.4 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
url-1.7.2 \
url-2.1.0 \
urlencoding-1.0.0 \
utf-8-0.7.5 \
utf8-ranges-1.0.4 \
utf8parse-0.1.1 \
uuid-0.6.5 \
uuid-0.7.4 \
vcpkg-0.2.7 \
vec_map-0.8.1 \
version_check-0.1.5 \
vte-0.3.3 \
wait-timeout-0.2.0 \
walkdir-2.2.9 \
want-0.2.0 \
warp-0.1.20 \
wasi-0.7.0 \
which-2.0.1 \
widestring-0.4.0 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.2 \
winreg-0.6.2 \
winutil-0.1.1 \
ws2_32-sys-0.2.1 \
xattr-0.2.2 \
xml-rs-0.8.0 \
yaml-rust-0.4.3
CARGO_USE_GITHUB= yes
CARGO_GIT_SUBDIR= tracingfutures:tracing-futures:tracing-futures \
tracingfutures:tracing-tower:tracing-tower
post-patch:
${REINPLACE_CMD} -e 's|.probe("lua")|.probe("lua-5.3")|' \
${WRKSRC_rlua}/build.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/vector
.include <bsd.port.mk>
Index: head/sysutils/zoxide/Makefile
===================================================================
--- head/sysutils/zoxide/Makefile (revision 552220)
+++ head/sysutils/zoxide/Makefile (revision 552221)
@@ -1,77 +1,77 @@
# $FreeBSD$
PORTNAME= zoxide
DISTVERSIONPREFIX= v
DISTVERSION= 0.4.1
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= sysutils
MAINTAINER= andoriyu@gmail.com
COMMENT= Fast cd alternative that learns your habits
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= ajeetdsouza
CARGO_CRATES= ansi_term-0.11.0 \
anyhow-1.0.31 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
bincode-1.2.1 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
byteorder-1.3.4 \
cfg-if-0.1.10 \
clap-2.33.1 \
constant_time_eq-0.1.5 \
crossbeam-utils-0.7.2 \
dirs-2.0.2 \
dirs-sys-0.3.4 \
dunce-1.0.0 \
float-ord-0.2.0 \
getrandom-0.1.14 \
heck-0.3.1 \
hermit-abi-0.1.13 \
lazy_static-1.4.0 \
libc-0.2.70 \
ppv-lite86-0.2.8 \
proc-macro-error-1.0.2 \
proc-macro-error-attr-1.0.2 \
proc-macro2-1.0.17 \
quote-1.0.6 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
rust-argon2-0.7.0 \
serde-1.0.110 \
serde_derive-1.0.110 \
strsim-0.8.0 \
structopt-0.3.14 \
structopt-derive-0.4.7 \
syn-1.0.23 \
syn-mid-0.5.0 \
textwrap-0.11.0 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
uuid-0.8.1 \
vec_map-0.8.2 \
version_check-0.9.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/zoxide
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/zoxide
.include <bsd.port.mk>
Index: head/textproc/angle-grinder/Makefile
===================================================================
--- head/textproc/angle-grinder/Makefile (revision 552220)
+++ head/textproc/angle-grinder/Makefile (revision 552221)
@@ -1,278 +1,279 @@
# $FreeBSD$
PORTNAME= angle-grinder
DISTVERSIONPREFIX= v
DISTVERSION= 0.15.0
+PORTREVISION= 1
CATEGORIES= textproc
MAINTAINER= vulcan@wired.sh
COMMENT= Slice and dice logs on the command line
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= rcoh
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
aho-corasick-0.7.13 \
annotate-snippets-0.5.0 \
ansi_term-0.11.0 \
assert_cli-0.6.3 \
atty-0.2.14 \
autocfg-1.0.0 \
backtrace-0.3.50 \
base64-0.9.3 \
base64-0.12.3 \
bitflags-1.2.1 \
bstr-0.2.13 \
bumpalo-3.4.0 \
bytecount-0.3.2 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.6 \
cargo-husky-1.5.0 \
cast-0.2.3 \
cc-1.0.58 \
cfg-if-0.1.10 \
chrono-0.4.13 \
chrono-tz-0.5.2 \
clap-2.33.2 \
clap-verbosity-flag-0.2.0 \
colored-1.9.3 \
console-0.11.3 \
criterion-0.3.3 \
criterion-plot-0.4.3 \
crossbeam-channel-0.3.9 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.3 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
difference-2.0.0 \
dtoa-0.4.6 \
dtparse-1.1.0 \
either-1.5.3 \
encode_unicode-0.3.6 \
encoding_rs-0.8.23 \
env_logger-0.5.13 \
environment-0.1.1 \
exitfailure-0.5.1 \
failure-0.1.8 \
failure_derive-0.1.8 \
fnv-1.0.7 \
fs_extra-1.1.0 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
getopts-0.2.21 \
getrandom-0.1.14 \
gimli-0.22.0 \
glob-0.2.11 \
glob-0.3.0 \
globset-0.4.5 \
globwalk-0.3.1 \
h2-0.2.6 \
half-1.6.0 \
hashbrown-0.8.2 \
heck-0.3.1 \
hermit-abi-0.1.15 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
human-panic-1.0.3 \
humantime-1.3.0 \
hyper-0.13.7 \
hyper-old-types-0.11.0 \
hyper-rustls-0.21.0 \
idna-0.2.0 \
ignore-0.4.16 \
im-13.0.0 \
include_dir-0.2.1 \
include_dir_impl-0.2.1 \
indexmap-1.5.1 \
indicatif-0.13.0 \
iovec-0.1.4 \
ipnet-2.3.0 \
itertools-0.8.2 \
itertools-0.9.0 \
itoa-0.4.6 \
jemalloc-sys-0.3.2 \
jemallocator-0.3.2 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.74 \
log-0.4.11 \
logfmt-0.0.2 \
maplit-1.0.2 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.5 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.0 \
mio-0.6.22 \
miow-0.2.1 \
net2-0.2.34 \
nom-4.2.3 \
nom_locate-0.3.1 \
num-0.2.1 \
num-bigint-0.2.6 \
num-complex-0.2.4 \
num-derive-0.2.5 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.2.4 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
number_prefix-0.3.0 \
object-0.20.0 \
once_cell-1.4.0 \
oorandom-11.1.2 \
ordered-float-2.0.0 \
os_type-2.2.0 \
parse-zoneinfo-0.3.0 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
plotters-0.2.15 \
ppv-lite86-0.2.8 \
proc-macro-hack-0.4.2 \
proc-macro-hack-0.5.18 \
proc-macro-hack-impl-0.4.2 \
proc-macro-nested-0.1.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.19 \
pulldown-cmark-0.2.0 \
quantiles-0.7.1 \
quick-error-1.2.3 \
quick-xml-0.17.2 \
quicli-0.4.0 \
quote-0.6.13 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.3.1 \
rayon-core-1.7.1 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
reqwest-0.10.7 \
ring-0.16.15 \
rust_decimal-0.10.2 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
rustls-0.18.0 \
ryu-1.0.5 \
safemem-0.3.3 \
same-file-1.0.6 \
scopeguard-1.1.0 \
sct-0.6.0 \
self_update-0.19.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.114 \
serde_cbor-0.11.1 \
serde_derive-1.0.114 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
sized-chunks-0.3.1 \
slab-0.4.2 \
socket2-0.3.12 \
spin-0.5.2 \
strfmt-0.1.6 \
strsim-0.8.0 \
structopt-0.2.18 \
structopt-derive-0.2.18 \
syn-0.14.9 \
syn-0.15.44 \
syn-1.0.38 \
synstructure-0.12.4 \
tempfile-3.1.0 \
termcolor-1.1.0 \
terminal_size-0.1.13 \
termios-0.3.2 \
test-generator-0.3.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
tinytemplate-1.1.0 \
tinyvec-0.3.3 \
tokio-0.2.22 \
tokio-rustls-0.14.0 \
tokio-util-0.3.1 \
toml-0.4.10 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.18 \
tracing-core-0.1.13 \
try-lock-0.2.3 \
typenum-1.12.0 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
url-2.1.1 \
uuid-0.8.1 \
vec_map-0.8.2 \
version_check-0.1.5 \
version_check-0.9.2 \
walkdir-2.3.1 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.4.17 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1
CARGO_FEATURES= --no-default-features
PLIST_FILES= bin/agrind
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/agrind
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/${PORTDOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/textproc/bat/Makefile
===================================================================
--- head/textproc/bat/Makefile (revision 552220)
+++ head/textproc/bat/Makefile (revision 552221)
@@ -1,177 +1,177 @@
# $FreeBSD$
PORTNAME= bat
DISTVERSIONPREFIX= v
DISTVERSION= 0.16.0
-PORTREVISION= 0
+PORTREVISION= 1
CATEGORIES= textproc
MAINTAINER= pizzamig@FreeBSD.org
COMMENT= Clone of cat with syntax highlighting
LICENSE= APACHE20
BUILD_DEPENDS= ${LOCALBASE}/llvm${LLVM_DEFAULT}/lib/libclang.so:devel/llvm${LLVM_DEFAULT}
LIB_DEPENDS= libgit2.so:devel/libgit2 \
libonig.so:devel/oniguruma
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= sharkdp
PLIST_FILES= bin/bat \
man/man1/bat.1.gz
CARGO_CRATES= adler-0.2.3 \
aho-corasick-0.7.13 \
ansi_colours-1.0.1 \
ansi_term-0.11.0 \
ansi_term-0.12.1 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
assert_cmd-1.0.1 \
atty-0.2.14 \
autocfg-1.0.1 \
base64-0.12.3 \
bincode-1.3.1 \
bit-set-0.5.2 \
bit-vec-0.6.2 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
bstr-0.2.13 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
cc-1.0.60 \
cfg-if-0.1.10 \
chrono-0.4.19 \
clap-2.33.3 \
console-0.12.0 \
constant_time_eq-0.1.5 \
content_inspector-0.2.4 \
crc32fast-1.2.0 \
crossbeam-utils-0.7.2 \
difference-2.0.0 \
digest-0.8.1 \
dirs-3.0.1 \
dirs-sys-0.3.5 \
doc-comment-0.3.3 \
dtoa-0.4.6 \
encode_unicode-0.3.6 \
encoding-0.2.33 \
encoding-index-japanese-1.20141219.5 \
encoding-index-korean-1.20141219.5 \
encoding-index-simpchinese-1.20141219.5 \
encoding-index-singlebyte-1.20141219.5 \
encoding-index-tradchinese-1.20141219.5 \
encoding_index_tests-0.1.4 \
error-chain-0.12.4 \
fake-simd-0.1.2 \
fancy-regex-0.3.5 \
flate2-1.0.18 \
float-cmp-0.8.0 \
fnv-1.0.7 \
fuchsia-cprng-0.1.1 \
generic-array-0.12.3 \
getrandom-0.1.15 \
git2-0.13.11 \
glob-0.3.0 \
globset-0.4.5 \
hashbrown-0.9.1 \
hermit-abi-0.1.16 \
idna-0.2.0 \
indexmap-1.6.0 \
itoa-0.4.6 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
libc-0.2.78 \
libgit2-sys-0.12.13+1.0.1 \
libz-sys-1.1.2 \
line-wrap-0.1.1 \
linked-hash-map-0.5.3 \
log-0.4.11 \
maplit-1.0.2 \
matches-0.1.8 \
memchr-2.3.3 \
miniz_oxide-0.4.2 \
normalize-line-endings-0.3.0 \
num-integer-0.1.43 \
num-traits-0.2.12 \
onig-6.1.0 \
onig_sys-69.5.1 \
opaque-debug-0.2.3 \
path_abs-0.5.0 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
pkg-config-0.3.18 \
plist-1.0.0 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro2-1.0.24 \
quote-1.0.7 \
rand-0.4.6 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
redox_users-0.3.5 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rust-argon2-0.8.2 \
ryu-1.0.5 \
safemem-0.3.3 \
same-file-1.0.6 \
semver-0.11.0 \
semver-parser-0.10.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.58 \
serde_yaml-0.8.13 \
sha-1-0.8.2 \
shell-words-1.0.0 \
std_prelude-0.2.12 \
strsim-0.8.0 \
syn-1.0.42 \
syntect-4.4.0 \
tempdir-0.3.7 \
term_size-0.3.2 \
terminal_size-0.1.13 \
termios-0.3.2 \
textwrap-0.11.0 \
thread_local-1.0.1 \
tinyvec-0.3.4 \
treeline-0.1.0 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wild-2.0.4 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xml-rs-0.8.3 \
yaml-rust-0.4.4
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/bat
find ${WRKDIR}/target -name "bat.1" -exec cp {} ${WRKSRC}/doc/bat.1 \;
${INSTALL_MAN} ${WRKSRC}/doc/bat.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
.include <bsd.port.mk>
Index: head/textproc/mdbook/Makefile
===================================================================
--- head/textproc/mdbook/Makefile (revision 552220)
+++ head/textproc/mdbook/Makefile (revision 552221)
@@ -1,204 +1,205 @@
# $FreeBSD$
PORTNAME= mdbook
DISTVERSIONPREFIX= v
DISTVERSION= 0.4.3
+PORTREVISION= 1
CATEGORIES= textproc
MAINTAINER= 0mp@FreeBSD.org
COMMENT= Create book from markdown files (like Gitbook but implemented in Rust)
LICENSE= MPL20
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= rust-lang
GH_PROJECT= mdBook
PLIST_FILES= bin/mdbook
CARGO_CRATES= aho-corasick-0.7.10 \
ammonia-3.1.0 \
ansi_term-0.11.0 \
anyhow-1.0.28 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
base64-0.12.0 \
bit-set-0.5.1 \
bit-vec-0.5.1 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytes-0.5.4 \
cfg-if-0.1.10 \
chrono-0.4.11 \
clap-2.33.0 \
ctor-0.1.13 \
difference-2.0.0 \
digest-0.8.1 \
dtoa-0.4.5 \
elasticlunr-rs-2.3.8 \
env_logger-0.7.1 \
fake-simd-0.1.2 \
filetime-0.2.9 \
fnv-1.0.6 \
fsevent-0.4.0 \
fsevent-sys-2.0.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futf-0.1.4 \
futures-0.3.4 \
futures-channel-0.3.4 \
futures-core-0.3.4 \
futures-executor-0.3.4 \
futures-io-0.3.4 \
futures-macro-0.3.4 \
futures-sink-0.3.4 \
futures-task-0.3.4 \
futures-util-0.3.4 \
generic-array-0.12.3 \
getopts-0.2.21 \
getrandom-0.1.14 \
gitignore-1.0.6 \
glob-0.2.11 \
h2-0.2.4 \
handlebars-3.0.1 \
headers-0.3.2 \
headers-core-0.2.0 \
heck-0.3.1 \
hermit-abi-0.1.11 \
html5ever-0.25.1 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.13.5 \
idna-0.2.0 \
indexmap-1.3.2 \
inotify-0.7.0 \
inotify-sys-0.1.3 \
input_buffer-0.3.1 \
iovec-0.1.4 \
itoa-0.4.5 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.69 \
log-0.4.8 \
mac-0.1.1 \
maplit-1.0.2 \
markup5ever-0.10.0 \
markup5ever_rcdom-0.1.0 \
matches-0.1.8 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
mio-0.6.21 \
mio-extras-2.0.6 \
miow-0.2.1 \
net2-0.2.33 \
new_debug_unreachable-1.0.4 \
notify-4.0.15 \
num-integer-0.1.42 \
num-traits-0.2.11 \
opaque-debug-0.2.3 \
open-1.4.0 \
output_vt100-0.1.2 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
phf-0.8.0 \
phf_codegen-0.8.0 \
phf_generator-0.8.0 \
phf_shared-0.8.0 \
pin-project-0.4.9 \
pin-project-internal-0.4.9 \
pin-project-lite-0.1.4 \
pin-utils-0.1.0-alpha.4 \
ppv-lite86-0.2.6 \
precomputed-hash-0.1.1 \
pretty_assertions-0.6.1 \
proc-macro-hack-0.5.15 \
proc-macro-nested-0.1.4 \
proc-macro2-1.0.10 \
pulldown-cmark-0.7.0 \
quick-error-1.2.3 \
quote-1.0.3 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_pcg-0.2.1 \
redox_syscall-0.1.56 \
regex-1.3.7 \
regex-syntax-0.6.17 \
remove_dir_all-0.5.2 \
ryu-1.0.3 \
same-file-1.0.6 \
scoped-tls-1.0.0 \
select-0.5.0 \
serde-1.0.106 \
serde_derive-1.0.106 \
serde_json-1.0.52 \
serde_urlencoded-0.6.1 \
sha-1-0.8.2 \
shlex-0.1.1 \
siphasher-0.3.2 \
slab-0.4.2 \
smallvec-1.3.0 \
string_cache-0.8.0 \
string_cache_codegen-0.5.1 \
strsim-0.8.0 \
strum-0.16.0 \
strum_macros-0.16.0 \
syn-1.0.17 \
tempfile-3.1.0 \
tendril-0.4.1 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
tokio-0.2.18 \
tokio-macros-0.2.5 \
tokio-tungstenite-0.10.1 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
try-lock-0.2.2 \
tungstenite-0.10.1 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.12 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
url-2.1.1 \
urlencoding-1.0.0 \
utf-8-0.7.5 \
vec_map-0.8.1 \
version_check-0.9.1 \
walkdir-2.3.1 \
want-0.3.0 \
warp-0.2.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1 \
xml5ever-0.16.1
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/mdbook
.include <bsd.port.mk>
Index: head/textproc/ripgrep/Makefile
===================================================================
--- head/textproc/ripgrep/Makefile (revision 552220)
+++ head/textproc/ripgrep/Makefile (revision 552221)
@@ -1,128 +1,128 @@
# Created by: Petteri Valkonen <petteri.valkonen@iki.fi>
# $FreeBSD$
PORTNAME= ripgrep
DISTVERSION= 12.1.1
-PORTREVISION= 5
+PORTREVISION= 6
CATEGORIES= textproc
MAINTAINER= petteri.valkonen@iki.fi
COMMENT= Command line search tool
LICENSE= MIT UNLICENSE
LICENSE_COMB= dual
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
LICENSE_FILE_UNLICENSE= ${WRKSRC}/UNLICENSE
BUILD_DEPENDS= asciidoctor:textproc/rubygem-asciidoctor
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= BurntSushi
CARGO_CRATES= aho-corasick-0.7.10 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.12.1 \
bitflags-1.2.1 \
bstr-0.2.13 \
bytecount-0.6.0 \
byteorder-1.3.4 \
cc-1.0.54 \
cfg-if-0.1.10 \
clap-2.33.1 \
crossbeam-channel-0.4.2 \
crossbeam-utils-0.7.2 \
encoding_rs-0.8.23 \
encoding_rs_io-0.1.7 \
fnv-1.0.7 \
fs_extra-1.1.0 \
glob-0.3.0 \
hermit-abi-0.1.13 \
itoa-0.4.5 \
jemalloc-sys-0.3.2 \
jemallocator-0.3.2 \
lazy_static-1.4.0 \
libc-0.2.71 \
log-0.4.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
num_cpus-1.13.0 \
packed_simd-0.3.3 \
pcre2-0.2.3 \
pcre2-sys-0.2.2 \
pkg-config-0.3.17 \
proc-macro2-1.0.17 \
quote-1.0.6 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
ryu-1.0.4 \
same-file-1.0.6 \
serde-1.0.110 \
serde_derive-1.0.110 \
serde_json-1.0.53 \
strsim-0.8.0 \
syn-1.0.27 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
walkdir-2.3.1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
RIPGREP_OUTDIR= ${WRKDIR}/cargo-out
CARGO_ENV= RIPGREP_OUTDIR=${RIPGREP_OUTDIR}
PLIST_FILES= bin/rg \
etc/bash_completion.d/rg.bash \
man/man1/rg.1.gz \
share/fish/vendor_completions.d/rg.fish \
share/zsh/site-functions/_rg
PORTDOCS= CHANGELOG.md FAQ.md GUIDE.md
OPTIONS_DEFINE= DOCS PCRE2 SIMD
OPTIONS_DEFAULT=PCRE2
OPTIONS_DEFAULT_aarch64=SIMD
OPTIONS_DEFAULT_amd64= ${OPTIONS_DEFAULT_i386}
OPTIONS_DEFAULT_i386= ${MACHINE_CPU:tu:S/SSE2/SIMD/}
PCRE2_DESC= ${PCRE_DESC} version 2
PCRE2_LIB_DEPENDS= libpcre2-8.so:devel/pcre2
PCRE2_VARS= CARGO_FEATURES+=pcre2
# simd crate uses cfg_target_feature which isn't stable yet, so unlock
# unstable features similar to how lang/rust bootstraps. www/firefox
# uses the same hack when building with --enable-rust-simd.
SIMD_MAKE_ENV= RUSTC_BOOTSTRAP=1
SIMD_VARS= CARGO_FEATURES+=simd-accel ${SIMD_VARS_${ARCH}}
SIMD_VARS_i386= RUSTFLAGS+="-C target-feature=+sse2"
post-patch:
@${REINPLACE_CMD} -e 's|OUT_DIR|RIPGREP_OUTDIR|' ${WRKSRC}/build.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/rg
${INSTALL_MAN} ${RIPGREP_OUTDIR}/rg.1 ${STAGEDIR}${MAN1PREFIX}/man/man1
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d/
${INSTALL_DATA} ${RIPGREP_OUTDIR}/rg.bash \
${STAGEDIR}${PREFIX}/etc/bash_completion.d/
@${MKDIR} ${STAGEDIR}${PREFIX}/share/fish/vendor_completions.d/
${INSTALL_DATA} ${RIPGREP_OUTDIR}/rg.fish \
${STAGEDIR}${PREFIX}/share/fish/vendor_completions.d/
@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site-functions/
${INSTALL_DATA} ${WRKSRC}/complete/_rg \
${STAGEDIR}${PREFIX}/share/zsh/site-functions/
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC} && ${INSTALL_DATA} ${PORTDOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/textproc/sd/Makefile
===================================================================
--- head/textproc/sd/Makefile (revision 552220)
+++ head/textproc/sd/Makefile (revision 552221)
@@ -1,121 +1,121 @@
# $FreeBSD$
PORTNAME= sd
DISTVERSIONPREFIX= v
DISTVERSION= 0.7.6
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= textproc
MAINTAINER= ports@FreeBSD.org
COMMENT= Intuitive find and replace tool
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= chmln
CARGO_CRATES= aho-corasick-0.7.10 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
assert_cmd-1.0.1 \
atty-0.2.14 \
autocfg-1.0.0 \
bitflags-1.2.1 \
cfg-if-0.1.10 \
clap-2.33.0 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-queue-0.2.1 \
crossbeam-utils-0.7.2 \
difference-2.0.0 \
doc-comment-0.3.3 \
either-1.5.3 \
getrandom-0.1.14 \
heck-0.3.1 \
hermit-abi-0.1.11 \
lazy_static-1.4.0 \
libc-0.2.69 \
man-0.3.0 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
memoffset-0.5.4 \
num_cpus-1.13.0 \
ppv-lite86-0.2.6 \
predicates-1.0.4 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro-error-1.0.2 \
proc-macro-error-attr-1.0.2 \
proc-macro2-1.0.10 \
quote-1.0.3 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rayon-1.3.1 \
rayon-core-1.7.1 \
redox_syscall-0.1.56 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.2 \
roff-0.1.0 \
scopeguard-1.1.0 \
strsim-0.8.0 \
structopt-0.3.15 \
structopt-derive-0.4.8 \
syn-1.0.17 \
syn-mid-0.5.0 \
tempfile-3.1.0 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
treeline-0.1.0 \
unescape-0.1.0 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
vec_map-0.8.1 \
version_check-0.9.1 \
wait-timeout-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
_SD_OUT_DIR= ${WRKDIR}/cargo-out
MAKE_ENV= SD_OUT_DIR=${_SD_OUT_DIR}
PLIST_FILES= bin/sd \
etc/bash_completion.d/sd.bash \
man/man1/sd.1.gz \
share/fish/completions/sd.fish \
share/zsh/site-functions/_sd
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-patch:
@${REINPLACE_CMD} 's,"OUT_DIR","SD_OUT_DIR",g' ${WRKSRC}/build.rs
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/sd
${INSTALL_MAN} ${_SD_OUT_DIR}/sd.1 \
${STAGEDIR}${PREFIX}/man/man1
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d
${INSTALL_DATA} ${_SD_OUT_DIR}/sd.bash \
${STAGEDIR}${PREFIX}/etc/bash_completion.d
@${MKDIR} ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${_SD_OUT_DIR}/sd.fish \
${STAGEDIR}${PREFIX}/share/fish/completions
@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${_SD_OUT_DIR}/_sd \
${STAGEDIR}${PREFIX}/share/zsh/site-functions
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_MAN} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/textproc/sonic/Makefile
===================================================================
--- head/textproc/sonic/Makefile (revision 552220)
+++ head/textproc/sonic/Makefile (revision 552221)
@@ -1,118 +1,118 @@
# $FreeBSD$
PORTNAME= sonic
DISTVERSIONPREFIX= v
DISTVERSION= 1.3.0
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= textproc
MAINTAINER= ports@FreeBSD.org
COMMENT= Fast, lightweight, and schema-less search backend
LICENSE= MPL20
LICENSE_FILE= ${WRKSRC}/LICENSE.md
BUILD_DEPENDS= llvm${LLVM_DEFAULT}>0:devel/llvm${LLVM_DEFAULT}
USES= cargo compiler:c++11-lang
USE_GITHUB= yes
GH_ACCOUNT= valeriansaliou
USE_RC_SUBR= sonic
CARGO_CRATES= ahash-0.3.8 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
atty-0.2.14 \
autocfg-1.0.0 \
bindgen-0.53.3 \
bitflags-1.2.1 \
byteorder-1.3.4 \
cc-1.0.55 \
cexpr-0.4.0 \
cfg-if-0.1.10 \
clang-sys-0.29.3 \
clap-2.33.1 \
env_logger-0.7.1 \
fs_extra-1.1.0 \
fst-0.3.5 \
fst-levenshtein-0.2.1 \
fst-regex-0.2.2 \
getrandom-0.1.14 \
glob-0.3.0 \
hashbrown-0.7.2 \
hashbrown-0.8.0 \
hermit-abi-0.1.14 \
humantime-1.3.0 \
jemalloc-sys-0.3.2 \
jemallocator-0.3.2 \
jobserver-0.1.21 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.71 \
libloading-0.5.2 \
librocksdb-sys-6.7.4 \
linked-hash-map-0.5.3 \
linked_hash_set-0.1.3 \
log-0.4.8 \
memchr-2.3.3 \
memmap-0.6.2 \
nix-0.17.0 \
nom-5.1.2 \
peeking_take_while-0.1.2 \
ppv-lite86-0.2.8 \
proc-macro2-1.0.18 \
quick-error-1.2.3 \
quote-1.0.7 \
radix-0.6.0 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
regex-1.3.9 \
regex-syntax-0.3.9 \
regex-syntax-0.6.18 \
rocksdb-0.14.0 \
rustc-hash-1.1.0 \
serde-1.0.114 \
serde_derive-1.0.114 \
shlex-0.1.1 \
strsim-0.8.0 \
syn-1.0.33 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
toml-0.5.6 \
twox-hash-1.5.0 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.1 \
utf8-ranges-1.0.4 \
vec_map-0.8.2 \
version_check-0.9.2 \
void-1.0.2 \
wasi-0.9.0+wasi-snapshot-preview1 \
whatlang-0.9.0 \
which-3.1.1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0
USERS= sonic
GROUPS= sonic
PLIST_FILES= bin/sonic \
"@sample ${ETCDIR}/config.cfg.sample"
PORTDOCS= CONFIGURATION.md PROTOCOL.md README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/sonic
@${MKDIR} ${STAGEDIR}${ETCDIR}
${INSTALL_DATA} ${WRKSRC}/config.cfg ${STAGEDIR}${ETCDIR}/config.cfg.sample
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
cd ${WRKSRC} && ${INSTALL_MAN} ${PORTDOCS} ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/textproc/xsv-rs/Makefile
===================================================================
--- head/textproc/xsv-rs/Makefile (revision 552220)
+++ head/textproc/xsv-rs/Makefile (revision 552221)
@@ -1,77 +1,77 @@
# $FreeBSD$
PORTNAME= xsv
DISTVERSION= 0.13.0
-PORTREVISION= 24
+PORTREVISION= 25
CATEGORIES= textproc
PKGNAMESUFFIX= -rs
MAINTAINER= ports@FreeBSD.org
COMMENT= Fast CSV toolkit
LICENSE= MIT UNLICENSE
LICENSE_COMB= dual
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE-MIT
LICENSE_FILE_UNLICENSE= ${WRKSRC}/UNLICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= BurntSushi
CARGO_CRATES= aho-corasick-0.6.4 \
bitflags-1.0.3 \
byteorder-1.2.2 \
cfg-if-0.1.3 \
chan-0.1.21 \
csv-1.0.0 \
csv-core-0.1.4 \
csv-index-0.1.5 \
docopt-1.0.0 \
filetime-0.1.15 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
lazy_static-1.0.0 \
libc-0.2.49 \
log-0.4.1 \
memchr-2.0.1 \
num-traits-0.2.4 \
num_cpus-1.8.0 \
proc-macro2-0.3.8 \
quickcheck-0.6.2 \
quote-0.5.2 \
rand-0.3.22 \
rand-0.4.2 \
redox_syscall-0.1.37 \
regex-1.0.0 \
regex-syntax-0.6.0 \
serde-1.0.54 \
serde_derive-1.0.54 \
streaming-stats-0.2.0 \
strsim-0.7.0 \
syn-0.13.9 \
tabwriter-1.0.4 \
thread_local-0.3.5 \
threadpool-1.7.1 \
ucd-util-0.1.1 \
unicode-width-0.1.4 \
unicode-xid-0.1.0 \
unreachable-1.0.0 \
utf8-ranges-1.0.0 \
void-1.0.2 \
winapi-0.3.4 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/xsv
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/xsv
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/www/cliqz/Makefile
===================================================================
--- head/www/cliqz/Makefile (revision 552220)
+++ head/www/cliqz/Makefile (revision 552221)
@@ -1,191 +1,191 @@
# $FreeBSD$
PORTNAME= cliqz
DISTVERSION= 1.38.0
-PORTREVISION= 3
+PORTREVISION= 4
CATEGORIES= www
MASTER_SITES= https://s3.amazonaws.com/cdn.cliqz.com/browser-f/APT/:amazon \
http://repository.cliqz.com/dist/${CLIQZ_CHANNEL}/${DISTVERSION}/${CLIQZ_LAST_BUILD_ID}/:cliqz
DISTFILES= adult-domains.bin:amazon \
cliqz@cliqz.com.xpi:cliqz \
dat@cliqz.com.xpi:cliqz \
gdprtool@cliqz.com.xpi:cliqz \
https-everywhere@cliqz.com.xpi:cliqz
DIST_SUBDIR= ${PORTNAME}
EXTRACT_ONLY= ${DISTNAME}${_GITHUB_EXTRACT_SUFX}
MAINTAINER= fox@FreeBSD.org
COMMENT= Secure browser (Mozilla based) with built-in quick search
LICENSE= MPL20
LICENSE_FILE= ${WRKSRC}/LICENSE
DEPRECATED= Uses deprecated version of python
EXPIRATION_DATE= 2020-12-31
FETCH_DEPENDS= ca_root_nss>=0:security/ca_root_nss
LIB_DEPENDS= libdbus-1.so:devel/dbus \
libdbus-glib-1.so:devel/dbus-glib \
libfontconfig.so:x11-fonts/fontconfig \
libfreetype.so:print/freetype2 \
libgraphite2.so:graphics/graphite2 \
libharfbuzz.so:print/harfbuzz \
libnspr4.so:devel/nspr \
libnssutil3.so:security/nss \
libplc4.so:devel/nspr \
libplds4.so:devel/nspr \
libvpx.so:multimedia/libvpx
BUILD_DEPENDS= ${LOCALBASE}/libdata/pkgconfig/xt.pc:x11-toolkits/libXt \
${PYTHON_PKGNAMEPREFIX}sqlite3>0:databases/py-sqlite3@${PY_FLAVOR} \
py${PYTHON3_DEFAULT:S/.//}-sqlite3>0:databases/py-sqlite3@py${PYTHON3_DEFAULT:S/.//} \
${RUST_DEFAULT}>=1.41:lang/${RUST_DEFAULT} \
autoconf-2.13:devel/autoconf213 \
bash:shells/bash \
graphite2>=1.3.14:graphics/graphite2 \
harfbuzz>=2.6.8:print/harfbuzz \
icu>=67.1,1:devel/icu \
libevent>=2.1.8:devel/libevent \
libvorbis>=1.3.6,3:audio/libvorbis \
libvpx>=1.8.2:multimedia/libvpx \
${LOCALBASE}/bin/python${PYTHON3_DEFAULT}:lang/python${PYTHON3_DEFAULT:S/.//g} \
llvm${LLVM_DEFAULT}>=0:devel/llvm${LLVM_DEFAULT} \
nasm:devel/nasm \
node:www/node \
nspr>=4.25:devel/nspr \
nss>=3.54:security/nss \
png>=1.6.35:graphics/png \
rust-cbindgen>=0.14.1:devel/rust-cbindgen \
sqlite3>=3.30.1:databases/sqlite3 \
v4l_compat>0:multimedia/v4l_compat \
yasm:devel/yasm \
zip:archivers/zip
USES= compiler:c++17-lang desktop-file-utils gmake gnome pkgconfig \
python:2.7,build shebangfix tar:xz xorg
USE_GNOME= cairo gdkpixbuf2 gtk20 gtk30
USE_XORG= x11 xcb xcomposite xcursor xdamage xext xfixes xi xrender xt
USE_GITHUB= yes
GH_ACCOUNT= cliqz-oss
GH_PROJECT= browser-f
USE_LDCONFIG= yes
bash_OLD_CMD= "/bin/bash"
bash_CMD= ${LOCALBASE}/bin/bash
SHEBANG_FILES= magic_build_and_package.sh
CLIQZ_CHANNEL= release
# If the DISTVERSION is updated, make sure to update the last build id from
# fetch -qo - https://repository.cliqz.com/dist/${CLIQZ_CHANNEL}/${DISTVERSION}/lastbuildid
CLIQZ_LAST_BUILD_ID= 20200721193739
CLIQZ_ICON= ${PORTNAME}.png
CLIQZ_ICON_SRC= ${WRKSRC}/mozilla-release/browser/branding/${PORTNAME}/default48.png
MOZ_DESKTOP= ${WRKSRC}/mozilla-release/toolkit/mozapps/installer/linux/rpm/mozilla.desktop
CLIQZ_DESKTOP= ${WRKSRC}/mozilla-release/toolkit/mozapps/installer/linux/rpm/cliqz.desktop
MAKE_ENV+= CQZ_BUILD_ID=${CLIQZ_LAST_BUILD_ID} \
CQZ_RELEASE_CHANNEL=${CLIQZ_CHANNEL} \
LLVM_CONFIG=llvm-config${LLVM_DEFAULT} \
LLVM_OBJDUMP=${LOCALBASE}/llvm${LLVM_DEFAULT}/bin/llvm-objdump \
PYTHON3="${LOCALBASE}/bin/python${PYTHON3_DEFAULT}" \
RUSTFLAGS=${RUSTFLAGS} \
MOZBUILD_STATE_PATH=${WRKDIR}
BINARY_ALIAS+= python3=python${PYTHON3_DEFAULT}
# Configure args passed into mach build system
MOZ_CONFIGURE_ARGS+= "--disable-crashreporter" \
"--disable-debug" \
"--disable-debug-symbols" \
"--disable-tests" \
"--disable-updater" \
"--with-system-libvpx" \
"--libclang-path=${LOCALBASE}/llvm${LLVM_DEFAULT}/lib"
.include "${.CURDIR}/Makefile.options"
.include <bsd.port.pre.mk>
# Adjust -C target-cpu if -march/-mcpu is set by bsd.cpu.mk
.if ${ARCH} == amd64 || ${ARCH} == i386
RUSTFLAGS+= ${CFLAGS:M-march=*:S/-march=/-C target-cpu=/}
.else
RUSTFLAGS+= ${CFLAGS:M-mcpu=*:S/-mcpu=/-C target-cpu=/}
.endif
.if ${ARCH:Maarch64} || ${MACHINE_CPU:Msse2}
MOZ_CONFIGURE_ARGS+= "--enable-rust-simd"
.else
MOZ_CONFIGURE_ARGS+= "--disable-rust-simd"
.endif
# Require a newer Clang than what's in the base system unless the user opted out
.if ${CC} == cc && ${CXX} == c++ && exists(/usr/lib/libc++.so)
CPP= ${LOCALBASE}/bin/clang-cpp${LLVM_DEFAULT}
CC= ${LOCALBASE}/bin/clang${LLVM_DEFAULT}
CXX= ${LOCALBASE}/bin/clang++${LLVM_DEFAULT}
# XXX avoid warnings
USES:= ${USES:Ncompiler\:*}
.endif
post-extract:
${CP} ${DISTDIR}/${DIST_SUBDIR}/adult-domains.bin \
${WRKSRC}/mozilla-release/browser/adult-domains.bin
${MKDIR} ${WRKSRC}/obj/dist/bin/browser/features
${CP} ${DISTDIR}/${DIST_SUBDIR}/cliqz@cliqz.com.xpi \
${DISTDIR}/${DIST_SUBDIR}/https-everywhere@cliqz.com.xpi \
${DISTDIR}/${DIST_SUBDIR}/gdprtool@cliqz.com.xpi \
${WRKSRC}/obj/dist/bin/browser/features
post-patch:
@${CP} ${MOZ_DESKTOP} ${CLIQZ_DESKTOP}
@${REINPLACE_CMD} -e 's/@MOZ_APP_DISPLAYNAME@/Cliqz Internet/g' \
-e 's/@MOZ_APP_NAME@/${PORTNAME}/g' \
-e '/Icon=${PORTNAME}/ s/${PORTNAME}/${CLIQZ_ICON}/' \
-e '/StartupWMClass/d' \
${CLIQZ_DESKTOP}
.for MOZ_CONFIGURE_ARG in ${MOZ_CONFIGURE_ARGS}
@${ECHO_CMD} "ac_add_options" ${MOZ_CONFIGURE_ARG} >> \
${WRKSRC}/mozilla-release/browser/config/cliqz.mozconfig;
.endfor
# This prevents the linker from exhausting memory in i386 builds
.if ${ARCH} == "i386"
@${ECHO_CMD} 'export LDFLAGS="-Wl,--no-keep-memory -Wl,--as-needed"' >> \
${WRKSRC}/mozilla-release/browser/config/cliqz.mozconfig
.endif
# Disable vendor checksums like lang/rust
@${REINPLACE_CMD} 's,"files":{[^}]*},"files":{},' \
${WRKSRC}/mozilla-release/third_party/rust/*/.cargo-checksum.json
pre-configure-script:
# Check that the running kernel has COMPAT_FREEBSD11 required by lang/rust post-ino64
@${SETENV} CC="${CC}" OPSYS="${OPSYS}" OSVERSION="${OSVERSION}" WRKDIR="${WRKDIR}" \
${SH} ${SCRIPTSDIR}/rust-compat11-canary.sh
do-build:
(cd ${WRKSRC} && ${SETENV} ${MAKE_ENV} ./magic_build_and_package.sh)
do-install:
${MKDIR} ${STAGEDIR}${PREFIX}/lib/${PORTNAME}
(cd ${WRKSRC}/obj/dist/${PORTNAME} && \
${COPYTREE_SHARE} . ${STAGEDIR}${PREFIX}/lib/${PORTNAME})
# Check if the wayland lib was generated; this happens when
# x11-toolkits/gtk30 has the WAYLAND option enabled.
@if [ -f ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/libmozwayland.so ]; then \
${REINPLACE_CMD} -e 's|%%WAYLAND%%||' ${TMPPLIST}; \
else \
${REINPLACE_CMD} -e 's|%%WAYLAND%%|@comment |' ${TMPPLIST}; \
fi
@${CHMOD} 755 ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/${PORTNAME}-bin
@${CHMOD} 755 ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/${PORTNAME}
post-install:
${RLN} ${STAGEDIR}${PREFIX}/lib/${PORTNAME}/${PORTNAME}-bin ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
${INSTALL_DATA} ${CLIQZ_DESKTOP} ${STAGEDIR}${PREFIX}/share/applications/
${MKDIR} ${STAGEDIR}${PREFIX}/share/pixmaps
@${CP} ${CLIQZ_ICON_SRC} ${STAGEDIR}${PREFIX}/share/pixmaps/${CLIQZ_ICON}
.include <bsd.port.post.mk>
Index: head/www/cliqz/files/patch-bug1663715
===================================================================
--- head/www/cliqz/files/patch-bug1663715 (nonexistent)
+++ head/www/cliqz/files/patch-bug1663715 (revision 552221)
@@ -0,0 +1,31087 @@
+From 63678ae69e03325d65255d29f1af4a6ea3dd354a Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:07:49 +0200
+Subject: [PATCH 36/38] bmo#1643201: Cherry-pick some servo changes to
+ derive_common
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ servo/components/derive_common/cg.rs | 6 +++++-
+ 1 file changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/servo/components/derive_common/cg.rs b/servo/components/derive_common/cg.rs
+index 55a75398c7..c51c0d7750 100644
+--- mozilla-release/servo/components/derive_common/cg.rs
++++ mozilla-release/servo/components/derive_common/cg.rs
+@@ -7,7 +7,7 @@ use proc_macro2::{Span, TokenStream};
+ use quote::TokenStreamExt;
+ use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field};
+ use syn::{GenericArgument, GenericParam, Ident, Path};
+-use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray};
++use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray, TypeGroup};
+ use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple};
+ use syn::{Variant, WherePredicate};
+ use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo};
+@@ -208,6 +208,10 @@ where
+ elem: Box::new(map_type_params(&inner.elem, params, f)),
+ ..inner.clone()
+ }),
++ Type::Group(ref inner) => Type::from(TypeGroup {
++ elem: Box::new(map_type_params(&inner.elem, params, f)),
++ ..inner.clone()
++ }),
+ ref ty => panic!("type {:?} cannot be mapped yet", ty),
+ }
+ }
+--
+2.28.0
+
+From 23f22e9de6cc2236d58cc03997a1040e62c532e1 Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:09:04 +0200
+Subject: [PATCH 37/38] bmo#1653339: Teach style_derive's map_type_params about
+ mapping self correctly
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ servo/components/derive_common/cg.rs | 30 +++++++++++--------
+ .../style_derive/to_computed_value.rs | 4 ++-
+ 2 files changed, 21 insertions(+), 13 deletions(-)
+
+diff --git a/servo/components/derive_common/cg.rs b/servo/components/derive_common/cg.rs
+index c51c0d7750..8abfd87149 100644
+--- mozilla-release/servo/components/derive_common/cg.rs
++++ mozilla-release/servo/components/derive_common/cg.rs
+@@ -154,19 +154,19 @@ pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: &
+ segment.into()
+ }
+
+-pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type
++pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], self_type: &Path, f: &mut F) -> Type
+ where
+ F: FnMut(&Ident) -> Type,
+ {
+ match *ty {
+ Type::Slice(ref inner) => Type::from(TypeSlice {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ Type::Array(ref inner) => {
+ //ref ty, ref expr) => {
+ Type::from(TypeArray {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ })
+ },
+@@ -175,7 +175,7 @@ where
+ elems: inner
+ .elems
+ .iter()
+- .map(|ty| map_type_params(&ty, params, f))
++ .map(|ty| map_type_params(&ty, params, self_type, f))
+ .collect(),
+ ..inner.clone()
+ }),
+@@ -187,10 +187,16 @@ where
+ if params.iter().any(|ref param| &param.ident == ident) {
+ return f(ident);
+ }
++ if ident == "Self" {
++ return Type::from(TypePath {
++ qself: None,
++ path: self_type.clone(),
++ });
++ }
+ }
+ Type::from(TypePath {
+ qself: None,
+- path: map_type_params_in_path(path, params, f),
++ path: map_type_params_in_path(path, params, self_type, f),
+ })
+ },
+ Type::Path(TypePath {
+@@ -198,25 +204,25 @@ where
+ ref path,
+ }) => Type::from(TypePath {
+ qself: qself.as_ref().map(|qself| QSelf {
+- ty: Box::new(map_type_params(&qself.ty, params, f)),
++ ty: Box::new(map_type_params(&qself.ty, params, self_type, f)),
+ position: qself.position,
+ ..qself.clone()
+ }),
+- path: map_type_params_in_path(path, params, f),
++ path: map_type_params_in_path(path, params, self_type, f),
+ }),
+ Type::Paren(ref inner) => Type::from(TypeParen {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ Type::Group(ref inner) => Type::from(TypeGroup {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ ref ty => panic!("type {:?} cannot be mapped yet", ty),
+ }
+ }
+
+-fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path
++fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], self_type: &Path, f: &mut F) -> Path
+ where
+ F: FnMut(&Ident) -> Type,
+ {
+@@ -236,11 +242,11 @@ where
+ .map(|arg| match arg {
+ ty @ &GenericArgument::Lifetime(_) => ty.clone(),
+ &GenericArgument::Type(ref data) => {
+- GenericArgument::Type(map_type_params(data, params, f))
++ GenericArgument::Type(map_type_params(data, params, self_type, f))
+ },
+ &GenericArgument::Binding(ref data) => {
+ GenericArgument::Binding(Binding {
+- ty: map_type_params(&data.ty, params, f),
++ ty: map_type_params(&data.ty, params, self_type, f),
+ ..data.clone()
+ })
+ },
+diff --git a/servo/components/style_derive/to_computed_value.rs b/servo/components/style_derive/to_computed_value.rs
+index fe6bddb7ed..1dc422e2dd 100644
+--- mozilla-release/servo/components/style_derive/to_computed_value.rs
++++ mozilla-release/servo/components/style_derive/to_computed_value.rs
+@@ -47,12 +47,15 @@ pub fn derive_to_value(
+ cg::add_predicate(&mut where_clause, parse_quote!(#param: #trait_path));
+ }
+
++ let computed_value_type = cg::fmap_trait_output(&input, &trait_path, &output_type_name);
++
+ let mut add_field_bound = |binding: &BindingInfo| {
+ let ty = &binding.ast().ty;
+
+ let output_type = cg::map_type_params(
+ ty,
+ &params,
++ &computed_value_type,
+ &mut |ident| parse_quote!(<#ident as #trait_path>::#output_type_name),
+ );
+
+@@ -142,7 +145,6 @@ pub fn derive_to_value(
+
+ input.generics.where_clause = where_clause;
+ let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+- let computed_value_type = cg::fmap_trait_output(&input, &trait_path, &output_type_name);
+
+ let impl_ = trait_impl(from_body, to_body);
+
+--
+2.28.0
+
+From 300e01e71c9dc536d499d80563968c5fc7f7e34a Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:10:20 +0200
+Subject: [PATCH 38/38] bmo#1663715: Update syn and proc-macro2 so that Firefox
+ can build on Rust nightly again
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ Cargo.lock | 8 +-
+ .../rust/lucet-wasi/.cargo-checksum.json | 2 +-
+ .../rust/packed_simd/.cargo-checksum.json | 2 +-
+ .../rust/proc-macro2/.cargo-checksum.json | 2 +-
+ third_party/rust/proc-macro2/Cargo.toml | 15 +-
+ third_party/rust/proc-macro2/README.md | 2 +-
+ third_party/rust/proc-macro2/build.rs | 20 +
+ third_party/rust/proc-macro2/src/detection.rs | 67 +
+ third_party/rust/proc-macro2/src/fallback.rs | 1010 ++----
+ third_party/rust/proc-macro2/src/lib.rs | 225 +-
+ third_party/rust/proc-macro2/src/marker.rs | 18 +
+ third_party/rust/proc-macro2/src/parse.rs | 849 +++++
+ third_party/rust/proc-macro2/src/strnom.rs | 391 ---
+ third_party/rust/proc-macro2/src/wrapper.rs | 258 +-
+ .../rust/proc-macro2/tests/comments.rs | 103 +
+ third_party/rust/proc-macro2/tests/marker.rs | 33 +
+ third_party/rust/proc-macro2/tests/test.rs | 240 +-
+ .../rust/proc-macro2/tests/test_fmt.rs | 26 +
+ .../spirv-cross-internal/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/Cargo.toml | 35 +-
+ third_party/rust/syn/README.md | 16 +-
+ third_party/rust/syn/benches/file.rs | 7 +
+ third_party/rust/syn/benches/rust.rs | 45 +-
+ third_party/rust/syn/build.rs | 38 +-
+ third_party/rust/syn/src/attr.rs | 126 +-
+ third_party/rust/syn/src/buffer.rs | 56 +-
+ third_party/rust/syn/src/custom_keyword.rs | 12 +-
+ .../rust/syn/src/custom_punctuation.rs | 50 +-
+ third_party/rust/syn/src/data.rs | 96 +-
+ third_party/rust/syn/src/derive.rs | 10 +-
+ third_party/rust/syn/src/discouraged.rs | 27 +-
+ third_party/rust/syn/src/error.rs | 33 +-
+ third_party/rust/syn/src/expr.rs | 826 +++--
+ third_party/rust/syn/src/ext.rs | 12 +-
+ third_party/rust/syn/src/file.rs | 4 +-
+ third_party/rust/syn/src/gen/clone.rs | 2051 ++++++++++++
+ third_party/rust/syn/src/gen/debug.rs | 2857 +++++++++++++++++
+ third_party/rust/syn/src/gen/eq.rs | 1930 +++++++++++
+ third_party/rust/syn/src/gen/fold.rs | 287 +-
+ third_party/rust/syn/src/gen/hash.rs | 2691 ++++++++++++++++
+ third_party/rust/syn/src/gen/visit.rs | 19 +-
+ third_party/rust/syn/src/gen/visit_mut.rs | 19 +-
+ third_party/rust/syn/src/generics.rs | 255 +-
+ third_party/rust/syn/src/item.rs | 1515 +++++----
+ third_party/rust/syn/src/keyword.rs | 0
+ third_party/rust/syn/src/lib.rs | 109 +-
+ third_party/rust/syn/src/lifetime.rs | 13 +-
+ third_party/rust/syn/src/lit.rs | 581 ++--
+ third_party/rust/syn/src/mac.rs | 55 +-
+ third_party/rust/syn/src/macros.rs | 61 +-
+ third_party/rust/syn/src/op.rs | 6 +-
+ third_party/rust/syn/src/parse.rs | 211 +-
+ third_party/rust/syn/src/parse_macro_input.rs | 32 +-
+ third_party/rust/syn/src/parse_quote.rs | 15 +-
+ third_party/rust/syn/src/pat.rs | 313 +-
+ third_party/rust/syn/src/path.rs | 33 +-
+ third_party/rust/syn/src/punctuated.rs | 123 +-
+ third_party/rust/syn/src/reserved.rs | 42 +
+ third_party/rust/syn/src/spanned.rs | 4 +-
+ third_party/rust/syn/src/stmt.rs | 141 +-
+ third_party/rust/syn/src/token.rs | 99 +-
+ third_party/rust/syn/src/tt.rs | 6 +-
+ third_party/rust/syn/src/ty.rs | 364 ++-
+ third_party/rust/syn/src/verbatim.rs | 15 +
+ third_party/rust/syn/src/whitespace.rs | 65 +
+ third_party/rust/syn/tests/clone.sh | 16 -
+ third_party/rust/syn/tests/common/eq.rs | 247 +-
+ third_party/rust/syn/tests/common/mod.rs | 13 +
+ third_party/rust/syn/tests/common/parse.rs | 24 +-
+ third_party/rust/syn/tests/debug/gen.rs | 50 +-
+ third_party/rust/syn/tests/debug/mod.rs | 17 +-
+ third_party/rust/syn/tests/features/error.rs | 1 -
+ third_party/rust/syn/tests/features/mod.rs | 22 -
+ third_party/rust/syn/tests/macros/mod.rs | 8 +-
+ third_party/rust/syn/tests/repo/mod.rs | 137 +-
+ third_party/rust/syn/tests/repo/progress.rs | 37 +
+ third_party/rust/syn/tests/test_asyncness.rs | 38 +-
+ third_party/rust/syn/tests/test_attribute.rs | 452 +--
+ .../rust/syn/tests/test_derive_input.rs | 1321 ++++----
+ third_party/rust/syn/tests/test_expr.rs | 314 +-
+ third_party/rust/syn/tests/test_generics.rs | 371 ++-
+ third_party/rust/syn/tests/test_grouping.rs | 53 +-
+ third_party/rust/syn/tests/test_ident.rs | 5 -
+ third_party/rust/syn/tests/test_item.rs | 45 +
+ third_party/rust/syn/tests/test_iterators.rs | 7 +-
+ third_party/rust/syn/tests/test_lit.rs | 75 +-
+ third_party/rust/syn/tests/test_meta.rs | 498 ++-
+ .../rust/syn/tests/test_parse_buffer.rs | 41 +-
+ .../rust/syn/tests/test_parse_stream.rs | 12 +
+ third_party/rust/syn/tests/test_pat.rs | 27 +-
+ third_party/rust/syn/tests/test_path.rs | 52 +
+ third_party/rust/syn/tests/test_precedence.rs | 196 +-
+ third_party/rust/syn/tests/test_receiver.rs | 127 +
+ third_party/rust/syn/tests/test_round_trip.rs | 41 +-
+ third_party/rust/syn/tests/test_shebang.rs | 59 +
+ .../rust/syn/tests/test_should_parse.rs | 4 -
+ third_party/rust/syn/tests/test_size.rs | 2 -
+ third_party/rust/syn/tests/test_stmt.rs | 44 +
+ .../rust/syn/tests/test_token_trees.rs | 12 +-
+ third_party/rust/syn/tests/test_ty.rs | 53 +
+ third_party/rust/syn/tests/test_visibility.rs | 145 +
+ third_party/rust/syn/tests/zzz_stable.rs | 4 +-
+ 103 files changed, 17319 insertions(+), 5831 deletions(-)
+ create mode 100644 third_party/rust/proc-macro2/src/detection.rs
+ create mode 100644 third_party/rust/proc-macro2/src/marker.rs
+ create mode 100644 third_party/rust/proc-macro2/src/parse.rs
+ delete mode 100644 third_party/rust/proc-macro2/src/strnom.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/comments.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/test_fmt.rs
+ create mode 100644 third_party/rust/syn/src/gen/clone.rs
+ create mode 100644 third_party/rust/syn/src/gen/debug.rs
+ create mode 100644 third_party/rust/syn/src/gen/eq.rs
+ create mode 100644 third_party/rust/syn/src/gen/hash.rs
+ delete mode 100644 third_party/rust/syn/src/keyword.rs
+ create mode 100644 third_party/rust/syn/src/reserved.rs
+ create mode 100644 third_party/rust/syn/src/verbatim.rs
+ create mode 100644 third_party/rust/syn/src/whitespace.rs
+ delete mode 100755 third_party/rust/syn/tests/clone.sh
+ delete mode 100644 third_party/rust/syn/tests/features/error.rs
+ delete mode 100644 third_party/rust/syn/tests/features/mod.rs
+ create mode 100644 third_party/rust/syn/tests/repo/progress.rs
+ create mode 100644 third_party/rust/syn/tests/test_item.rs
+ create mode 100644 third_party/rust/syn/tests/test_parse_stream.rs
+ create mode 100644 third_party/rust/syn/tests/test_path.rs
+ create mode 100644 third_party/rust/syn/tests/test_receiver.rs
+ create mode 100644 third_party/rust/syn/tests/test_shebang.rs
+ create mode 100644 third_party/rust/syn/tests/test_stmt.rs
+ create mode 100644 third_party/rust/syn/tests/test_ty.rs
+ create mode 100644 third_party/rust/syn/tests/test_visibility.rs
+
+diff --git a/Cargo.lock b/Cargo.lock
+index 19117e8368..d5fe0f6457 100644
+--- mozilla-release/Cargo.lock
++++ mozilla-release/Cargo.lock
+@@ -3717,9 +3717,9 @@ dependencies = [
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
++version = "1.0.24"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+ dependencies = [
+ "unicode-xid",
+ ]
+@@ -4647,9 +4647,9 @@ dependencies = [
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+diff --git a/third_party/rust/lucet-wasi/.cargo-checksum.json b/third_party/rust/lucet-wasi/.cargo-checksum.json
+index 229fc9978c..2c8c0a3c22 100644
+--- mozilla-release/third_party/rust/lucet-wasi/.cargo-checksum.json
++++ mozilla-release/third_party/rust/lucet-wasi/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/.gitignore":"44575cf5b28512d75644bf54a517dcef304ff809fd511747621b4d64f19aac66","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429e
c799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429ec799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d
0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/packed_simd/.cargo-checksum.json b/third_party/rust/packed_simd/.cargo-checksum.json
+index 01afcc1efd..c727a10006 100644
+--- mozilla-release/third_party/rust/packed_simd/.cargo-checksum.json
++++ mozilla-release/third_party/rust/packed_simd/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/.gitignore":"fe82c7da551079d832cf74200b0b359b4df9828cb4a0416fa7384f07a2ae6a13","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e
62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cbfe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b
6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef189314f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/a
pi/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/shuffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e",
"src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
++{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cb
fe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef18931
4f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/api/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/s
huffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e","src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e
9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+index eeef4120af..e7849f2896 100644
+--- mozilla-release/third_party/rust/proc-macro2/.cargo-checksum.json
++++ mozilla-release/third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"302d447d62c8d091d6241cf62bdad607c0d4ed8ff9f43d9b254c9d99c253ee8e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"a71283fbc495095eebbbf46753df3fe2c19505c745b508dea157f65796b64dd7","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"b114e013695260f6066395c8712cea112ec2a386010397a80f15a60f8b986444","src/lib.rs":"7f528764a958587f007f0c2a330a6a414bae2c8e73d5ed9fb64ff1b42b1805b1","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"1d2253eacbd40eb3a2a933be2adcee356af922bdb48cc89ff266252a41fd98a1","src/wrapper.rs":"f52646ce1705c1f6265516f30d4c43297b5f529dd31fb91f4c806be89d5a4122","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"5f30a704eeb2b9198b57f416d622da72d25cb9bf8d8b12e6d0e90aa2cb0e43fc","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+index 95d653633d..22150c516a 100644
+--- mozilla-release/third_party/rust/proc-macro2/Cargo.toml
++++ mozilla-release/third_party/rust/proc-macro2/Cargo.toml
+@@ -13,21 +13,22 @@
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.24"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+@@ -39,5 +40,3 @@ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+index 19b0c3b5f8..3d05e871a7 100644
+--- mozilla-release/third_party/rust/proc-macro2/README.md
++++ mozilla-release/third_party/rust/proc-macro2/README.md
+@@ -1,6 +1,6 @@
+ # proc-macro2
+
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+index deb9b92719..b247d874f6 100644
+--- mozilla-release/third_party/rust/proc-macro2/build.rs
++++ mozilla-release/third_party/rust/proc-macro2/build.rs
+@@ -14,6 +14,10 @@
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+@@ -57,6 +61,22 @@ fn main() {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 32 {
++ println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
++ }
++
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 44 {
++ println!("cargo:rustc-cfg=lexerror_display");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+index 0000000000..c597bc99c6
+--- /dev/null
++++ mozilla-release/third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+index fe582b3b5f..8900c5ff0f 100644
+--- mozilla-release/third_party/rust/proc-macro2/src/fallback.rs
++++ mozilla-release/third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,27 +1,41 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+@@ -31,6 +45,72 @@ impl TokenStream {
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
+@@ -59,20 +139,22 @@ impl FromStr for TokenStream {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ f.write_str("cannot parse string into token stream")
++ }
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+@@ -80,37 +162,22 @@ impl fmt::Display for TokenStream {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
+- }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+@@ -139,28 +206,26 @@ impl From<TokenStream> for proc_macro::TokenStream {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+@@ -168,31 +233,30 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+@@ -208,7 +272,7 @@ impl SourceFile {
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+@@ -218,7 +282,7 @@ impl fmt::Debug for SourceFile {
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+@@ -228,23 +292,11 @@ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
+@@ -282,16 +334,21 @@ impl FileInfo {
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
+ #[cfg(span_locations)]
+@@ -310,23 +367,22 @@ impl SourceMap {
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+@@ -343,11 +399,11 @@ impl SourceMap {
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+@@ -361,12 +417,16 @@ impl Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+@@ -374,7 +434,6 @@ impl Span {
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+@@ -427,26 +486,59 @@ impl Span {
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+@@ -474,11 +566,11 @@ impl Group {
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+@@ -486,36 +578,45 @@ impl Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
+@@ -549,16 +650,14 @@ impl Ident {
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+@@ -615,18 +714,18 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+@@ -637,17 +736,17 @@ impl fmt::Debug for Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+@@ -669,7 +768,7 @@ macro_rules! unsuffixed_numbers {
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+@@ -711,7 +810,7 @@ impl Literal {
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -719,7 +818,7 @@ impl Literal {
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -730,10 +829,10 @@ impl Literal {
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+@@ -744,10 +843,10 @@ impl Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
+@@ -756,6 +855,7 @@ impl Literal {
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+@@ -784,651 +884,17 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Literal");
+ debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
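
Among the fallback.rs changes above, the new Drop for TokenStream flattens nested groups into the parent's buffer so that dropping a deeply nested stream cannot overflow the stack. The following self-contained sketch illustrates the same iterative-drop pattern under that assumption; Stream and Node are hypothetical stand-ins, not types from the patch.

// Hypothetical Stream/Node types for illustration; the iterative flattening
// below is the same idea the TokenStream Drop above applies to nested Groups.
struct Stream {
    inner: Vec<Node>,
}

enum Node {
    Leaf(u32),
    Group(Stream),
}

impl Drop for Stream {
    fn drop(&mut self) {
        // Pop children and splice nested groups into this buffer instead of
        // letting them drop recursively.
        while let Some(node) = self.inner.pop() {
            if let Node::Group(mut group) = node {
                self.inner.append(&mut group.inner);
            }
        }
    }
}

fn main() {
    // Deep nesting that a naive recursive Drop could overflow on.
    let mut stream = Stream { inner: vec![Node::Leaf(0)] };
    for _ in 0..100_000 {
        stream = Stream { inner: vec![Node::Group(stream)] };
    }
    drop(stream); // destruction proceeds iteratively, one group at a time
}
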
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+index a08be3e815..c20fb50d4a 100644
+--- mozilla-release/third_party/rust/proc-macro2/src/lib.rs
++++ mozilla-release/third_party/rust/proc-macro2/src/lib.rs
+@@ -78,27 +78,24 @@
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.24")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+-use std::cmp::Ordering;
+-use std::fmt;
+-use std::hash::{Hash, Hasher};
+-use std::iter::FromIterator;
+-use std::marker;
+-use std::ops::RangeBounds;
+-#[cfg(procmacro2_semver_exempt)]
+-use std::path::PathBuf;
+-use std::rc::Rc;
+-use std::str::FromStr;
++mod marker;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+@@ -106,6 +103,17 @@ use crate::fallback as imp;
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
++use crate::marker::Marker;
++use std::cmp::Ordering;
++use std::error::Error;
++use std::fmt::{self, Debug, Display};
++use std::hash::{Hash, Hasher};
++use std::iter::FromIterator;
++use std::ops::RangeBounds;
++#[cfg(procmacro2_semver_exempt)]
++use std::path::PathBuf;
++use std::str::FromStr;
++
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+ ///
+ /// This type provides interfaces for iterating over token trees and for
+@@ -116,27 +124,27 @@ mod imp;
+ #[derive(Clone)]
+ pub struct TokenStream {
+ inner: imp::TokenStream,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ /// Error returned from `TokenStream::from_str`.
+ pub struct LexError {
+ inner: imp::LexError,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> TokenStream {
+ TokenStream {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -173,7 +181,7 @@ impl FromStr for TokenStream {
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+@@ -228,25 +236,33 @@ impl FromIterator<TokenStream> for TokenStream {
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ Debug::fmt(&self.inner, f)
++ }
++}
++
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
++impl Error for LexError {}
++
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+@@ -254,7 +270,7 @@ impl fmt::Debug for LexError {
+ #[derive(Clone, PartialEq, Eq)]
+ pub struct SourceFile {
+ inner: imp::SourceFile,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+@@ -262,7 +278,7 @@ impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -291,9 +307,9 @@ impl SourceFile {
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -311,25 +327,41 @@ pub struct LineColumn {
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Span {
+ fn _new(inner: imp::Span) -> Span {
+ Span {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Span) -> Span {
+ Span {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -342,6 +374,16 @@ impl Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+@@ -352,18 +394,12 @@ impl Span {
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+@@ -439,9 +475,9 @@ impl Span {
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -462,11 +498,11 @@ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+@@ -476,11 +512,11 @@ impl TokenTree {
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+@@ -513,32 +549,32 @@ impl From<Literal> for TokenTree {
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+@@ -651,15 +687,15 @@ impl Group {
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+@@ -669,7 +705,7 @@ impl fmt::Debug for Group {
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+ pub struct Punct {
+- op: char,
++ ch: char,
+ spacing: Spacing,
+ span: Span,
+ }
+@@ -695,9 +731,9 @@ impl Punct {
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+- pub fn new(op: char, spacing: Spacing) -> Punct {
++ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct {
+- op,
++ ch,
+ spacing,
+ span: Span::call_site(),
+ }
+@@ -705,7 +741,7 @@ impl Punct {
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+- self.op
++ self.ch
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether
+@@ -730,16 +766,16 @@ impl Punct {
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.ch, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+- debug.field("op", &self.op);
++ debug.field("char", &self.ch);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+@@ -813,14 +849,14 @@ impl fmt::Debug for Punct {
+ #[derive(Clone)]
+ pub struct Ident {
+ inner: imp::Ident,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Ident {
+ fn _new(inner: imp::Ident) -> Ident {
+ Ident {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -920,15 +956,15 @@ impl Hash for Ident {
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -941,7 +977,7 @@ impl fmt::Debug for Ident {
+ #[derive(Clone)]
+ pub struct Literal {
+ inner: imp::Literal,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ macro_rules! suffixed_int_literals {
+@@ -988,14 +1024,14 @@ impl Literal {
+ fn _new(inner: imp::Literal) -> Literal {
+ Literal {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Literal) -> Literal {
+ Literal {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -1140,26 +1176,25 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
+- use std::marker;
+- use std::rc::Rc;
++ use crate::marker::Marker;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+@@ -1168,7 +1203,7 @@ pub mod token_stream {
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Iterator for IntoIter {
+@@ -1179,9 +1214,9 @@ pub mod token_stream {
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -1192,7 +1227,7 @@ pub mod token_stream {
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+ }
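
The LineColumn ordering added above compares the line number first and falls back to the column only on ties. A small self-contained sketch of that ordering follows, using a locally defined stand-in struct rather than the crate's own type.

use std::cmp::Ordering;

// Stand-in struct for illustration; compares by line, then column,
// matching the Ord impl shown in the hunk above.
#[derive(PartialEq, Eq)]
struct LineColumn {
    line: usize,
    column: usize,
}

impl Ord for LineColumn {
    fn cmp(&self, other: &Self) -> Ordering {
        self.line
            .cmp(&other.line)
            .then(self.column.cmp(&other.column))
    }
}

impl PartialOrd for LineColumn {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let earlier = LineColumn { line: 3, column: 10 };
    let later = LineColumn { line: 4, column: 1 };
    assert!(earlier < later); // line takes precedence over column
}
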
+diff --git a/third_party/rust/proc-macro2/src/marker.rs b/third_party/rust/proc-macro2/src/marker.rs
+new file mode 100644
+index 0000000000..58729baf4a
+--- /dev/null
++++ mozilla-release/third_party/rust/proc-macro2/src/marker.rs
+@@ -0,0 +1,18 @@
++use std::marker::PhantomData;
++use std::panic::{RefUnwindSafe, UnwindSafe};
++use std::rc::Rc;
++
++// Zero sized marker with the correct set of autotrait impls we want all proc
++// macro types to have.
++pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
++
++pub(crate) use self::value::*;
++
++mod value {
++ pub(crate) use std::marker::PhantomData as Marker;
++}
++
++pub(crate) struct ProcMacroAutoTraits(Rc<()>);
++
++impl UnwindSafe for ProcMacroAutoTraits {}
++impl RefUnwindSafe for ProcMacroAutoTraits {}
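
The new marker.rs above uses a zero-sized PhantomData over a type holding Rc<()> so that wrapper types opt out of Send and Sync without storing any data, while the explicit impls opt back in to unwind safety. A rough standalone sketch of that trick is below; NotThreadSafe and Handle are hypothetical names chosen for illustration.

use std::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;

// Hypothetical marker type: Rc<()> makes it !Send and !Sync, and the
// explicit impls restore unwind safety, as marker.rs does above.
#[allow(dead_code)]
struct NotThreadSafe(Rc<()>);

impl UnwindSafe for NotThreadSafe {}
impl RefUnwindSafe for NotThreadSafe {}

struct Handle {
    value: u32,
    // Zero-sized: contributes auto-trait behavior but no data.
    _marker: PhantomData<NotThreadSafe>,
}

fn assert_unwind_safe<T: UnwindSafe>() {}

fn main() {
    let handle = Handle { value: 7, _marker: PhantomData };
    assert_unwind_safe::<Handle>();
    // A bound like `T: Send` would fail to compile here, because
    // PhantomData<NotThreadSafe> is !Send (Rc<()> is !Send).
    println!("{}", handle.value);
}
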
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+index 0000000000..365fe0484d
+--- /dev/null
++++ mozilla-release/third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,849 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::char;
++use std::str::{Bytes, CharIndices, Chars};
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if is_ident_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = punct(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
++ .iter()
++ .any(|prefix| input.starts_with(prefix))
++ {
++ Err(LexError)
++ } else {
++ ident_any(input)
++ }
++}
++
++fn ident_any(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
++ let mut last = ch;
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.peek() {
++ Some((_, ch)) if ch.is_whitespace() => {
++ last = *ch;
++ chars.next();
++ }
++ _ => break,
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => match bytes.next() {
++ Some((_, b'\n')) => {}
++ _ => break,
++ },
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
++ let mut last = b as char;
++ let rest = input.advance(newline + 1);
++ let mut chars = rest.char_indices();
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.next() {
++ Some((_, ch)) if ch.is_whitespace() => last = ch,
++ Some((offset, _)) => {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ break;
++ }
++ None => return Err(LexError),
++ }
++ }
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ let mut value = 0;
++ let mut len = 0;
++ while let Some((_, ch)) = chars.next() {
++ let digit = match ch {
++ '0'..='9' => ch as u8 - b'0',
++ 'a'..='f' => 10 + ch as u8 - b'a',
++ 'A'..='F' => 10 + ch as u8 - b'A',
++ '_' if len > 0 => continue,
++ '}' if len > 0 => return char::from_u32(value).is_some(),
++ _ => return false,
++ };
++ if len == 6 {
++ return false;
++ }
++ value *= 0x10;
++ value += u32::from(digit);
++ len += 1;
++ }
++ false
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ if !(has_dot || has_exp) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let token_before_exp = if has_dot {
++ Ok(input.advance(len - 1))
++ } else {
++ Err(LexError)
++ };
++ let mut has_sign = false;
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ if has_sign {
++ return token_before_exp;
++ }
++ chars.next();
++ len += 1;
++ has_sign = true;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return token_before_exp;
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ match b {
++ b'0'..=b'9' => {
++ let digit = (b - b'0') as u64;
++ if digit >= base {
++ return Err(LexError);
++ }
++ }
++ b'a'..=b'f' => {
++ let digit = 10 + (b - b'a') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'A'..=b'F' => {
++ let digit = 10 + (b - b'A') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn punct(input: Cursor) -> PResult<Punct> {
++ match punct_char(input) {
++ Ok((rest, '\'')) => {
++ if ident_any(rest)?.0.starts_with("'") {
++ Err(LexError)
++ } else {
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ }
++ Ok((rest, ch)) => {
++ let kind = match punct_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn punct_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as a punct.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+index eb7d0b8a8e..0000000000
+--- mozilla-release/third_party/rust/proc-macro2/src/strnom.rs
++++ mozilla-release//dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
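
The deletion above removes the nom-style macro combinators (`tag!`, `alt!`, `do_parse!`, and friends) in favor of the plain functions over a cursor that now live in parse.rs. A condensed sketch, with deliberately simplified types rather than the crate's real Cursor, of how the same shape looks as ordinary functions: each step returns the remaining input on success, so alternatives become `or_else` chains instead of macro expansions.

    type PResult<'a, T> = Result<(&'a str, T), ()>;

    // Plain-function equivalent of the deleted `tag!` macro.
    fn tag<'a>(input: &'a str, t: &str) -> PResult<'a, ()> {
        input.strip_prefix(t).map(|rest| (rest, ())).ok_or(())
    }

    // Plain-function equivalent of an `alt!` of two tags.
    fn line_or_block_comment(input: &str) -> PResult<'_, &'static str> {
        tag(input, "//")
            .map(|(rest, ())| (rest, "line"))
            .or_else(|()| tag(input, "/*").map(|(rest, ())| (rest, "block")))
    }

    fn main() {
        assert_eq!(line_or_block_comment("// x"), Ok((" x", "line")));
        assert_eq!(line_or_block_comment("/* x */"), Ok((" x */", "block")));
        assert!(line_or_block_comment("x").is_err());
    }
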
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+index 552b9381cf..3df044af17 100644
+--- mozilla-release/third_party/rust/proc-macro2/src/wrapper.rs
++++ mozilla-release/third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,15 +1,15 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+@@ -19,73 +19,16 @@ pub enum TokenStream {
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+@@ -103,7 +46,12 @@ impl DeferredTokenStream {
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+@@ -114,7 +62,7 @@ impl DeferredTokenStream {
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+@@ -147,9 +95,9 @@ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+@@ -157,11 +105,17 @@ impl FromStr for TokenStream {
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+@@ -187,7 +141,7 @@ impl From<fallback::TokenStream> for TokenStream {
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+@@ -196,9 +150,9 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+- op.set_span(tt.span().inner.unwrap_nightly());
+- op.into()
++ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
++ punct.set_span(tt.span().inner.unwrap_nightly());
++ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+@@ -207,7 +161,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+@@ -215,9 +169,9 @@ impl From<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+@@ -227,7 +181,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+@@ -252,14 +206,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
+@@ -270,20 +225,20 @@ impl Extend<TokenStream> for TokenStream {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+@@ -300,17 +255,29 @@ impl From<fallback::LexError> for LexError {
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
++ }
++ }
++}
++
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ #[cfg(lexerror_display)]
++ LexError::Compiler(e) => Display::fmt(e, f),
++ #[cfg(not(lexerror_display))]
++ LexError::Compiler(_e) => Display::fmt(&fallback::LexError, f),
++ LexError::Fallback(e) => Display::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+@@ -361,7 +328,7 @@ impl Iterator for TokenTreeIter {
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+@@ -369,7 +336,7 @@ impl fmt::Debug for TokenTreeIter {
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+@@ -397,58 +364,77 @@ impl SourceFile {
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+@@ -542,16 +528,16 @@ impl From<fallback::Span> for Span {
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+@@ -561,7 +547,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+@@ -652,26 +638,26 @@ impl From<fallback::Group> for Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+@@ -747,26 +733,26 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+@@ -774,7 +760,7 @@ pub enum Literal {
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -786,7 +772,7 @@ macro_rules! suffixed_numbers {
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -830,7 +816,7 @@ impl Literal {
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+@@ -838,7 +824,7 @@ impl Literal {
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+@@ -846,7 +832,7 @@ impl Literal {
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+@@ -854,7 +840,7 @@ impl Literal {
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+@@ -862,7 +848,7 @@ impl Literal {
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+@@ -908,20 +894,20 @@ impl From<fallback::Literal> for Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
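
The FromStr change in this file routes compiler-side parsing through proc_macro_parse, which catches panics raised by proc_macro's parser (rust-lang/rust#58736) and converts them into a LexError. A standalone sketch of that catch_unwind-as-error pattern; `parse_may_panic` is a stand-in for the panicking parser, not a real API.

    use std::panic;

    // Stand-in for a parser that can panic on certain inputs.
    fn parse_may_panic(src: &str) -> usize {
        if src.contains('\0') {
            panic!("parser bug");
        }
        src.len()
    }

    // Same shape as proc_macro_parse above: turn a panic inside the parser
    // into an ordinary error for the caller instead of unwinding through it.
    fn parse_or_err(src: &str) -> Result<usize, String> {
        panic::catch_unwind(|| parse_may_panic(src)).map_err(|_| String::from("lex error"))
    }

    fn main() {
        assert_eq!(parse_or_err("fn main() {}"), Ok(12));
        assert!(parse_or_err("bad\0input").is_err());
    }
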
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+index 0000000000..708cccb880
+--- /dev/null
++++ mozilla-release/third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
+diff --git a/third_party/rust/proc-macro2/tests/marker.rs b/third_party/rust/proc-macro2/tests/marker.rs
+index 7af2539c1a..70e57677cd 100644
+--- mozilla-release/third_party/rust/proc-macro2/tests/marker.rs
++++ mozilla-release/third_party/rust/proc-macro2/tests/marker.rs
+@@ -57,3 +57,36 @@ mod semver_exempt {
+
+ assert_impl!(SourceFile is not Send or Sync);
+ }
++
++#[cfg(not(no_libprocmacro_unwind_safe))]
++mod unwind_safe {
++ use super::*;
++ use std::panic::{RefUnwindSafe, UnwindSafe};
++
++ macro_rules! assert_unwind_safe {
++ ($($types:ident)*) => {
++ $(
++ assert_impl!($types is UnwindSafe and RefUnwindSafe);
++ )*
++ };
++ }
++
++ assert_unwind_safe! {
++ Delimiter
++ Group
++ Ident
++ LexError
++ Literal
++ Punct
++ Spacing
++ Span
++ TokenStream
++ TokenTree
++ }
++
++ #[cfg(procmacro2_semver_exempt)]
++ assert_unwind_safe! {
++ LineColumn
++ SourceFile
++ }
++}
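
The unwind_safe module above leans on an `assert_impl!` macro defined earlier in marker.rs (outside the hunks shown here). A hypothetical, minimal stand-in for that helper, assuming a toolchain where proc_macro's types are unwind safe as the cfg gate above requires: the call only type-checks when the bound holds, so the assertion happens at compile time.

    use std::panic::{RefUnwindSafe, UnwindSafe};

    // Compiles only if T satisfies both bounds; nothing happens at runtime.
    fn assert_unwind_safe<T: UnwindSafe + RefUnwindSafe>() {}

    fn main() {
        assert_unwind_safe::<proc_macro2::TokenStream>();
        assert_unwind_safe::<proc_macro2::Span>();
    }
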
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+index 7528388138..1e9f633944 100644
+--- mozilla-release/third_party/rust/proc-macro2/tests/test.rs
++++ mozilla-release/third_party/rust/proc-macro2/tests/test.rs
+@@ -1,7 +1,6 @@
++use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+@@ -84,6 +83,11 @@ fn literal_string() {
+ assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
+ }
+
++#[test]
++fn literal_raw_string() {
++ "r\"\r\n\"".parse::<TokenStream>().unwrap();
++}
++
+ #[test]
+ fn literal_character() {
+ assert_eq!(Literal::character('x').to_string(), "'x'");
+@@ -110,6 +114,37 @@ fn literal_suffix() {
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++ assert_eq!(token_count("0E"), 1);
++ assert_eq!(token_count("0o0A"), 1);
++ assert_eq!(token_count("0E--0"), 4);
++ assert_eq!(token_count("0.0ECMA"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+@@ -161,41 +196,21 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
++ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
++ fail("\"\\u{999999}\""); // outside of valid range of char
++ fail("\"\\u{_0}\""); // leading underscore
++ fail("\"\\u{}\""); // empty
++ fail("b\"\r\""); // bare carriage return in byte string
++ fail("r\"\r\""); // bare carriage return in raw string
++ fail("\"\\\r \""); // backslash carriage return
++ fail("'aa'aa");
++ fail("br##\"\"#");
++ fail("\"\\\n\u{85}\r\"");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+@@ -274,53 +289,11 @@ fn span_join() {
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
+-}
+-
+-#[test]
+-fn op_before_comment() {
++fn punct_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+@@ -331,6 +304,22 @@ fn op_before_comment() {
+ }
+ }
+
++#[test]
++fn joint_last_token() {
++ // This test verifies that we match the behavior of libproc_macro *not* in
++ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
++ // behavior was temporarily broken.
++ // See https://github.com/rust-lang/rust/issues/76399
++
++ let joint_punct = Punct::new(':', Spacing::Joint);
++ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
++ let punct = match stream.into_iter().next().unwrap() {
++ TokenTree::Punct(punct) => punct,
++ _ => unreachable!(),
++ };
++ assert_eq!(punct.spacing(), Spacing::Joint);
++}
++
+ #[test]
+ fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+@@ -345,11 +334,11 @@ fn raw_identifier() {
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+@@ -358,7 +347,7 @@ fn test_debug_ident() {
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -368,7 +357,7 @@ TokenStream [
+ sym: a,
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ },
+ Literal {
+@@ -379,7 +368,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -389,7 +378,7 @@ TokenStream [
+ sym: a
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone
+ },
+ Literal {
+@@ -400,7 +389,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -411,7 +400,7 @@ TokenStream [
+ span: bytes(2..3),
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5),
+ },
+@@ -425,7 +414,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -436,7 +425,7 @@ TokenStream [
+ span: bytes(2..3)
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5)
+ },
+@@ -464,3 +453,80 @@ fn default_tokenstream_is_empty() {
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
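
The literal_suffix additions earlier in this file count tokens through a `token_count` helper whose definition sits outside the hunks shown here. A plausible shape for it, given how the tests use it, would be to parse the input and count the top-level token trees:

    use proc_macro2::TokenStream;

    // Assumed definition of the `token_count` helper referenced above.
    fn token_count(p: &str) -> usize {
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    fn main() {
        assert_eq!(token_count("\"\"s"), 1); // suffixed string literal stays one token
        assert_eq!(token_count("1._0"), 3);  // splits into `1`, `.`, `_0` per the hunk above
    }
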
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+index 0000000000..99a0aee5c8
+--- /dev/null
++++ mozilla-release/third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
+diff --git a/third_party/rust/spirv-cross-internal/.cargo-checksum.json b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+index 3c732d6d0e..014aa640e1 100644
+--- mozilla-release/third_party/rust/spirv-cross-internal/.cargo-checksum.json
++++ mozilla-release/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.gitignore":"7f23cc92ddb5e1f584447e98d3e8ab6543fc182f1543f0f6ec29856f9250cdd6","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148c
fc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b196567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44e
df18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148cfc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b1
96567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44edf18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5
a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+index 77939d8fc6..704f2ed200 100644
+--- mozilla-release/third_party/rust/syn/.cargo-checksum.json
++++ mozilla-release/third_party/rust/syn/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835
f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5
fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d
01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+index 7a5c962f06..20277fc461 100644
+--- mozilla-release/third_party/rust/syn/Cargo.toml
++++ mozilla-release/third_party/rust/syn/Cargo.toml
+@@ -13,7 +13,7 @@
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+@@ -24,25 +24,21 @@ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+@@ -52,18 +48,34 @@ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+@@ -80,7 +92,6 @@ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+index 29a7f32a46..12b5f45b3d 100644
+--- mozilla-release/third_party/rust/syn/README.md
++++ mozilla-release/third_party/rust/syn/README.md
+@@ -1,10 +1,10 @@
+ Parser for Rust source code
+ ===========================
+
+-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
+-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
+-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
+-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+@@ -46,10 +46,6 @@ contains some APIs that may be useful more generally.
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+@@ -88,8 +84,6 @@ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+@@ -271,7 +265,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+index 08ecd90960..58ab8df297 100644
+--- mozilla-release/third_party/rust/syn/benches/file.rs
++++ mozilla-release/third_party/rust/syn/benches/file.rs
+@@ -1,9 +1,16 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+index e3d9cd29ba..50e1a7f601 100644
+--- mozilla-release/third_party/rust/syn/benches/rust.rs
++++ mozilla-release/third_party/rust/syn/benches/rust.rs
+@@ -4,7 +4,14 @@
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+@@ -28,31 +35,35 @@ mod syn_parse {
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+@@ -104,11 +115,11 @@ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+@@ -128,12 +139,12 @@ fn main() {
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+index c0f9ed3406..cf7681c3f9 100644
+--- mozilla-release/third_party/rust/syn/build.rs
++++ mozilla-release/third_party/rust/syn/build.rs
+@@ -1,6 +1,6 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+@@ -26,38 +26,14 @@ struct Compiler {
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+index 34009deabc..fa4f1cb2a3 100644
+--- mozilla-release/third_party/rust/syn/src/attr.rs
++++ mozilla-release/third_party/rust/syn/src/attr.rs
+@@ -9,15 +9,11 @@ use proc_macro2::TokenStream;
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -111,7 +107,46 @@ ast_struct! {
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+@@ -120,39 +155,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+@@ -199,7 +206,7 @@ impl Attribute {
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+@@ -208,7 +215,7 @@ impl Attribute {
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+@@ -221,7 +228,7 @@ impl Attribute {
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+@@ -234,7 +241,7 @@ impl Attribute {
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+@@ -247,7 +254,7 @@ impl Attribute {
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+@@ -261,19 +268,23 @@ fn error_expected_args(attr: &Attribute) -> Error {
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
+@@ -298,7 +309,7 @@ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+@@ -312,7 +323,6 @@ ast_enum! {
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+@@ -322,7 +332,7 @@ ast_enum! {
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+@@ -360,7 +370,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+@@ -372,7 +382,7 @@ ast_struct! {
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+@@ -398,7 +408,7 @@ impl Meta {
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+@@ -429,8 +439,8 @@ ast_enum_of_structs! {
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -464,7 +474,7 @@ where
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+@@ -474,7 +484,7 @@ where
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+index 551a5ac816..a461cc49ea 100644
+--- mozilla-release/third_party/rust/syn/src/buffer.rs
++++ mozilla-release/third_party/rust/syn/src/buffer.rs
+@@ -1,7 +1,7 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+@@ -36,7 +36,7 @@ enum Entry {
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+@@ -98,7 +98,7 @@ impl TokenBuffer {
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -133,8 +133,7 @@ impl TokenBuffer {
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+@@ -201,13 +200,13 @@ impl<'a> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+@@ -215,13 +214,14 @@ impl<'a> Cursor<'a> {
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+@@ -342,6 +342,44 @@ impl<'a> Cursor<'a> {
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+index 200e8478ef..a33044a564 100644
+--- mozilla-release/third_party/rust/syn/src/custom_keyword.rs
++++ mozilla-release/third_party/rust/syn/src/custom_keyword.rs
+@@ -86,7 +86,7 @@
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+@@ -95,7 +95,7 @@ macro_rules! custom_keyword {
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+@@ -112,10 +112,10 @@ macro_rules! custom_keyword {
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+index 29fa448bd8..70dff42851 100644
+--- mozilla-release/third_party/rust/syn/src/custom_punctuation.rs
++++ mozilla-release/third_party/rust/syn/src/custom_punctuation.rs
+@@ -74,19 +74,19 @@
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+@@ -98,33 +98,33 @@ macro_rules! custom_punctuation {
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+@@ -142,12 +142,12 @@ macro_rules! impl_parse_for_custom_punctuation {
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+@@ -221,16 +221,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+@@ -279,7 +279,7 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+@@ -297,13 +297,3 @@ macro_rules! stringify_punct {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+index be43679874..b217b8ca6f 100644
+--- mozilla-release/third_party/rust/syn/src/data.rs
++++ mozilla-release/third_party/rust/syn/src/data.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+@@ -24,7 +24,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -52,7 +52,7 @@ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+@@ -63,7 +63,7 @@ ast_struct! {
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+@@ -93,6 +93,24 @@ impl Fields {
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+@@ -129,7 +147,7 @@ impl<'a> IntoIterator for &'a mut Fields {
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+@@ -154,7 +172,7 @@ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -184,7 +202,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+@@ -194,7 +212,7 @@ ast_struct! {
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+@@ -205,7 +223,7 @@ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+@@ -220,12 +238,15 @@ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+@@ -295,6 +316,17 @@ pub mod parsing {
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+@@ -310,27 +342,39 @@ pub mod parsing {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
+ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+@@ -347,6 +391,14 @@ pub mod parsing {
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
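The `Fields::len` and `Fields::is_empty` methods introduced in this file give derive macros a direct way to branch on field count. A hedged sketch of how they might be used; the surrounding `DeriveInput` handling is ordinary syn usage and not part of the patch:

    use syn::{Data, DeriveInput, Fields};

    fn describe(input: &DeriveInput) -> String {
        match &input.data {
            Data::Struct(s) => match &s.fields {
                // Unit structs carry no fields at all.
                Fields::Unit => "unit struct".to_string(),
                fields if fields.is_empty() => "struct with zero fields".to_string(),
                fields => format!("struct with {} field(s)", fields.len()),
            },
            Data::Enum(_) | Data::Union(_) => "not a struct".to_string(),
        }
    }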
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+index 8cb9cf7b6d..3fa9d89a93 100644
+--- mozilla-release/third_party/rust/syn/src/derive.rs
++++ mozilla-release/third_party/rust/syn/src/derive.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+@@ -26,7 +26,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -53,7 +53,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+@@ -65,7 +65,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+@@ -77,7 +77,7 @@ ast_struct! {
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
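`DeriveInput` is the root type a `proc_macro_derive` macro receives once its `TokenStream` is parsed. A minimal sketch of that flow, assuming a proc-macro crate with syn and quote as dependencies; the `HelloWorld` name and generated method are purely illustrative:

    use proc_macro::TokenStream;
    use quote::quote;
    use syn::{parse_macro_input, DeriveInput};

    #[proc_macro_derive(HelloWorld)]
    pub fn hello_world(input: TokenStream) -> TokenStream {
        // Parse the annotated struct/enum/union into a syntax tree.
        let input = parse_macro_input!(input as DeriveInput);
        let name = input.ident;
        TokenStream::from(quote! {
            impl #name {
                pub fn hello() -> &'static str { stringify!(#name) }
            }
        })
    }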
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+index 4d9ff93728..76c9fce6f8 100644
+--- mozilla-release/third_party/rust/syn/src/discouraged.rs
++++ mozilla-release/third_party/rust/syn/src/discouraged.rs
+@@ -16,7 +16,7 @@ pub trait Speculative {
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+@@ -72,7 +72,6 @@ pub trait Speculative {
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+@@ -164,6 +163,30 @@ impl<'a> Speculative for ParseBuffer<'a> {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
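The new logic above propagates "unexpected token" state between a fork and the stream it was forked from when `advance_to` commits the fork. In caller code the fork/advance_to pair is used for speculative parsing; a small illustrative helper (not taken from the patch) that only consumes `ident =` when the whole prefix is present:

    use syn::parse::discouraged::Speculative;
    use syn::parse::{ParseStream, Result};
    use syn::{Ident, Token};

    fn maybe_key(input: ParseStream) -> Result<Option<Ident>> {
        let ahead = input.fork();
        if let Ok(ident) = ahead.parse::<Ident>() {
            if ahead.peek(Token![=]) {
                ahead.parse::<Token![=]>()?;
                // Commit: move the real stream to where the fork stopped.
                input.advance_to(&ahead);
                return Ok(Some(ident));
            }
        }
        // Nothing is consumed from `input` if the speculative parse failed.
        Ok(None)
    }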
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+index 146d652299..dba34f9254 100644
+--- mozilla-release/third_party/rust/syn/src/error.rs
++++ mozilla-release/third_party/rust/syn/src/error.rs
+@@ -1,4 +1,3 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+@@ -32,8 +31,8 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -82,7 +81,6 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+@@ -250,6 +248,17 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+@@ -278,6 +287,14 @@ impl Display for Error {
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+@@ -355,3 +372,11 @@ impl<'a> Iterator for Iter<'a> {
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
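The `Extend<Error>` impl added here pairs with `Error::combine` to merge several diagnostics into one value, so that all of them surface together when converted to `compile_error!`. A brief sketch, assuming a caller that has already collected message strings:

    use proc_macro2::Span;
    use syn::Error;

    fn collect_errors(messages: &[&str]) -> Option<Error> {
        let mut iter = messages.iter();
        // Seed with the first message, then fold the rest in via `extend`.
        let mut error = Error::new(Span::call_site(), iter.next()?);
        error.extend(iter.map(|msg| Error::new(Span::call_site(), msg)));
        Some(error)
    }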
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+index 2874a463aa..2fe0e0b5d8 100644
+--- mozilla-release/third_party/rust/syn/src/expr.rs
++++ mozilla-release/third_party/rust/syn/src/expr.rs
+@@ -1,18 +1,21 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+@@ -83,7 +86,7 @@ ast_enum_of_structs! {
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+@@ -228,7 +231,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -239,7 +242,7 @@ ast_struct! {
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -251,7 +254,7 @@ ast_struct! {
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -263,7 +266,7 @@ ast_struct! {
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+@@ -275,7 +278,7 @@ ast_struct! {
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -287,7 +290,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+@@ -300,7 +303,7 @@ ast_struct! {
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -311,7 +314,7 @@ ast_struct! {
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -323,7 +326,7 @@ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+@@ -335,7 +338,7 @@ ast_struct! {
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+@@ -348,7 +351,7 @@ ast_struct! {
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+@@ -361,7 +364,7 @@ ast_struct! {
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+@@ -378,7 +381,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+@@ -390,7 +393,7 @@ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -402,7 +405,7 @@ ast_struct! {
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -421,7 +424,7 @@ ast_struct! {
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+@@ -436,7 +439,7 @@ ast_struct! {
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+@@ -449,7 +452,7 @@ ast_struct! {
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+@@ -462,7 +465,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -475,7 +478,7 @@ ast_struct! {
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+@@ -486,7 +489,7 @@ ast_struct! {
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -498,7 +501,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -508,7 +511,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+@@ -521,7 +524,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+@@ -536,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -550,7 +553,7 @@ ast_struct! {
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+@@ -562,7 +565,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+@@ -574,7 +577,7 @@ ast_struct! {
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -587,7 +590,7 @@ ast_struct! {
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -600,7 +603,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+@@ -614,7 +617,7 @@ ast_struct! {
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -628,7 +631,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -639,7 +642,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+@@ -650,7 +653,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -661,7 +664,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -673,7 +676,7 @@ ast_struct! {
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+@@ -685,7 +688,7 @@ ast_struct! {
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+@@ -696,7 +699,7 @@ ast_struct! {
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -709,7 +712,7 @@ ast_struct! {
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+@@ -717,232 +720,6 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+@@ -996,7 +773,7 @@ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+@@ -1006,12 +783,50 @@ ast_enum! {
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
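The `IdentFragment` impls added above let `Member` and `Index` be interpolated by `quote::format_ident!`, which is handy when a macro derives helper names from field accessors. A minimal sketch; the `get_` prefix is illustrative only:

    use proc_macro2::Ident;
    use quote::format_ident;
    use syn::Member;

    // `member` may be a named field (`x` -> `get_x`) or a tuple index (`0` -> `get_0`).
    fn getter_name(member: &Member) -> Ident {
        format_ident!("get_{}", member)
    }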
+@@ -1027,28 +842,28 @@ impl From<usize> for Index {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
+@@ -1057,7 +872,7 @@ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+@@ -1070,7 +885,7 @@ ast_struct! {
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+@@ -1086,7 +901,7 @@ ast_enum! {
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+@@ -1107,7 +922,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+@@ -1134,7 +949,7 @@ ast_struct! {
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+@@ -1149,8 +964,7 @@ ast_struct! {
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+@@ -1162,7 +976,7 @@ ast_enum! {
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+@@ -1183,16 +997,17 @@ pub(crate) mod parsing {
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+@@ -1246,9 +1061,121 @@ pub(crate) mod parsing {
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
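`Expr::parse_without_eager_brace`, introduced in this hunk as the public replacement for the private `expr_no_struct`, targets condition-like positions where a trailing `{` must open a block rather than a struct literal. A hedged usage sketch for a custom parser, assuming syn's `full` feature:

    use syn::parse::{ParseStream, Result};
    use syn::{Block, Expr, Token};

    // Parse `if <cond> { ... }`-shaped input; the condition must not treat the
    // `{` that starts the body as the beginning of a struct literal.
    fn cond_and_body(input: ParseStream) -> Result<(Expr, Block)> {
        input.parse::<Token![if]>()?;
        let cond = Expr::parse_without_eager_brace(input)?;
        let body: Block = input.parse()?;
        Ok((cond, body))
    }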
+ #[cfg(feature = "full")]
+@@ -1430,56 +1357,84 @@ pub(crate) mod parsing {
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+@@ -1495,13 +1450,11 @@ pub(crate) mod parsing {
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+@@ -1523,18 +1476,26 @@ pub(crate) mod parsing {
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+@@ -1620,10 +1581,17 @@ pub(crate) mod parsing {
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+@@ -1646,7 +1614,11 @@ pub(crate) mod parsing {
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+@@ -1668,7 +1640,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+@@ -1740,7 +1711,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+@@ -1878,7 +1848,7 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+@@ -1905,7 +1875,7 @@ pub(crate) mod parsing {
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+@@ -1951,7 +1921,16 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
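This hunk replaces the old `TODO` so that const arguments inside a method turbofish are accepted, either as a literal or as a braced block. A quick illustrative check, assuming syn's `parsing` and `full` features; the method name is arbitrary:

    use syn::{parse_str, Expr};

    fn parses_const_method_args() -> syn::Result<(Expr, Expr)> {
        // Literal const argument.
        let lit = parse_str::<Expr>("x.resize::<3>()")?;
        // Block const argument.
        let block = parse_str::<Expr>("x.resize::<{ N + 1 }>()")?;
        Ok((lit, block))
    }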
+@@ -1960,44 +1939,20 @@ pub(crate) mod parsing {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+@@ -2033,29 +1988,14 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2063,7 +2003,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+@@ -2077,6 +2017,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+@@ -2086,7 +2027,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+@@ -2097,8 +2038,9 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2110,7 +2052,7 @@ pub(crate) mod parsing {
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+@@ -2305,9 +2247,10 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2315,7 +2258,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+@@ -2399,6 +2342,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+@@ -2416,7 +2360,7 @@ pub(crate) mod parsing {
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+@@ -2433,46 +2377,36 @@ pub(crate) mod parsing {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+@@ -2577,27 +2511,7 @@ pub(crate) mod parsing {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+@@ -2641,6 +2555,26 @@ pub(crate) mod parsing {
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+index d09577a27a..4f9bc145d9 100644
+--- mozilla-release/third_party/rust/syn/src/ext.rs
++++ mozilla-release/third_party/rust/syn/src/ext.rs
+@@ -1,6 +1,6 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+@@ -16,7 +16,7 @@ use crate::token::CustomToken;
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+@@ -129,7 +129,13 @@ mod private {
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+index 88c02fe832..c8fab63cd9 100644
+--- mozilla-release/third_party/rust/syn/src/file.rs
++++ mozilla-release/third_party/rust/syn/src/file.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+@@ -37,6 +37,8 @@ ast_struct! {
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+index 0000000000..bea3887013
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {
++ ItemForeignMod {
++ attrs: self.attrs.clone(),
++ abi: self.abi.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemImpl {
++ fn clone(&self) -> Self {
++ ItemImpl {
++ attrs: self.attrs.clone(),
++ defaultness: self.defaultness.clone(),
++ unsafety: self.unsafety.clone(),
++ impl_token: self.impl_token.clone(),
++ generics: self.generics.clone(),
++ trait_: self.trait_.clone(),
++ self_ty: self.self_ty.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro {
++ fn clone(&self) -> Self {
++ ItemMacro {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro2 {
++ fn clone(&self) -> Self {
++ ItemMacro2 {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ macro_token: self.macro_token.clone(),
++ ident: self.ident.clone(),
++ rules: self.rules.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMod {
++ fn clone(&self) -> Self {
++ ItemMod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ mod_token: self.mod_token.clone(),
++ ident: self.ident.clone(),
++ content: self.content.clone(),
++ semi: self.semi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStatic {
++ fn clone(&self) -> Self {
++ ItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStruct {
++ fn clone(&self) -> Self {
++ ItemStruct {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ struct_token: self.struct_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTrait {
++ fn clone(&self) -> Self {
++ ItemTrait {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ unsafety: self.unsafety.clone(),
++ auto_token: self.auto_token.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ supertraits: self.supertraits.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTraitAlias {
++ fn clone(&self) -> Self {
++ ItemTraitAlias {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ bounds: self.bounds.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemType {
++ fn clone(&self) -> Self {
++ ItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUnion {
++ fn clone(&self) -> Self {
++ ItemUnion {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ union_token: self.union_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUse {
++ fn clone(&self) -> Self {
++ ItemUse {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ use_token: self.use_token.clone(),
++ leading_colon: self.leading_colon.clone(),
++ tree: self.tree.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Label {
++ fn clone(&self) -> Self {
++ Label {
++ name: self.name.clone(),
++ colon_token: self.colon_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for LifetimeDef {
++ fn clone(&self) -> Self {
++ LifetimeDef {
++ attrs: self.attrs.clone(),
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++impl Clone for Lit {
++ fn clone(&self) -> Self {
++ match self {
++ Lit::Str(v0) => Lit::Str(v0.clone()),
++ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
++ Lit::Byte(v0) => Lit::Byte(v0.clone()),
++ Lit::Char(v0) => Lit::Char(v0.clone()),
++ Lit::Int(v0) => Lit::Int(v0.clone()),
++ Lit::Float(v0) => Lit::Float(v0.clone()),
++ Lit::Bool(v0) => Lit::Bool(v0.clone()),
++ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
++ }
++ }
++}
++impl Clone for LitBool {
++ fn clone(&self) -> Self {
++ LitBool {
++ value: self.value.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Local {
++ fn clone(&self) -> Self {
++ Local {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ init: self.init.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Macro {
++ fn clone(&self) -> Self {
++ Macro {
++ path: self.path.clone(),
++ bang_token: self.bang_token.clone(),
++ delimiter: self.delimiter.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MacroDelimiter {
++ fn clone(&self) -> Self {
++ match self {
++ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
++ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
++ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Member {
++ fn clone(&self) -> Self {
++ match self {
++ Member::Named(v0) => Member::Named(v0.clone()),
++ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Meta {
++ fn clone(&self) -> Self {
++ match self {
++ Meta::Path(v0) => Meta::Path(v0.clone()),
++ Meta::List(v0) => Meta::List(v0.clone()),
++ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaList {
++ fn clone(&self) -> Self {
++ MetaList {
++ path: self.path.clone(),
++ paren_token: self.paren_token.clone(),
++ nested: self.nested.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaNameValue {
++ fn clone(&self) -> Self {
++ MetaNameValue {
++ path: self.path.clone(),
++ eq_token: self.eq_token.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for MethodTurbofish {
++ fn clone(&self) -> Self {
++ MethodTurbofish {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for NestedMeta {
++ fn clone(&self) -> Self {
++ match self {
++ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
++ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ParenthesizedGenericArguments {
++ fn clone(&self) -> Self {
++ ParenthesizedGenericArguments {
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Pat {
++ fn clone(&self) -> Self {
++ match self {
++ Pat::Box(v0) => Pat::Box(v0.clone()),
++ Pat::Ident(v0) => Pat::Ident(v0.clone()),
++ Pat::Lit(v0) => Pat::Lit(v0.clone()),
++ Pat::Macro(v0) => Pat::Macro(v0.clone()),
++ Pat::Or(v0) => Pat::Or(v0.clone()),
++ Pat::Path(v0) => Pat::Path(v0.clone()),
++ Pat::Range(v0) => Pat::Range(v0.clone()),
++ Pat::Reference(v0) => Pat::Reference(v0.clone()),
++ Pat::Rest(v0) => Pat::Rest(v0.clone()),
++ Pat::Slice(v0) => Pat::Slice(v0.clone()),
++ Pat::Struct(v0) => Pat::Struct(v0.clone()),
++ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
++ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
++ Pat::Type(v0) => Pat::Type(v0.clone()),
++ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
++ Pat::Wild(v0) => Pat::Wild(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatBox {
++ fn clone(&self) -> Self {
++ PatBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatIdent {
++ fn clone(&self) -> Self {
++ PatIdent {
++ attrs: self.attrs.clone(),
++ by_ref: self.by_ref.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ subpat: self.subpat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatLit {
++ fn clone(&self) -> Self {
++ PatLit {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatMacro {
++ fn clone(&self) -> Self {
++ PatMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatOr {
++ fn clone(&self) -> Self {
++ PatOr {
++ attrs: self.attrs.clone(),
++ leading_vert: self.leading_vert.clone(),
++ cases: self.cases.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatPath {
++ fn clone(&self) -> Self {
++ PatPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRange {
++ fn clone(&self) -> Self {
++ PatRange {
++ attrs: self.attrs.clone(),
++ lo: self.lo.clone(),
++ limits: self.limits.clone(),
++ hi: self.hi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatReference {
++ fn clone(&self) -> Self {
++ PatReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ mutability: self.mutability.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRest {
++ fn clone(&self) -> Self {
++ PatRest {
++ attrs: self.attrs.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatSlice {
++ fn clone(&self) -> Self {
++ PatSlice {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatStruct {
++ fn clone(&self) -> Self {
++ PatStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTuple {
++ fn clone(&self) -> Self {
++ PatTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTupleStruct {
++ fn clone(&self) -> Self {
++ PatTupleStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatType {
++ fn clone(&self) -> Self {
++ PatType {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatWild {
++ fn clone(&self) -> Self {
++ PatWild {
++ attrs: self.attrs.clone(),
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Path {
++ fn clone(&self) -> Self {
++ Path {
++ leading_colon: self.leading_colon.clone(),
++ segments: self.segments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathArguments {
++ fn clone(&self) -> Self {
++ match self {
++ PathArguments::None => PathArguments::None,
++ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
++ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathSegment {
++ fn clone(&self) -> Self {
++ PathSegment {
++ ident: self.ident.clone(),
++ arguments: self.arguments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateEq {
++ fn clone(&self) -> Self {
++ PredicateEq {
++ lhs_ty: self.lhs_ty.clone(),
++ eq_token: self.eq_token.clone(),
++ rhs_ty: self.rhs_ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateLifetime {
++ fn clone(&self) -> Self {
++ PredicateLifetime {
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateType {
++ fn clone(&self) -> Self {
++ PredicateType {
++ lifetimes: self.lifetimes.clone(),
++ bounded_ty: self.bounded_ty.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for QSelf {
++ fn clone(&self) -> Self {
++ QSelf {
++ lt_token: self.lt_token.clone(),
++ ty: self.ty.clone(),
++ position: self.position.clone(),
++ as_token: self.as_token.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Copy for RangeLimits {}
++#[cfg(feature = "full")]
++impl Clone for RangeLimits {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Receiver {
++ fn clone(&self) -> Self {
++ Receiver {
++ attrs: self.attrs.clone(),
++ reference: self.reference.clone(),
++ mutability: self.mutability.clone(),
++ self_token: self.self_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ReturnType {
++ fn clone(&self) -> Self {
++ match self {
++ ReturnType::Default => ReturnType::Default,
++ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Signature {
++ fn clone(&self) -> Self {
++ Signature {
++ constness: self.constness.clone(),
++ asyncness: self.asyncness.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Stmt {
++ fn clone(&self) -> Self {
++ match self {
++ Stmt::Local(v0) => Stmt::Local(v0.clone()),
++ Stmt::Item(v0) => Stmt::Item(v0.clone()),
++ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
++ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBound {
++ fn clone(&self) -> Self {
++ TraitBound {
++ paren_token: self.paren_token.clone(),
++ modifier: self.modifier.clone(),
++ lifetimes: self.lifetimes.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBoundModifier {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItem {
++ fn clone(&self) -> Self {
++ match self {
++ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
++ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
++ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
++ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
++ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemConst {
++ fn clone(&self) -> Self {
++ TraitItemConst {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMacro {
++ fn clone(&self) -> Self {
++ TraitItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMethod {
++ fn clone(&self) -> Self {
++ TraitItemMethod {
++ attrs: self.attrs.clone(),
++ sig: self.sig.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemType {
++ fn clone(&self) -> Self {
++ TraitItemType {
++ attrs: self.attrs.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Type {
++ fn clone(&self) -> Self {
++ match self {
++ Type::Array(v0) => Type::Array(v0.clone()),
++ Type::BareFn(v0) => Type::BareFn(v0.clone()),
++ Type::Group(v0) => Type::Group(v0.clone()),
++ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
++ Type::Infer(v0) => Type::Infer(v0.clone()),
++ Type::Macro(v0) => Type::Macro(v0.clone()),
++ Type::Never(v0) => Type::Never(v0.clone()),
++ Type::Paren(v0) => Type::Paren(v0.clone()),
++ Type::Path(v0) => Type::Path(v0.clone()),
++ Type::Ptr(v0) => Type::Ptr(v0.clone()),
++ Type::Reference(v0) => Type::Reference(v0.clone()),
++ Type::Slice(v0) => Type::Slice(v0.clone()),
++ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
++ Type::Tuple(v0) => Type::Tuple(v0.clone()),
++ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeArray {
++ fn clone(&self) -> Self {
++ TypeArray {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeBareFn {
++ fn clone(&self) -> Self {
++ TypeBareFn {
++ lifetimes: self.lifetimes.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeGroup {
++ fn clone(&self) -> Self {
++ TypeGroup {
++ group_token: self.group_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeImplTrait {
++ fn clone(&self) -> Self {
++ TypeImplTrait {
++ impl_token: self.impl_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeInfer {
++ fn clone(&self) -> Self {
++ TypeInfer {
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeMacro {
++ fn clone(&self) -> Self {
++ TypeMacro {
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeNever {
++ fn clone(&self) -> Self {
++ TypeNever {
++ bang_token: self.bang_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParam {
++ fn clone(&self) -> Self {
++ TypeParam {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParamBound {
++ fn clone(&self) -> Self {
++ match self {
++ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
++ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParen {
++ fn clone(&self) -> Self {
++ TypeParen {
++ paren_token: self.paren_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePath {
++ fn clone(&self) -> Self {
++ TypePath {
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePtr {
++ fn clone(&self) -> Self {
++ TypePtr {
++ star_token: self.star_token.clone(),
++ const_token: self.const_token.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeReference {
++ fn clone(&self) -> Self {
++ TypeReference {
++ and_token: self.and_token.clone(),
++ lifetime: self.lifetime.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeSlice {
++ fn clone(&self) -> Self {
++ TypeSlice {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTraitObject {
++ fn clone(&self) -> Self {
++ TypeTraitObject {
++ dyn_token: self.dyn_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTuple {
++ fn clone(&self) -> Self {
++ TypeTuple {
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for UnOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGlob {
++ fn clone(&self) -> Self {
++ UseGlob {
++ star_token: self.star_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGroup {
++ fn clone(&self) -> Self {
++ UseGroup {
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseName {
++ fn clone(&self) -> Self {
++ UseName {
++ ident: self.ident.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UsePath {
++ fn clone(&self) -> Self {
++ UsePath {
++ ident: self.ident.clone(),
++ colon2_token: self.colon2_token.clone(),
++ tree: self.tree.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseRename {
++ fn clone(&self) -> Self {
++ UseRename {
++ ident: self.ident.clone(),
++ as_token: self.as_token.clone(),
++ rename: self.rename.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseTree {
++ fn clone(&self) -> Self {
++ match self {
++ UseTree::Path(v0) => UseTree::Path(v0.clone()),
++ UseTree::Name(v0) => UseTree::Name(v0.clone()),
++ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
++ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
++ UseTree::Group(v0) => UseTree::Group(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variadic {
++ fn clone(&self) -> Self {
++ Variadic {
++ attrs: self.attrs.clone(),
++ dots: self.dots.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variant {
++ fn clone(&self) -> Self {
++ Variant {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ fields: self.fields.clone(),
++ discriminant: self.discriminant.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisCrate {
++ fn clone(&self) -> Self {
++ VisCrate {
++ crate_token: self.crate_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisPublic {
++ fn clone(&self) -> Self {
++ VisPublic {
++ pub_token: self.pub_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisRestricted {
++ fn clone(&self) -> Self {
++ VisRestricted {
++ pub_token: self.pub_token.clone(),
++ paren_token: self.paren_token.clone(),
++ in_token: self.in_token.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Visibility {
++ fn clone(&self) -> Self {
++ match self {
++ Visibility::Public(v0) => Visibility::Public(v0.clone()),
++ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
++ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
++ Visibility::Inherited => Visibility::Inherited,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WhereClause {
++ fn clone(&self) -> Self {
++ WhereClause {
++ where_token: self.where_token.clone(),
++ predicates: self.predicates.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WherePredicate {
++ fn clone(&self) -> Self {
++ match self {
++ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
++ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
++ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
+new file mode 100644
+index 0000000000..72baab05f4
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/gen/debug.rs
+@@ -0,0 +1,2857 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++use crate::*;
++use std::fmt::{self, Debug};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Abi {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Abi");
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("name", &self.name);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AngleBracketedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Arm {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Arm");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("guard", &self.guard);
++ formatter.field("fat_arrow_token", &self.fat_arrow_token);
++ formatter.field("body", &self.body);
++ formatter.field("comma", &self.comma);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AttrStyle {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ AttrStyle::Outer => formatter.write_str("Outer"),
++ AttrStyle::Inner(v0) => {
++ let mut formatter = formatter.debug_tuple("Inner");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Attribute {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Attribute");
++ formatter.field("pound_token", &self.pound_token);
++ formatter.field("style", &self.style);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("path", &self.path);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BareFnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BareFnArg");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("name", &self.name);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BinOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ BinOp::Add(v0) => {
++ let mut formatter = formatter.debug_tuple("Add");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Sub(v0) => {
++ let mut formatter = formatter.debug_tuple("Sub");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Mul(v0) => {
++ let mut formatter = formatter.debug_tuple("Mul");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Div(v0) => {
++ let mut formatter = formatter.debug_tuple("Div");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Rem(v0) => {
++ let mut formatter = formatter.debug_tuple("Rem");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::And(v0) => {
++ let mut formatter = formatter.debug_tuple("And");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXor(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXor");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAnd(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAnd");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOr(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shl(v0) => {
++ let mut formatter = formatter.debug_tuple("Shl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shr(v0) => {
++ let mut formatter = formatter.debug_tuple("Shr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Lt(v0) => {
++ let mut formatter = formatter.debug_tuple("Lt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Le(v0) => {
++ let mut formatter = formatter.debug_tuple("Le");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ne(v0) => {
++ let mut formatter = formatter.debug_tuple("Ne");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ge(v0) => {
++ let mut formatter = formatter.debug_tuple("Ge");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Gt(v0) => {
++ let mut formatter = formatter.debug_tuple("Gt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::AddEq(v0) => {
++ let mut formatter = formatter.debug_tuple("AddEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::SubEq(v0) => {
++ let mut formatter = formatter.debug_tuple("SubEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::MulEq(v0) => {
++ let mut formatter = formatter.debug_tuple("MulEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::DivEq(v0) => {
++ let mut formatter = formatter.debug_tuple("DivEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::RemEq(v0) => {
++ let mut formatter = formatter.debug_tuple("RemEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXorEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXorEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAndEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAndEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShlEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShlEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Binding {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Binding");
++ formatter.field("ident", &self.ident);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Block {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Block");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("stmts", &self.stmts);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BoundLifetimes {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BoundLifetimes");
++ formatter.field("for_token", &self.for_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ConstParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ConstParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Constraint {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Constraint");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for Data {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Data::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataEnum");
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataStruct");
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataUnion");
++ formatter.field("union_token", &self.union_token);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DeriveInput {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DeriveInput");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("data", &self.data);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Expr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ let mut formatter = formatter.debug_tuple("Assign");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ let mut formatter = formatter.debug_tuple("AssignOp");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ let mut formatter = formatter.debug_tuple("Async");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ let mut formatter = formatter.debug_tuple("Await");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Binary(v0) => {
++ let mut formatter = formatter.debug_tuple("Binary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ let mut formatter = formatter.debug_tuple("Block");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ let mut formatter = formatter.debug_tuple("Break");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Call(v0) => {
++ let mut formatter = formatter.debug_tuple("Call");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Cast(v0) => {
++ let mut formatter = formatter.debug_tuple("Cast");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ let mut formatter = formatter.debug_tuple("Closure");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ let mut formatter = formatter.debug_tuple("Continue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Field(v0) => {
++ let mut formatter = formatter.debug_tuple("Field");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ let mut formatter = formatter.debug_tuple("ForLoop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ let mut formatter = formatter.debug_tuple("If");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Index(v0) => {
++ let mut formatter = formatter.debug_tuple("Index");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ let mut formatter = formatter.debug_tuple("Let");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ let mut formatter = formatter.debug_tuple("Loop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ let mut formatter = formatter.debug_tuple("Match");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ let mut formatter = formatter.debug_tuple("MethodCall");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ let mut formatter = formatter.debug_tuple("Repeat");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ let mut formatter = formatter.debug_tuple("Return");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ let mut formatter = formatter.debug_tuple("Try");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ let mut formatter = formatter.debug_tuple("TryBlock");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Unary(v0) => {
++ let mut formatter = formatter.debug_tuple("Unary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ let mut formatter = formatter.debug_tuple("Unsafe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ let mut formatter = formatter.debug_tuple("While");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ let mut formatter = formatter.debug_tuple("Yield");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprArray");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssign {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssign");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssignOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssignOp");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAsync {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAsync");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("async_token", &self.async_token);
++ formatter.field("capture", &self.capture);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAwait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAwait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("await_token", &self.await_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprBinary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBinary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBreak {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBreak");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("break_token", &self.break_token);
++ formatter.field("label", &self.label);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("func", &self.func);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCast {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCast");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprClosure {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprClosure");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("movability", &self.movability);
++ formatter.field("capture", &self.capture);
++ formatter.field("or1_token", &self.or1_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("or2_token", &self.or2_token);
++ formatter.field("output", &self.output);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprContinue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprContinue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("continue_token", &self.continue_token);
++ formatter.field("label", &self.label);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprField {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprField");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("member", &self.member);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprForLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprForLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("for_token", &self.for_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprGroup");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("group_token", &self.group_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprIf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIf");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("if_token", &self.if_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("then_branch", &self.then_branch);
++ formatter.field("else_branch", &self.else_branch);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprIndex {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIndex");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("index", &self.index);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLet {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLet");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("loop_token", &self.loop_token);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMatch {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMatch");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("match_token", &self.match_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("arms", &self.arms);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMethodCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMethodCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("receiver", &self.receiver);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("method", &self.method);
++ formatter.field("turbofish", &self.turbofish);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprParen");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("from", &self.from);
++ formatter.field("limits", &self.limits);
++ formatter.field("to", &self.to);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("raw", &self.raw);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRepeat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRepeat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReturn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReturn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("return_token", &self.return_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.field("rest", &self.rest);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTry {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTry");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("question_token", &self.question_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTryBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTryBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("try_token", &self.try_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprUnary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("op", &self.op);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprUnsafe {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnsafe");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("unsafe_token", &self.unsafe_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprWhile {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprWhile");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("while_token", &self.while_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprYield {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprYield");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("yield_token", &self.yield_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Field {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Field");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldPat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldPat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldValue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Fields {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Fields::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unit => formatter.write_str("Unit"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsNamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsNamed");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("named", &self.named);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsUnnamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsUnnamed");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("unnamed", &self.unnamed);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for File {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("File");
++ formatter.field("shebang", &self.shebang);
++ formatter.field("attrs", &self.attrs);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ FnArg::Receiver(v0) => {
++ let mut formatter = formatter.debug_tuple("Receiver");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ FnArg::Typed(v0) => {
++ let mut formatter = formatter.debug_tuple("Typed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ForeignItem::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Binding(v0) => {
++ let mut formatter = formatter.debug_tuple("Binding");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Constraint(v0) => {
++ let mut formatter = formatter.debug_tuple("Constraint");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for GenericMethodArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericMethodArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericParam::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Generics {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Generics");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("params", &self.params);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.field("where_clause", &self.where_clause);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ImplItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Index");
++ formatter.field("index", &self.index);
++ formatter.field("span", &self.span);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Item {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Item::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ExternCrate(v0) => {
++ let mut formatter = formatter.debug_tuple("ExternCrate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ForeignMod(v0) => {
++ let mut formatter = formatter.debug_tuple("ForeignMod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Impl(v0) => {
++ let mut formatter = formatter.debug_tuple("Impl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro2(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro2");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Mod(v0) => {
++ let mut formatter = formatter.debug_tuple("Mod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::TraitAlias(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitAlias");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Use(v0) => {
++ let mut formatter = formatter.debug_tuple("Use");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemEnum");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemExternCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemExternCrate");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("crate_token", &self.crate_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rename", &self.rename);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemForeignMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemForeignMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("abi", &self.abi);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemImpl {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemImpl");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("generics", &self.generics);
++ formatter.field("trait_", &self.trait_);
++ formatter.field("self_ty", &self.self_ty);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro2 {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro2");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("macro_token", &self.macro_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rules", &self.rules);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("mod_token", &self.mod_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("content", &self.content);
++ formatter.field("semi", &self.semi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTrait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("auto_token", &self.auto_token);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("supertraits", &self.supertraits);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTraitAlias {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTraitAlias");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUnion");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("union_token", &self.union_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUse {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUse");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("use_token", &self.use_token);
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("tree", &self.tree);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Label {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Label");
++ formatter.field("name", &self.name);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.finish()
++ }
++}
++impl Debug for Lifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Lifetime");
++ formatter.field("apostrophe", &self.apostrophe);
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for LifetimeDef {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("LifetimeDef");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++impl Debug for Lit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Lit::Str(v0) => {
++ let mut formatter = formatter.debug_tuple("Str");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::ByteStr(v0) => {
++ let mut formatter = formatter.debug_tuple("ByteStr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Byte(v0) => {
++ let mut formatter = formatter.debug_tuple("Byte");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Char(v0) => {
++ let mut formatter = formatter.debug_tuple("Char");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Int(v0) => {
++ let mut formatter = formatter.debug_tuple("Int");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Float(v0) => {
++ let mut formatter = formatter.debug_tuple("Float");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Bool(v0) => {
++ let mut formatter = formatter.debug_tuple("Bool");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Local {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Local");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("init", &self.init);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Macro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Macro");
++ formatter.field("path", &self.path);
++ formatter.field("bang_token", &self.bang_token);
++ formatter.field("delimiter", &self.delimiter);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MacroDelimiter {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ MacroDelimiter::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Brace(v0) => {
++ let mut formatter = formatter.debug_tuple("Brace");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Bracket(v0) => {
++ let mut formatter = formatter.debug_tuple("Bracket");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Member::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Meta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Meta::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::List(v0) => {
++ let mut formatter = formatter.debug_tuple("List");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::NameValue(v0) => {
++ let mut formatter = formatter.debug_tuple("NameValue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaList {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaList");
++ formatter.field("path", &self.path);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("nested", &self.nested);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaNameValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaNameValue");
++ formatter.field("path", &self.path);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for MethodTurbofish {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MethodTurbofish");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for NestedMeta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ NestedMeta::Meta(v0) => {
++ let mut formatter = formatter.debug_tuple("Meta");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ NestedMeta::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ParenthesizedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Pat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Pat::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Ident(v0) => {
++ let mut formatter = formatter.debug_tuple("Ident");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Rest(v0) => {
++ let mut formatter = formatter.debug_tuple("Rest");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::TupleStruct(v0) => {
++ let mut formatter = formatter.debug_tuple("TupleStruct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Wild(v0) => {
++ let mut formatter = formatter.debug_tuple("Wild");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatIdent {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatIdent");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("by_ref", &self.by_ref);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("subpat", &self.subpat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatOr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatOr");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("leading_vert", &self.leading_vert);
++ formatter.field("cases", &self.cases);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lo", &self.lo);
++ formatter.field("limits", &self.limits);
++ formatter.field("hi", &self.hi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRest {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRest");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatSlice");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTupleStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTupleStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatWild {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatWild");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Path {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Path");
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("segments", &self.segments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ PathArguments::None => formatter.write_str("None"),
++ PathArguments::AngleBracketed(v0) => {
++ let mut formatter = formatter.debug_tuple("AngleBracketed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ PathArguments::Parenthesized(v0) => {
++ let mut formatter = formatter.debug_tuple("Parenthesized");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathSegment {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PathSegment");
++ formatter.field("ident", &self.ident);
++ formatter.field("arguments", &self.arguments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateEq {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateEq");
++ formatter.field("lhs_ty", &self.lhs_ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("rhs_ty", &self.rhs_ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateLifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateLifetime");
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateType");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("bounded_ty", &self.bounded_ty);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for QSelf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("QSelf");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("position", &self.position);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for RangeLimits {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ RangeLimits::HalfOpen(v0) => {
++ let mut formatter = formatter.debug_tuple("HalfOpen");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ RangeLimits::Closed(v0) => {
++ let mut formatter = formatter.debug_tuple("Closed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Receiver {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Receiver");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("reference", &self.reference);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("self_token", &self.self_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ReturnType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ReturnType::Default => formatter.write_str("Default"),
++ ReturnType::Type(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Signature {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Signature");
++ formatter.field("constness", &self.constness);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Stmt {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Stmt::Local(v0) => {
++ let mut formatter = formatter.debug_tuple("Local");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Item(v0) => {
++ let mut formatter = formatter.debug_tuple("Item");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Expr(v0) => {
++ let mut formatter = formatter.debug_tuple("Expr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Semi(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Semi");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitBound");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("modifier", &self.modifier);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBoundModifier {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitBoundModifier::None => formatter.write_str("None"),
++ TraitBoundModifier::Maybe(v0) => {
++ let mut formatter = formatter.debug_tuple("Maybe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("sig", &self.sig);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Type {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Type::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::BareFn(v0) => {
++ let mut formatter = formatter.debug_tuple("BareFn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::ImplTrait(v0) => {
++ let mut formatter = formatter.debug_tuple("ImplTrait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Infer(v0) => {
++ let mut formatter = formatter.debug_tuple("Infer");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Never(v0) => {
++ let mut formatter = formatter.debug_tuple("Never");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Ptr(v0) => {
++ let mut formatter = formatter.debug_tuple("Ptr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::TraitObject(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitObject");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeArray");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeBareFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeBareFn");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeGroup");
++ formatter.field("group_token", &self.group_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeImplTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeImplTrait");
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeInfer {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeInfer");
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeMacro");
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeNever {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeNever");
++ formatter.field("bang_token", &self.bang_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParamBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TypeParamBound::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParen");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePath");
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePtr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePtr");
++ formatter.field("star_token", &self.star_token);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeReference");
++ formatter.field("and_token", &self.and_token);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeSlice");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTraitObject {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTraitObject");
++ formatter.field("dyn_token", &self.dyn_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTuple");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for UnOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UnOp::Deref(v0) => {
++ let mut formatter = formatter.debug_tuple("Deref");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Not(v0) => {
++ let mut formatter = formatter.debug_tuple("Not");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Neg(v0) => {
++ let mut formatter = formatter.debug_tuple("Neg");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGlob {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGlob");
++ formatter.field("star_token", &self.star_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGroup");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseName {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseName");
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UsePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UsePath");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("tree", &self.tree);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseRename {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseRename");
++ formatter.field("ident", &self.ident);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("rename", &self.rename);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseTree {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UseTree::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Name(v0) => {
++ let mut formatter = formatter.debug_tuple("Name");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Rename(v0) => {
++ let mut formatter = formatter.debug_tuple("Rename");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Glob(v0) => {
++ let mut formatter = formatter.debug_tuple("Glob");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variadic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variadic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dots", &self.dots);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variant {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variant");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("fields", &self.fields);
++ formatter.field("discriminant", &self.discriminant);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisCrate");
++ formatter.field("crate_token", &self.crate_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisPublic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisPublic");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisRestricted {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisRestricted");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Visibility {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Visibility::Public(v0) => {
++ let mut formatter = formatter.debug_tuple("Public");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Crate(v0) => {
++ let mut formatter = formatter.debug_tuple("Crate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Restricted(v0) => {
++ let mut formatter = formatter.debug_tuple("Restricted");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Inherited => formatter.write_str("Inherited"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WhereClause {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("WhereClause");
++ formatter.field("where_token", &self.where_token);
++ formatter.field("predicates", &self.predicates);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WherePredicate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ WherePredicate::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
+new file mode 100644
+index 0000000000..15b2bcbbde
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/gen/eq.rs
+@@ -0,0 +1,1930 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Abi {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Abi {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AngleBracketedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AngleBracketedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.colon2_token == other.colon2_token && self.args == other.args
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Arm {}
++#[cfg(feature = "full")]
++impl PartialEq for Arm {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.pat == other.pat
++ && self.guard == other.guard
++ && self.body == other.body
++ && self.comma == other.comma
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AttrStyle {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (AttrStyle::Outer, AttrStyle::Outer) => true,
++ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Attribute {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Attribute {
++ fn eq(&self, other: &Self) -> bool {
++ self.style == other.style
++ && self.path == other.path
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BareFnArg {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BareFnArg {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BinOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (BinOp::Add(_), BinOp::Add(_)) => true,
++ (BinOp::Sub(_), BinOp::Sub(_)) => true,
++ (BinOp::Mul(_), BinOp::Mul(_)) => true,
++ (BinOp::Div(_), BinOp::Div(_)) => true,
++ (BinOp::Rem(_), BinOp::Rem(_)) => true,
++ (BinOp::And(_), BinOp::And(_)) => true,
++ (BinOp::Or(_), BinOp::Or(_)) => true,
++ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
++ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
++ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
++ (BinOp::Shl(_), BinOp::Shl(_)) => true,
++ (BinOp::Shr(_), BinOp::Shr(_)) => true,
++ (BinOp::Eq(_), BinOp::Eq(_)) => true,
++ (BinOp::Lt(_), BinOp::Lt(_)) => true,
++ (BinOp::Le(_), BinOp::Le(_)) => true,
++ (BinOp::Ne(_), BinOp::Ne(_)) => true,
++ (BinOp::Ge(_), BinOp::Ge(_)) => true,
++ (BinOp::Gt(_), BinOp::Gt(_)) => true,
++ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
++ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
++ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
++ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
++ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
++ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
++ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
++ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
++ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
++ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Binding {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Binding {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Block {}
++#[cfg(feature = "full")]
++impl PartialEq for Block {
++ fn eq(&self, other: &Self) -> bool {
++ self.stmts == other.stmts
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BoundLifetimes {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BoundLifetimes {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ConstParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ConstParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Constraint {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Constraint {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for Data {}
++#[cfg(feature = "derive")]
++impl PartialEq for Data {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
++ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
++ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataEnum {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.variants == other.variants
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataStruct {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataUnion {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DeriveInput {}
++#[cfg(feature = "derive")]
++impl PartialEq for DeriveInput {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.data == other.data
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Expr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Expr {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ #[cfg(feature = "full")]
++ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
++ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
++ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
++ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
++ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
++ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
++ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
++ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
++ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
++ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
++ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ #[cfg(feature = "full")]
++ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprArray {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssign {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssign {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.left == other.left && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssignOp {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssignOp {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAsync {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAsync {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAwait {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAwait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprBinary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprBinary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBox {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBreak {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBreak {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCall {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.func == other.func && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCast {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCast {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprClosure {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprClosure {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.asyncness == other.asyncness
++ && self.movability == other.movability
++ && self.capture == other.capture
++ && self.inputs == other.inputs
++ && self.output == other.output
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprContinue {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprContinue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprField {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprField {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base && self.member == other.member
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprForLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprForLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.pat == other.pat
++ && self.expr == other.expr
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprIf {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprIf {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.cond == other.cond
++ && self.then_branch == other.then_branch
++ && self.else_branch == other.else_branch
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprIndex {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprIndex {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLet {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLet {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprLit {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMatch {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMatch {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMethodCall {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMethodCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.receiver == other.receiver
++ && self.method == other.method
++ && self.turbofish == other.turbofish
++ && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprPath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRange {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.from == other.from
++ && self.limits == other.limits
++ && self.to == other.to
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReference {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRepeat {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRepeat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReturn {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReturn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ && self.rest == other.rest
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTry {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTry {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTryBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTryBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprType {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprUnary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprUnary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprUnsafe {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprUnsafe {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprWhile {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprWhile {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.cond == other.cond
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprYield {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprYield {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Field {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Field {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldPat {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldPat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldValue {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Fields {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Fields {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
++ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
++ (Fields::Unit, Fields::Unit) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsNamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsNamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.named == other.named
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsUnnamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsUnnamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.unnamed == other.unnamed
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for File {}
++#[cfg(feature = "full")]
++impl PartialEq for File {
++ fn eq(&self, other: &Self) -> bool {
++ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FnArg {}
++#[cfg(feature = "full")]
++impl PartialEq for FnArg {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
++ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
++ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
++ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
++ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
++ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericArgument {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
++ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
++ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for GenericMethodArgument {}
++#[cfg(feature = "full")]
++impl PartialEq for GenericMethodArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
++ self0 == other0
++ }
++ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericParam {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
++ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
++ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Generics {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Generics {
++ fn eq(&self, other: &Self) -> bool {
++ self.lt_token == other.lt_token
++ && self.params == other.params
++ && self.gt_token == other.gt_token
++ && self.where_clause == other.where_clause
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
++ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
++ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
++ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
++ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Item {}
++#[cfg(feature = "full")]
++impl PartialEq for Item {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
++ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
++ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
++ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
++ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
++ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
++ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
++ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
++ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
++ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
++ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
++ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
++ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
++ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
++ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
++ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
++ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemEnum {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.variants == other.variants
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemExternCrate {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemExternCrate {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemForeignMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemForeignMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemImpl {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemImpl {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.defaultness == other.defaultness
++ && self.unsafety == other.unsafety
++ && self.generics == other.generics
++ && self.trait_ == other.trait_
++ && self.self_ty == other.self_ty
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.mac == other.mac
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro2 {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro2 {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.content == other.content
++ && self.semi == other.semi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTrait {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.unsafety == other.unsafety
++ && self.auto_token == other.auto_token
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.supertraits == other.supertraits
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTraitAlias {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTraitAlias {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUnion {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUse {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUse {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.leading_colon == other.leading_colon
++ && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Label {}
++#[cfg(feature = "full")]
++impl PartialEq for Label {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for LifetimeDef {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for LifetimeDef {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lifetime == other.lifetime
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ }
++}
++impl Eq for Lit {}
++impl PartialEq for Lit {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
++ (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
++ (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
++ (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
++ (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
++ (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
++ (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
++ (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
++ self0.to_string() == other0.to_string()
++ }
++ _ => false,
++ }
++ }
++}
++impl Eq for LitBool {}
++impl PartialEq for LitBool {
++ fn eq(&self, other: &Self) -> bool {
++ self.value == other.value
++ }
++}
++impl Eq for LitByte {}
++impl Eq for LitByteStr {}
++impl Eq for LitChar {}
++impl Eq for LitFloat {}
++impl Eq for LitInt {}
++impl Eq for LitStr {}
++#[cfg(feature = "full")]
++impl Eq for Local {}
++#[cfg(feature = "full")]
++impl PartialEq for Local {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Macro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Macro {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path
++ && self.delimiter == other.delimiter
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MacroDelimiter {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MacroDelimiter {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
++ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
++ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Meta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Meta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
++ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
++ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaList {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaList {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.nested == other.nested
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaNameValue {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaNameValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for MethodTurbofish {}
++#[cfg(feature = "full")]
++impl PartialEq for MethodTurbofish {
++ fn eq(&self, other: &Self) -> bool {
++ self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for NestedMeta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for NestedMeta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
++ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ParenthesizedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ParenthesizedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.inputs == other.inputs && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Pat {}
++#[cfg(feature = "full")]
++impl PartialEq for Pat {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
++ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
++ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
++ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
++ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
++ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
++ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
++ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
++ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
++ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
++ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
++ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
++ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
++ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
++ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatBox {}
++#[cfg(feature = "full")]
++impl PartialEq for PatBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatIdent {}
++#[cfg(feature = "full")]
++impl PartialEq for PatIdent {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.by_ref == other.by_ref
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.subpat == other.subpat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatLit {}
++#[cfg(feature = "full")]
++impl PartialEq for PatLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for PatMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatOr {}
++#[cfg(feature = "full")]
++impl PartialEq for PatOr {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.leading_vert == other.leading_vert
++ && self.cases == other.cases
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatPath {}
++#[cfg(feature = "full")]
++impl PartialEq for PatPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRange {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lo == other.lo
++ && self.limits == other.limits
++ && self.hi == other.hi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatReference {}
++#[cfg(feature = "full")]
++impl PartialEq for PatReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRest {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRest {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatSlice {}
++#[cfg(feature = "full")]
++impl PartialEq for PatSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTupleStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTupleStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatType {}
++#[cfg(feature = "full")]
++impl PartialEq for PatType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatWild {}
++#[cfg(feature = "full")]
++impl PartialEq for PatWild {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Path {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Path {
++ fn eq(&self, other: &Self) -> bool {
++ self.leading_colon == other.leading_colon && self.segments == other.segments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathArguments {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (PathArguments::None, PathArguments::None) => true,
++ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
++ self0 == other0
++ }
++ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathSegment {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathSegment {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.arguments == other.arguments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateEq {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateEq {
++ fn eq(&self, other: &Self) -> bool {
++ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateLifetime {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateLifetime {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateType {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.bounded_ty == other.bounded_ty
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for QSelf {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for QSelf {
++ fn eq(&self, other: &Self) -> bool {
++ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for RangeLimits {}
++#[cfg(feature = "full")]
++impl PartialEq for RangeLimits {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
++ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Receiver {}
++#[cfg(feature = "full")]
++impl PartialEq for Receiver {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.reference == other.reference
++ && self.mutability == other.mutability
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ReturnType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ReturnType {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ReturnType::Default, ReturnType::Default) => true,
++ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Signature {}
++#[cfg(feature = "full")]
++impl PartialEq for Signature {
++ fn eq(&self, other: &Self) -> bool {
++ self.constness == other.constness
++ && self.asyncness == other.asyncness
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Stmt {}
++#[cfg(feature = "full")]
++impl PartialEq for Stmt {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
++ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
++ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
++ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBound {
++ fn eq(&self, other: &Self) -> bool {
++ self.paren_token == other.paren_token
++ && self.modifier == other.modifier
++ && self.lifetimes == other.lifetimes
++ && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBoundModifier {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
++ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItem {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
++ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
++ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
++ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
++ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.default == other.default
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.sig == other.sig
++ && self.default == other.default
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Type {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Type {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
++ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
++ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
++ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
++ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
++ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
++ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
++ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
++ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
++ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
++ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
++ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
++ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
++ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
++ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeArray {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem && self.len == other.len
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeBareFn {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeBareFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeGroup {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeImplTrait {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeImplTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeInfer {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeInfer {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeMacro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.mac == other.mac
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeNever {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeNever {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParamBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParamBound {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
++ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePtr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePtr {
++ fn eq(&self, other: &Self) -> bool {
++ self.const_token == other.const_token
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeReference {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeSlice {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTraitObject {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTraitObject {
++ fn eq(&self, other: &Self) -> bool {
++ self.dyn_token == other.dyn_token && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTuple {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.elems == other.elems
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for UnOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UnOp::Deref(_), UnOp::Deref(_)) => true,
++ (UnOp::Not(_), UnOp::Not(_)) => true,
++ (UnOp::Neg(_), UnOp::Neg(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGlob {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGlob {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseName {}
++#[cfg(feature = "full")]
++impl PartialEq for UseName {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UsePath {}
++#[cfg(feature = "full")]
++impl PartialEq for UsePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseRename {}
++#[cfg(feature = "full")]
++impl PartialEq for UseRename {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseTree {}
++#[cfg(feature = "full")]
++impl PartialEq for UseTree {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
++ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
++ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
++ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
++ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variadic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variadic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variant {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variant {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.fields == other.fields
++ && self.discriminant == other.discriminant
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisCrate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisCrate {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisPublic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisPublic {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisRestricted {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisRestricted {
++ fn eq(&self, other: &Self) -> bool {
++ self.in_token == other.in_token && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Visibility {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Visibility {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
++ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
++ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
++ (Visibility::Inherited, Visibility::Inherited) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WhereClause {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WhereClause {
++ fn eq(&self, other: &Self) -> bool {
++ self.predicates == other.predicates
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WherePredicate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WherePredicate {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
++ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
++ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
+index f51218b78c..d9dd32a420 100644
+--- mozilla-release/third_party/rust/syn/src/gen/fold.rs
++++ mozilla-release/third_party/rust/syn/src/gen/fold.rs
+@@ -2,6 +2,7 @@
+ // It is not intended for manual editing.
+
+ #![allow(unreachable_code, unused_variables)]
++#![allow(clippy::match_wildcard_for_single_variants)]
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -26,7 +27,7 @@ macro_rules! full {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"fold"` feature.*
++/// *This trait is available only if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_abi(&mut self, i: Abi) -> Abi {
+@@ -433,35 +434,27 @@ pub trait Fold {
+ fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
+ fold_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit(&mut self, i: Lit) -> Lit {
+ fold_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
+ fold_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
+ fold_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
+ fold_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
+ fold_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
+ fold_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+ fold_lit_str(self, i)
+ }
+@@ -799,10 +792,10 @@ where
+ F: Fold + ?Sized,
+ {
+ AngleBracketedGenericArguments {
+- colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -819,9 +812,9 @@ where
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
++ fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
+ body: Box::new(f.fold_expr(*node.body)),
+- comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
++ comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -842,7 +835,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Attribute {
+- pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
++ pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
+ style: f.fold_attr_style(node.style),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ path: f.fold_path(node.path),
+@@ -859,7 +852,7 @@ where
+ name: (node.name).map(|it| {
+ (
+ f.fold_ident((it).0),
+- Token ! [ : ](tokens_helper(f, &(it).1.spans)),
++ Token ! [:](tokens_helper(f, &(it).1.spans)),
+ )
+ }),
+ ty: f.fold_type(node.ty),
+@@ -871,59 +864,47 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
++ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXor(_binding_0) => {
+- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAnd(_binding_0) => {
+- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::BitOr(_binding_0) => {
+- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::AddEq(_binding_0) => {
+- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::SubEq(_binding_0) => {
+- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::MulEq(_binding_0) => {
+- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::DivEq(_binding_0) => {
+- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::RemEq(_binding_0) => {
+- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
+ }
++ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXorEq(_binding_0) => {
+- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAndEq(_binding_0) => {
+- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitOrEq(_binding_0) => {
+- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShlEq(_binding_0) => {
+- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShrEq(_binding_0) => {
+- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -934,7 +915,7 @@ where
+ {
+ Binding {
+ ident: f.fold_ident(node.ident),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -955,9 +936,9 @@ where
+ {
+ BoundLifetimes {
+ for_token: Token![for](tokens_helper(f, &node.for_token.span)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -969,9 +950,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+ }
+@@ -982,7 +963,7 @@ where
+ {
+ Constraint {
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -1016,7 +997,7 @@ where
+ DataStruct {
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "derive")]
+@@ -1112,7 +1093,7 @@ where
+ ExprAssign {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ left: Box::new(f.fold_expr(*node.left)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+ }
+@@ -1148,7 +1129,7 @@ where
+ ExprAwait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
+ }
+ }
+@@ -1232,9 +1213,9 @@ where
+ asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
+ movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
+ capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
+- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
++ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
+ inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
+- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
++ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+@@ -1258,7 +1239,7 @@ where
+ ExprField {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ member: f.fold_member(node.member),
+ }
+ }
+@@ -1327,7 +1308,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+@@ -1384,7 +1365,7 @@ where
+ ExprMethodCall {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
+ paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
+@@ -1432,7 +1413,7 @@ where
+ {
+ ExprReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ raw: node.raw,
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ expr: Box::new(f.fold_expr(*node.expr)),
+@@ -1447,7 +1428,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+ }
+@@ -1484,7 +1465,7 @@ where
+ ExprTry {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
++ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1517,7 +1498,7 @@ where
+ ExprType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -1576,7 +1557,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -1588,7 +1569,7 @@ where
+ FieldPat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+@@ -1600,7 +1581,7 @@ where
+ FieldValue {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ expr: f.fold_expr(node.expr),
+ }
+ }
+@@ -1681,7 +1662,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1692,7 +1673,7 @@ where
+ ForeignItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1706,9 +1687,9 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1721,7 +1702,7 @@ where
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1779,9 +1760,9 @@ where
+ F: Fold + ?Sized,
+ {
+ Generics {
+- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
++ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
+ params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
+- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
++ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+ }
+@@ -1819,11 +1800,11 @@ where
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: f.fold_expr(node.expr),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1834,7 +1815,7 @@ where
+ ImplItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1862,9 +1843,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1913,11 +1894,11 @@ where
+ vis: f.fold_visibility(node.vis),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1952,7 +1933,7 @@ where
+ f.fold_ident((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2011,7 +1992,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2043,7 +2024,7 @@ where
+ FoldHelper::lift((it).1, |it| f.fold_item(it)),
+ )
+ }),
+- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2057,11 +2038,11 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2076,7 +2057,7 @@ where
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2092,7 +2073,7 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
+ brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
+ items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
+@@ -2109,9 +2090,9 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2125,9 +2106,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2153,9 +2134,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ use_token: Token![use](tokens_helper(f, &node.use_token.span)),
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ tree: f.fold_use_tree(node.tree),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2165,7 +2146,7 @@ where
+ {
+ Label {
+ name: f.fold_lifetime(node.name),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ }
+ }
+ pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
+@@ -2185,11 +2166,10 @@ where
+ LifetimeDef {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
+ where
+ F: Fold + ?Sized,
+@@ -2205,7 +2185,6 @@ where
+ Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
+ where
+ F: Fold + ?Sized,
+@@ -2215,7 +2194,6 @@ where
+ span: f.fold_span(node.span),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
+ where
+ F: Fold + ?Sized,
+@@ -2225,7 +2203,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
+ where
+ F: Fold + ?Sized,
+@@ -2235,7 +2212,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
+ where
+ F: Fold + ?Sized,
+@@ -2245,7 +2221,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
+ where
+ F: Fold + ?Sized,
+@@ -2255,7 +2230,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
+ where
+ F: Fold + ?Sized,
+@@ -2265,7 +2239,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
+ where
+ F: Fold + ?Sized,
+@@ -2286,11 +2259,11 @@ where
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2361,7 +2334,7 @@ where
+ {
+ MetaNameValue {
+ path: f.fold_path(node.path),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ lit: f.fold_lit(node.lit),
+ }
+ }
+@@ -2371,10 +2344,10 @@ where
+ F: Fold + ?Sized,
+ {
+ MethodTurbofish {
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2449,7 +2422,7 @@ where
+ ident: f.fold_ident(node.ident),
+ subpat: (node.subpat).map(|it| {
+ (
+- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
++ Token ! [@](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_pat(*(it).1)),
+ )
+ }),
+@@ -2482,7 +2455,7 @@ where
+ {
+ PatOr {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
++ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
+ cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
+ }
+ }
+@@ -2516,7 +2489,7 @@ where
+ {
+ PatReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+@@ -2585,7 +2558,7 @@ where
+ PatType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: Box::new(f.fold_pat(*node.pat)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -2605,7 +2578,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Path {
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
+ }
+ }
+@@ -2641,7 +2614,7 @@ where
+ {
+ PredicateEq {
+ lhs_ty: f.fold_type(node.lhs_ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ rhs_ty: f.fold_type(node.rhs_ty),
+ }
+ }
+@@ -2652,7 +2625,7 @@ where
+ {
+ PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+@@ -2664,7 +2637,7 @@ where
+ PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -2674,11 +2647,11 @@ where
+ F: Fold + ?Sized,
+ {
+ QSelf {
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2691,7 +2664,7 @@ where
+ RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
+ }
+ RangeLimits::Closed(_binding_0) => {
+- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
++ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2704,7 +2677,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ reference: (node.reference).map(|it| {
+ (
+- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
++ Token ! [&](tokens_helper(f, &(it).0.spans)),
+ ((it).1).map(|it| f.fold_lifetime(it)),
+ )
+ }),
+@@ -2720,7 +2693,7 @@ where
+ match node {
+ ReturnType::Default => ReturnType::Default,
+ ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
+- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
++ Token ! [->](tokens_helper(f, &_binding_0.spans)),
+ Box::new(f.fold_type(*_binding_1)),
+ ),
+ }
+@@ -2761,7 +2734,7 @@ where
+ Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
+ Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
+ f.fold_expr(_binding_0),
+- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
++ Token ! [;](tokens_helper(f, &_binding_1.spans)),
+ ),
+ }
+ }
+@@ -2785,7 +2758,7 @@ where
+ match node {
+ TraitBoundModifier::None => TraitBoundModifier::None,
+ TraitBoundModifier::Maybe(_binding_0) => {
+- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
++ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2812,15 +2785,15 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2831,7 +2804,7 @@ where
+ TraitItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2843,7 +2816,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2856,15 +2829,15 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_type((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2899,7 +2872,7 @@ where
+ TypeArray {
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ elem: Box::new(f.fold_type(*node.elem)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: f.fold_expr(node.len),
+ }
+ }
+@@ -2974,9 +2947,9 @@ where
+ TypeParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+ }
+@@ -3018,7 +2991,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypePtr {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3030,7 +3003,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypeReference {
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3072,9 +3045,9 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
+ UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
+- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3083,7 +3056,7 @@ where
+ F: Fold + ?Sized,
+ {
+ UseGlob {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3112,7 +3085,7 @@ where
+ {
+ UsePath {
+ ident: f.fold_ident(node.ident),
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+ }
+@@ -3147,7 +3120,7 @@ where
+ {
+ Variadic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
++ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -3161,7 +3134,7 @@ where
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
+new file mode 100644
+index 0000000000..9e9e84a7af
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/gen/hash.rs
+@@ -0,0 +1,2691 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++use std::hash::{Hash, Hasher};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Abi {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AngleBracketedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.colon2_token.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Arm {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.guard.hash(state);
++ self.body.hash(state);
++ self.comma.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AttrStyle {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ AttrStyle::Outer => {
++ state.write_u8(0u8);
++ }
++ AttrStyle::Inner(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Attribute {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.style.hash(state);
++ self.path.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BareFnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.name.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BinOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ BinOp::Add(_) => {
++ state.write_u8(0u8);
++ }
++ BinOp::Sub(_) => {
++ state.write_u8(1u8);
++ }
++ BinOp::Mul(_) => {
++ state.write_u8(2u8);
++ }
++ BinOp::Div(_) => {
++ state.write_u8(3u8);
++ }
++ BinOp::Rem(_) => {
++ state.write_u8(4u8);
++ }
++ BinOp::And(_) => {
++ state.write_u8(5u8);
++ }
++ BinOp::Or(_) => {
++ state.write_u8(6u8);
++ }
++ BinOp::BitXor(_) => {
++ state.write_u8(7u8);
++ }
++ BinOp::BitAnd(_) => {
++ state.write_u8(8u8);
++ }
++ BinOp::BitOr(_) => {
++ state.write_u8(9u8);
++ }
++ BinOp::Shl(_) => {
++ state.write_u8(10u8);
++ }
++ BinOp::Shr(_) => {
++ state.write_u8(11u8);
++ }
++ BinOp::Eq(_) => {
++ state.write_u8(12u8);
++ }
++ BinOp::Lt(_) => {
++ state.write_u8(13u8);
++ }
++ BinOp::Le(_) => {
++ state.write_u8(14u8);
++ }
++ BinOp::Ne(_) => {
++ state.write_u8(15u8);
++ }
++ BinOp::Ge(_) => {
++ state.write_u8(16u8);
++ }
++ BinOp::Gt(_) => {
++ state.write_u8(17u8);
++ }
++ BinOp::AddEq(_) => {
++ state.write_u8(18u8);
++ }
++ BinOp::SubEq(_) => {
++ state.write_u8(19u8);
++ }
++ BinOp::MulEq(_) => {
++ state.write_u8(20u8);
++ }
++ BinOp::DivEq(_) => {
++ state.write_u8(21u8);
++ }
++ BinOp::RemEq(_) => {
++ state.write_u8(22u8);
++ }
++ BinOp::BitXorEq(_) => {
++ state.write_u8(23u8);
++ }
++ BinOp::BitAndEq(_) => {
++ state.write_u8(24u8);
++ }
++ BinOp::BitOrEq(_) => {
++ state.write_u8(25u8);
++ }
++ BinOp::ShlEq(_) => {
++ state.write_u8(26u8);
++ }
++ BinOp::ShrEq(_) => {
++ state.write_u8(27u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Binding {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Block {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.stmts.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BoundLifetimes {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ConstParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Constraint {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for Data {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Data::Struct(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Data::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Data::Union(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DeriveInput {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.data.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Expr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Expr::Binary(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Expr::Call(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Expr::Cast(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Expr::Field(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ state.write_u8(16u8);
++ v0.hash(state);
++ }
++ Expr::Index(v0) => {
++ state.write_u8(17u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ state.write_u8(18u8);
++ v0.hash(state);
++ }
++ Expr::Lit(v0) => {
++ state.write_u8(19u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ state.write_u8(20u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ state.write_u8(21u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ state.write_u8(22u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ state.write_u8(23u8);
++ v0.hash(state);
++ }
++ Expr::Paren(v0) => {
++ state.write_u8(24u8);
++ v0.hash(state);
++ }
++ Expr::Path(v0) => {
++ state.write_u8(25u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ state.write_u8(26u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ state.write_u8(27u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ state.write_u8(28u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ state.write_u8(29u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ state.write_u8(30u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ state.write_u8(31u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ state.write_u8(32u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ state.write_u8(33u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ state.write_u8(34u8);
++ v0.hash(state);
++ }
++ Expr::Unary(v0) => {
++ state.write_u8(35u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ state.write_u8(36u8);
++ v0.hash(state);
++ }
++ Expr::Verbatim(v0) => {
++ state.write_u8(37u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ state.write_u8(38u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ state.write_u8(39u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssign {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssignOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAsync {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.capture.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAwait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprBinary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBreak {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.func.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCast {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprClosure {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.asyncness.hash(state);
++ self.movability.hash(state);
++ self.capture.hash(state);
++ self.inputs.hash(state);
++ self.output.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprContinue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprField {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ self.member.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprForLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprIf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.cond.hash(state);
++ self.then_branch.hash(state);
++ self.else_branch.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprIndex {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.index.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLet {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMatch {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.arms.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMethodCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.receiver.hash(state);
++ self.method.hash(state);
++ self.turbofish.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.from.hash(state);
++ self.limits.hash(state);
++ self.to.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRepeat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReturn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ self.rest.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTry {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTryBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprUnary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.op.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprUnsafe {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprWhile {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.cond.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprYield {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Field {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldPat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Fields {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Fields::Named(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Fields::Unnamed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Fields::Unit => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsNamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.named.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsUnnamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.unnamed.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for File {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.shebang.hash(state);
++ self.attrs.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ FnArg::Receiver(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ FnArg::Typed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ForeignItem::Fn(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ForeignItem::Static(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ForeignItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ForeignItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ForeignItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericArgument::Type(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericArgument::Binding(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ GenericArgument::Constraint(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ GenericArgument::Const(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for GenericMethodArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericMethodArgument::Const(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericParam::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericParam::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericParam::Const(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Generics {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lt_token.hash(state);
++ self.params.hash(state);
++ self.gt_token.hash(state);
++ self.where_clause.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ImplItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ImplItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ImplItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ImplItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ImplItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Item {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Item::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Item::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Item::ExternCrate(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Item::Fn(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Item::ForeignMod(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Item::Impl(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Item::Macro(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Item::Macro2(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Item::Mod(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Item::Static(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Item::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Item::Trait(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Item::TraitAlias(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Item::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Item::Union(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ Item::Use(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ Item::Verbatim(v0) => {
++ state.write_u8(16u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemExternCrate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemForeignMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.abi.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemImpl {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.defaultness.hash(state);
++ self.unsafety.hash(state);
++ self.generics.hash(state);
++ self.trait_.hash(state);
++ self.self_ty.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro2 {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ TokenStreamHelper(&self.rules).hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.content.hash(state);
++ self.semi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.unsafety.hash(state);
++ self.auto_token.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.supertraits.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTraitAlias {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUse {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.leading_colon.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Label {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for LifetimeDef {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lifetime.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++impl Hash for Lit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Lit::Str(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Lit::ByteStr(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Lit::Byte(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Lit::Char(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Lit::Int(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Lit::Float(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Lit::Bool(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Lit::Verbatim(v0) => {
++ state.write_u8(7u8);
++ v0.to_string().hash(state);
++ }
++ }
++ }
++}
++impl Hash for LitBool {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.value.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Local {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.init.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Macro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.delimiter.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MacroDelimiter {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ MacroDelimiter::Paren(_) => {
++ state.write_u8(0u8);
++ }
++ MacroDelimiter::Brace(_) => {
++ state.write_u8(1u8);
++ }
++ MacroDelimiter::Bracket(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Meta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Meta::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Meta::List(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Meta::NameValue(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaList {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.nested.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaNameValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for MethodTurbofish {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for NestedMeta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ NestedMeta::Meta(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ NestedMeta::Lit(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ParenthesizedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.inputs.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Pat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Pat::Box(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Pat::Ident(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Pat::Lit(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Pat::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Pat::Or(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Pat::Path(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Pat::Range(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Pat::Reference(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Pat::Rest(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Pat::Slice(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Pat::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Pat::Tuple(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Pat::TupleStruct(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Pat::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Pat::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ Pat::Wild(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatIdent {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.by_ref.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.subpat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatOr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.leading_vert.hash(state);
++ self.cases.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lo.hash(state);
++ self.limits.hash(state);
++ self.hi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRest {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTupleStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatWild {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Path {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.leading_colon.hash(state);
++ self.segments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ PathArguments::None => {
++ state.write_u8(0u8);
++ }
++ PathArguments::AngleBracketed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ PathArguments::Parenthesized(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathSegment {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.arguments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateEq {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lhs_ty.hash(state);
++ self.rhs_ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateLifetime {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.bounded_ty.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for QSelf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ty.hash(state);
++ self.position.hash(state);
++ self.as_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for RangeLimits {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ RangeLimits::HalfOpen(_) => {
++ state.write_u8(0u8);
++ }
++ RangeLimits::Closed(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Receiver {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.reference.hash(state);
++ self.mutability.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ReturnType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ReturnType::Default => {
++ state.write_u8(0u8);
++ }
++ ReturnType::Type(_, v1) => {
++ state.write_u8(1u8);
++ v1.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Signature {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.constness.hash(state);
++ self.asyncness.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Stmt {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Stmt::Local(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Stmt::Item(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Stmt::Expr(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Stmt::Semi(v0, _) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.paren_token.hash(state);
++ self.modifier.hash(state);
++ self.lifetimes.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBoundModifier {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitBoundModifier::None => {
++ state.write_u8(0u8);
++ }
++ TraitBoundModifier::Maybe(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TraitItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ TraitItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ TraitItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ TraitItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.sig.hash(state);
++ self.default.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Type {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Type::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Type::BareFn(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Type::Group(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Type::ImplTrait(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Type::Infer(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Type::Macro(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Type::Never(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Type::Paren(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Type::Path(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Type::Ptr(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Type::Reference(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Type::Slice(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Type::TraitObject(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Type::Tuple(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Type::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeBareFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeImplTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeInfer {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.mac.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeNever {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParamBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TypeParamBound::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePtr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.const_token.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTraitObject {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.dyn_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elems.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for UnOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UnOp::Deref(_) => {
++ state.write_u8(0u8);
++ }
++ UnOp::Not(_) => {
++ state.write_u8(1u8);
++ }
++ UnOp::Neg(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGlob {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseName {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UsePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseRename {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseTree {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UseTree::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ UseTree::Name(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ UseTree::Rename(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ UseTree::Glob(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ UseTree::Group(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variadic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variant {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.fields.hash(state);
++ self.discriminant.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisCrate {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisPublic {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisRestricted {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.in_token.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Visibility {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Visibility::Public(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Visibility::Crate(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Visibility::Restricted(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Visibility::Inherited => {
++ state.write_u8(3u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WhereClause {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.predicates.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WherePredicate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ WherePredicate::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ WherePredicate::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ WherePredicate::Eq(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
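The generated impls added above all follow one shape: write an explicit discriminant byte for the enum variant, then hash the variant's payload fields (or only the tag for unit-like variants), which mirrors what `#[derive(Hash)]` would emit. A minimal hand-rolled sketch of that pattern on a hypothetical enum (illustration only, not part of the patch, and not a syn type):

```rust
// Illustration only, not part of the patch: the manual Hash impls above write a
// variant tag byte before hashing the payload, mirroring #[derive(Hash)].
// "Shape" is a hypothetical enum, not a syn type.
use std::hash::{Hash, Hasher};

enum Shape {
    Circle(u32),
    Square { side: u32 },
    Point,
}

impl Hash for Shape {
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            Shape::Circle(r) => {
                state.write_u8(0u8); // tag keeps Circle(1) distinct from Square { side: 1 }
                r.hash(state);
            }
            Shape::Square { side } => {
                state.write_u8(1u8);
                side.hash(state);
            }
            Shape::Point => {
                state.write_u8(2u8); // unit variant: tag only, like PathArguments::None above
            }
        }
    }
}
```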
+diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
+index b667f530c3..24d34b7480 100644
+--- mozilla-release/third_party/rust/syn/src/gen/visit.rs
++++ mozilla-release/third_party/rust/syn/src/gen/visit.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -30,7 +29,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit"` feature.*
++/// *This trait is available only if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi(&mut self, i: &'ast Abi) {
+@@ -434,35 +433,27 @@ pub trait Visit<'ast> {
+ fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
+ visit_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit(&mut self, i: &'ast Lit) {
+ visit_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool(&mut self, i: &'ast LitBool) {
+ visit_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte(&mut self, i: &'ast LitByte) {
+ visit_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
+ visit_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char(&mut self, i: &'ast LitChar) {
+ visit_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float(&mut self, i: &'ast LitFloat) {
+ visit_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int(&mut self, i: &'ast LitInt) {
+ visit_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str(&mut self, i: &'ast LitStr) {
+ visit_lit_str(self, i)
+ }
+@@ -2537,7 +2528,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2569,7 +2559,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2577,37 +2566,31 @@ where
+ skip!(node.value);
+ v.visit_span(&node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
+ where
+ V: Visit<'ast> + ?Sized,
+diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
+index 5cddb827c6..5ce11f0b2e 100644
+--- mozilla-release/third_party/rust/syn/src/gen/visit_mut.rs
++++ mozilla-release/third_party/rust/syn/src/gen/visit_mut.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -31,7 +30,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
++/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi_mut(&mut self, i: &mut Abi) {
+@@ -438,35 +437,27 @@ pub trait VisitMut {
+ fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
+ visit_lifetime_def_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_mut(&mut self, i: &mut Lit) {
+ visit_lit_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
+ visit_lit_bool_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
+ visit_lit_byte_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
+ visit_lit_byte_str_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
+ visit_lit_char_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
+ visit_lit_float_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
+ visit_lit_int_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
+ visit_lit_str_mut(self, i)
+ }
+@@ -2543,7 +2534,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
+ where
+ V: VisitMut + ?Sized,
+@@ -2575,7 +2565,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
+ where
+ V: VisitMut + ?Sized,
+@@ -2583,37 +2572,31 @@ where
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
+ where
+ V: VisitMut + ?Sized,
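The two hunks above drop the `derive`/`full` cfg gates from the `visit_lit*` methods and their free-function walkers, so the literal visitors exist whenever the `visit` (or `visit-mut`) feature is enabled. A hedged usage sketch of that visitor API follows; it assumes syn built with the `full`, `parsing`, and `visit` features and is an illustration only, not part of the patch:

```rust
// Illustration only, not part of the patch: counting literals in a source file
// with syn's Visit trait. Assumes syn with the "full", "parsing" and "visit"
// features; LitCounter is a hypothetical type introduced for this example.
use syn::visit::Visit;

struct LitCounter {
    count: usize,
}

impl<'ast> Visit<'ast> for LitCounter {
    fn visit_lit(&mut self, node: &'ast syn::Lit) {
        self.count += 1;
        // Keep walking nested nodes via the default free-function walker.
        syn::visit::visit_lit(self, node);
    }
}

fn main() {
    let file: syn::File = syn::parse_str("fn answer() -> u32 { 40 + 2 }").unwrap();
    let mut counter = LitCounter { count: 0 };
    counter.visit_file(&file);
    println!("literals found: {}", counter.count);
}
```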
+diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
+index 95ab2e404a..05e8ef5cdf 100644
+--- mozilla-release/third_party/rust/syn/src/generics.rs
++++ mozilla-release/third_party/rust/syn/src/generics.rs
+@@ -1,13 +1,16 @@
+ use super::*;
+ use crate::punctuated::{Iter, IterMut, Punctuated};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::fmt::{self, Debug};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+@@ -20,7 +23,7 @@ ast_enum_of_structs! {
+ /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -28,9 +31,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum GenericParam {
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+@@ -46,7 +46,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+@@ -61,7 +61,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct LifetimeDef {
+ pub attrs: Vec<Attribute>,
+@@ -74,7 +74,7 @@ ast_struct! {
+ ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+@@ -87,6 +87,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for Generics {
++ fn default() -> Self {
++ Generics {
++ lt_token: None,
++ params: Punctuated::new(),
++ gt_token: None,
++ where_clause: None,
++ }
++ }
++}
++
+ impl Generics {
+ /// Returns an
+ /// <code
+@@ -280,29 +291,23 @@ impl<'a> Iterator for ConstParamsMut<'a> {
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct ImplGenerics<'a>(&'a Generics);
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct TypeGenerics<'a>(&'a Generics);
+
+ /// Returned by `TypeGenerics::as_turbofish`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Turbofish<'a>(&'a Generics);
+
+ #[cfg(feature = "printing")]
+@@ -314,9 +319,8 @@ impl Generics {
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+- /// # fn main() {
+- /// # let generics: syn::Generics = Default::default();
+- /// # let name = Ident::new("MyType", Span::call_site());
++ /// # let generics: syn::Generics = Default::default();
++ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+ /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+@@ -324,11 +328,10 @@ impl Generics {
+ /// // ...
+ /// }
+ /// }
+- /// # ;
+- /// # }
++ /// # ;
+ /// ```
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+@@ -339,11 +342,57 @@ impl Generics {
+ }
+ }
+
++#[cfg(feature = "printing")]
++macro_rules! generics_wrapper_impls {
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl<'a> Clone for $ty<'a> {
++ fn clone(&self) -> Self {
++ $ty(self.0)
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Debug for $ty<'a> {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter
++ .debug_tuple(stringify!($ty))
++ .field(self.0)
++ .finish()
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Eq for $ty<'a> {}
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> PartialEq for $ty<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ self.0 == other.0
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Hash for $ty<'a> {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ self.0.hash(state);
++ }
++ }
++ };
++}
++
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(ImplGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(TypeGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(Turbofish);
++
+ #[cfg(feature = "printing")]
+ impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+@@ -353,9 +402,8 @@ impl<'a> TypeGenerics<'a> {
+ ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+@@ -364,6 +412,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for BoundLifetimes {
++ fn default() -> Self {
++ BoundLifetimes {
++ for_token: Default::default(),
++ lt_token: Default::default(),
++ lifetimes: Punctuated::new(),
++ gt_token: Default::default(),
++ }
++ }
++}
++
+ impl LifetimeDef {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeDef {
+@@ -391,7 +450,7 @@ impl From<Ident> for TypeParam {
+ ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+@@ -402,7 +461,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+@@ -418,9 +477,8 @@ ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+@@ -431,7 +489,7 @@ ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct WhereClause {
+ pub where_token: Token![where],
+@@ -442,7 +500,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -450,9 +508,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum WherePredicate {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+@@ -468,7 +523,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+@@ -484,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+@@ -496,7 +551,7 @@ ast_struct! {
+ ast_struct! {
+ /// An equality predicate in a `where` clause (unsupported).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateEq {
+ pub lhs_ty: Type,
+@@ -521,7 +576,6 @@ pub mod parsing {
+
+ let mut params = Punctuated::new();
+ let mut allow_lifetime_param = true;
+- let mut allow_type_param = true;
+ loop {
+ if input.peek(Token![>]) {
+ break;
+@@ -534,7 +588,7 @@ pub mod parsing {
+ attrs,
+ ..input.parse()?
+ }));
+- } else if allow_type_param && lookahead.peek(Ident) {
++ } else if lookahead.peek(Ident) {
+ allow_lifetime_param = false;
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+@@ -542,7 +596,6 @@ pub mod parsing {
+ }));
+ } else if lookahead.peek(Token![const]) {
+ allow_lifetime_param = false;
+- allow_type_param = false;
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+@@ -665,57 +718,53 @@ pub mod parsing {
+
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let has_colon;
+- let has_default;
+- Ok(TypeParam {
+- attrs: input.call(Attribute::parse_outer)?,
+- ident: input.parse()?,
+- colon_token: {
+- if input.peek(Token![:]) {
+- has_colon = true;
+- Some(input.parse()?)
+- } else {
+- has_colon = false;
+- None
+- }
+- },
+- bounds: {
+- let mut bounds = Punctuated::new();
+- if has_colon {
+- loop {
+- if input.peek(Token![,])
+- || input.peek(Token![>])
+- || input.peek(Token![=])
+- {
+- break;
+- }
+- let value = input.parse()?;
+- bounds.push_value(value);
+- if !input.peek(Token![+]) {
+- break;
+- }
+- let punct = input.parse()?;
+- bounds.push_punct(punct);
+- }
++ let attrs = input.call(Attribute::parse_outer)?;
++ let ident: Ident = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++
++ let begin_bound = input.fork();
++ let mut is_maybe_const = false;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
++ break;
+ }
+- bounds
+- },
+- eq_token: {
+- if input.peek(Token![=]) {
+- has_default = true;
+- Some(input.parse()?)
+- } else {
+- has_default = false;
+- None
++ if input.peek(Token![?]) && input.peek2(Token![const]) {
++ input.parse::<Token![?]>()?;
++ input.parse::<Token![const]>()?;
++ is_maybe_const = true;
+ }
+- },
+- default: {
+- if has_default {
+- Some(input.parse()?)
+- } else {
+- None
++ let value: TypeParamBound = input.parse()?;
++ bounds.push_value(value);
++ if !input.peek(Token![+]) {
++ break;
+ }
+- },
++ let punct: Token![+] = input.parse()?;
++ bounds.push_punct(punct);
++ }
++ }
++
++ let mut eq_token: Option<Token![=]> = input.parse()?;
++ let mut default = if eq_token.is_some() {
++ Some(input.parse::<Type>()?)
++ } else {
++ None
++ };
++
++ if is_maybe_const {
++ bounds.clear();
++ eq_token = None;
++ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
++ }
++
++ Ok(TypeParam {
++ attrs,
++ ident,
++ colon_token,
++ bounds,
++ eq_token,
++ default,
+ })
+ }
+ }
+@@ -898,6 +947,8 @@ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
++ #[cfg(feature = "full")]
++ use proc_macro2::TokenTree;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+@@ -1080,9 +1131,25 @@ mod printing {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
++ #[cfg(feature = "full")]
++ {
++ if self.eq_token.is_none() {
++ if let Type::Verbatim(default) = default {
++ let mut iter = default.clone().into_iter();
++ match (iter.next(), iter.next()) {
++ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
++ if q.as_char() == '?' && c == "const" =>
++ {
++ return default.to_tokens(tokens);
++ }
++ _ => {}
++ }
++ }
++ }
++ }
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+@@ -1117,9 +1184,9 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
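The generics.rs changes above replace `#[derive(Default)]` and the per-type `cfg_attr` derives with hand-written impls, stamped out through the `generics_wrapper_impls!` macro so each trait impl can carry its own feature gate. A minimal sketch of that macro-generated-impls pattern on hypothetical newtypes (illustration only, not part of the patch; the real macro additionally puts a `#[cfg(feature = ...)]` attribute on each impl):

```rust
// Illustration only, not part of the patch: one macro stamping out the same
// trait impls for several wrapper types, as generics_wrapper_impls! does above.
// Meters and Seconds are hypothetical newtypes, not syn types.
use std::fmt;

macro_rules! newtype_impls {
    ($ty:ident) => {
        impl fmt::Display for $ty {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                write!(f, "{}", self.0)
            }
        }

        impl From<u64> for $ty {
            fn from(n: u64) -> Self {
                $ty(n)
            }
        }
    };
}

struct Meters(u64);
struct Seconds(u64);

newtype_impls!(Meters);
newtype_impls!(Seconds);
```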
+diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
+index ff4485ace9..0d8f7d3ddc 100644
+--- mozilla-release/third_party/rust/syn/src/item.rs
++++ mozilla-release/third_party/rust/syn/src/item.rs
+@@ -1,17 +1,15 @@
+ use super::*;
+-use crate::derive::{Data, DeriveInput};
++use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::punctuated::Punctuated;
+ use proc_macro2::TokenStream;
+
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
++#[cfg(feature = "parsing")]
++use std::mem;
+
+ ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -21,7 +19,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Item #manual_extra_traits {
++ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+@@ -83,7 +81,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -100,7 +98,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -115,7 +113,7 @@ ast_struct! {
+ ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -131,7 +129,7 @@ ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -143,7 +141,7 @@ ast_struct! {
+ ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub abi: Abi,
+@@ -156,7 +154,7 @@ ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+@@ -175,7 +173,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+@@ -188,8 +186,8 @@ ast_struct! {
+ ast_struct! {
+ /// A 2.0-style declarative macro introduced by the `macro` keyword.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- pub struct ItemMacro2 #manual_extra_traits {
++ /// *This type is available only if Syn is built with the `"full"` feature.*
++ pub struct ItemMacro2 {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub macro_token: Token![macro],
+@@ -201,7 +199,7 @@ ast_struct! {
+ ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -215,7 +213,7 @@ ast_struct! {
+ ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -233,7 +231,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -248,7 +246,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -267,7 +265,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -283,7 +281,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -299,7 +297,7 @@ ast_struct! {
+ ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -313,7 +311,7 @@ ast_struct! {
+ ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -324,145 +322,32 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Item {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Item {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Item::Const(this), Item::Const(other)) => this == other,
+- (Item::Enum(this), Item::Enum(other)) => this == other,
+- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
+- (Item::Fn(this), Item::Fn(other)) => this == other,
+- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
+- (Item::Impl(this), Item::Impl(other)) => this == other,
+- (Item::Macro(this), Item::Macro(other)) => this == other,
+- (Item::Macro2(this), Item::Macro2(other)) => this == other,
+- (Item::Mod(this), Item::Mod(other)) => this == other,
+- (Item::Static(this), Item::Static(other)) => this == other,
+- (Item::Struct(this), Item::Struct(other)) => this == other,
+- (Item::Trait(this), Item::Trait(other)) => this == other,
+- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
+- (Item::Type(this), Item::Type(other)) => this == other,
+- (Item::Union(this), Item::Union(other)) => this == other,
+- (Item::Use(this), Item::Use(other)) => this == other,
+- (Item::Verbatim(this), Item::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Item {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
++impl Item {
++ #[cfg(feature = "parsing")]
++ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+- Item::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- Item::Enum(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- Item::ExternCrate(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- Item::Fn(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- Item::ForeignMod(item) => {
+- state.write_u8(4);
+- item.hash(state);
+- }
+- Item::Impl(item) => {
+- state.write_u8(5);
+- item.hash(state);
+- }
+- Item::Macro(item) => {
+- state.write_u8(6);
+- item.hash(state);
+- }
+- Item::Macro2(item) => {
+- state.write_u8(7);
+- item.hash(state);
+- }
+- Item::Mod(item) => {
+- state.write_u8(8);
+- item.hash(state);
+- }
+- Item::Static(item) => {
+- state.write_u8(9);
+- item.hash(state);
+- }
+- Item::Struct(item) => {
+- state.write_u8(10);
+- item.hash(state);
+- }
+- Item::Trait(item) => {
+- state.write_u8(11);
+- item.hash(state);
+- }
+- Item::TraitAlias(item) => {
+- state.write_u8(12);
+- item.hash(state);
+- }
+- Item::Type(item) => {
+- state.write_u8(13);
+- item.hash(state);
+- }
+- Item::Union(item) => {
+- state.write_u8(14);
+- item.hash(state);
+- }
+- Item::Use(item) => {
+- state.write_u8(15);
+- item.hash(state);
+- }
+- Item::Verbatim(item) => {
+- state.write_u8(16);
+- TokenStreamHelper(item).hash(state);
+- }
++ Item::ExternCrate(ItemExternCrate { attrs, .. })
++ | Item::Use(ItemUse { attrs, .. })
++ | Item::Static(ItemStatic { attrs, .. })
++ | Item::Const(ItemConst { attrs, .. })
++ | Item::Fn(ItemFn { attrs, .. })
++ | Item::Mod(ItemMod { attrs, .. })
++ | Item::ForeignMod(ItemForeignMod { attrs, .. })
++ | Item::Type(ItemType { attrs, .. })
++ | Item::Struct(ItemStruct { attrs, .. })
++ | Item::Enum(ItemEnum { attrs, .. })
++ | Item::Union(ItemUnion { attrs, .. })
++ | Item::Trait(ItemTrait { attrs, .. })
++ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
++ | Item::Impl(ItemImpl { attrs, .. })
++ | Item::Macro(ItemMacro { attrs, .. })
++ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
++ Item::Verbatim(_) => Vec::new(),
+ Item::__Nonexhaustive => unreachable!(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ItemMacro2 {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ItemMacro2 {
+- fn eq(&self, other: &Self) -> bool {
+- self.attrs == other.attrs
+- && self.vis == other.vis
+- && self.macro_token == other.macro_token
+- && self.ident == other.ident
+- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ItemMacro2 {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.attrs.hash(state);
+- self.vis.hash(state);
+- self.macro_token.hash(state);
+- self.ident.hash(state);
+- TokenStreamHelper(&self.rules).hash(state);
+- }
+-}
+-
+ impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+@@ -496,10 +381,57 @@ impl From<DeriveInput> for Item {
+ }
+ }
+
++impl From<ItemStruct> for DeriveInput {
++ fn from(input: ItemStruct) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Struct(DataStruct {
++ struct_token: input.struct_token,
++ fields: input.fields,
++ semi_token: input.semi_token,
++ }),
++ }
++ }
++}
++
++impl From<ItemEnum> for DeriveInput {
++ fn from(input: ItemEnum) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Enum(DataEnum {
++ enum_token: input.enum_token,
++ brace_token: input.brace_token,
++ variants: input.variants,
++ }),
++ }
++ }
++}
++
++impl From<ItemUnion> for DeriveInput {
++ fn from(input: ItemUnion) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Union(DataUnion {
++ union_token: input.union_token,
++ fields: input.fields,
++ }),
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -530,7 +462,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+@@ -541,7 +473,7 @@ ast_struct! {
+ ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseName {
+ pub ident: Ident,
+ }
+@@ -550,7 +482,7 @@ ast_struct! {
+ ast_struct! {
+ /// An renamed identifier imported by a `use` item: `HashMap as Map`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+@@ -561,7 +493,7 @@ ast_struct! {
+ ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+@@ -570,7 +502,7 @@ ast_struct! {
+ ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+@@ -580,7 +512,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -590,7 +522,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ForeignItem #manual_extra_traits {
++ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+@@ -614,7 +546,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A foreign function in an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -626,7 +558,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -642,7 +574,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -655,7 +587,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an extern block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -663,61 +595,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ForeignItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ForeignItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
+- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
+- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
+- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
+- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ForeignItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ForeignItem::Fn(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ForeignItem::Static(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ForeignItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ForeignItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ForeignItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ForeignItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -727,7 +608,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum TraitItem #manual_extra_traits {
++ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+@@ -751,7 +632,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+@@ -766,7 +647,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait method within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+@@ -778,7 +659,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+@@ -794,7 +675,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -802,61 +683,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for TraitItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for TraitItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
+- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
+- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
+- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
+- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for TraitItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- TraitItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- TraitItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- TraitItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- TraitItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- TraitItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- TraitItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -866,7 +696,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ImplItem #manual_extra_traits {
++ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+@@ -890,7 +720,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -908,7 +738,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -921,7 +751,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -938,7 +768,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -946,62 +776,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ImplItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ImplItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
+- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
+- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
+- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
+- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ImplItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ImplItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ImplItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ImplItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ImplItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ImplItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ImplItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+@@ -1017,13 +796,34 @@ ast_struct! {
+ }
+ }
+
++impl Signature {
++ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
++ pub fn receiver(&self) -> Option<&FnArg> {
++ let arg = self.inputs.first()?;
++ match arg {
++ FnArg::Receiver(_) => Some(arg),
++ FnArg::Typed(PatType { pat, .. }) => {
++ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
++ if ident == "self" {
++ return Some(arg);
++ }
++ }
++ None
++ }
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum FnArg {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
++ ///
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+@@ -1035,7 +835,10 @@ ast_struct! {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+@@ -1056,7 +859,8 @@ pub mod parsing {
+
+ use crate::ext::IdentExt;
+ use crate::parse::discouraged::Speculative;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
++ use crate::token::Brace;
+ use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
+ use std::iter::{self, FromIterator};
+
+@@ -1064,18 +868,26 @@ pub mod parsing {
+
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![extern]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
++ }
++ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+@@ -1083,8 +895,6 @@ pub mod parsing {
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1094,18 +904,61 @@ pub mod parsing {
+ } else if lookahead.peek(Token![use]) {
+ input.parse().map(Item::Use)
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(Item::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Static(ItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+- input.parse().map(Item::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let vis = input.parse()?;
++ let const_token = input.parse()?;
++ let ident = {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ };
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Const(ItemConst {
++ attrs: Vec::new(),
++ vis,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1117,21 +970,19 @@ pub mod parsing {
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+- input.parse().map(Item::Impl)
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(Item::Type)
++ parse_item_type(begin, input)
+ } else if lookahead.peek(existential) {
+ input.call(item_existential).map(Item::Verbatim)
+ } else if lookahead.peek(Token![struct]) {
+@@ -1147,14 +998,18 @@ pub mod parsing {
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+- input.parse().map(Item::Impl)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Token![macro]) {
+ input.parse().map(Item::Macro2)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1163,32 +1018,64 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- Item::ExternCrate(item) => &mut item.attrs,
+- Item::Use(item) => &mut item.attrs,
+- Item::Static(item) => &mut item.attrs,
+- Item::Const(item) => &mut item.attrs,
+- Item::Fn(item) => &mut item.attrs,
+- Item::Mod(item) => &mut item.attrs,
+- Item::ForeignMod(item) => &mut item.attrs,
+- Item::Type(item) => &mut item.attrs,
+- Item::Struct(item) => &mut item.attrs,
+- Item::Enum(item) => &mut item.attrs,
+- Item::Union(item) => &mut item.attrs,
+- Item::Trait(item) => &mut item.attrs,
+- Item::TraitAlias(item) => &mut item.attrs,
+- Item::Impl(item) => &mut item.attrs,
+- Item::Macro(item) => &mut item.attrs,
+- Item::Macro2(item) => &mut item.attrs,
+- Item::Verbatim(_) => return Ok(item),
+- Item::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(item)
++ }
++ }
++
++ struct FlexibleItemType {
++ vis: Visibility,
++ defaultness: Option<Token![default]>,
++ type_token: Token![type],
++ ident: Ident,
++ generics: Generics,
++ colon_token: Option<Token![:]>,
++ bounds: Punctuated<TypeParamBound, Token![+]>,
++ ty: Option<(Token![=], Type)>,
++ semi_token: Token![;],
++ }
++
++ impl Parse for FlexibleItemType {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let type_token: Token![type] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let mut generics: Generics = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ bounds.push_value(input.parse::<TypeParamBound>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ bounds.push_punct(input.parse::<Token![+]>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ }
+ }
++ generics.where_clause = input.parse()?;
++ let ty = if let Some(eq_token) = input.parse()? {
++ Some((eq_token, input.parse::<Type>()?))
++ } else {
++ None
++ };
++ let semi_token: Token![;] = input.parse()?;
+
+- Ok(item)
++ Ok(FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ })
+ }
+ }
+
+@@ -1310,7 +1197,6 @@ pub mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+- || lookahead.peek(Token![extern])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+@@ -1392,69 +1278,126 @@ pub mod parsing {
+ }
+ }
+
+- impl Parse for ItemFn {
+- fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let vis: Visibility = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
++ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
++ let trailing_punct = args.trailing_punct();
+
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+- let variadic = inputs.last().as_ref().and_then(get_variadic);
+-
+- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
+- if let FnArg::Typed(PatType { ty, .. }) = input {
+- if let Type::Verbatim(tokens) = &**ty {
+- if let Ok(dots) = parse2(tokens.clone()) {
+- return Some(Variadic {
+- attrs: Vec::new(),
+- dots,
+- });
+- }
+- }
+- }
+- None
++ let last = match args.last_mut()? {
++ FnArg::Typed(last) => last,
++ _ => return None,
++ };
++
++ let ty = match last.ty.as_ref() {
++ Type::Verbatim(ty) => ty,
++ _ => return None,
++ };
++
++ let mut variadic = Variadic {
++ attrs: Vec::new(),
++ dots: parse2(ty.clone()).ok()?,
++ };
++
++ if let Pat::Verbatim(pat) = last.pat.as_ref() {
++ if pat.to_string() == "..." && !trailing_punct {
++ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
++ args.pop();
+ }
++ }
+
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ Some(variadic)
++ }
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
++ TokenStream::from_iter(vec![
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[0]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[1]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Alone);
++ dot.set_span(dots.spans[2]);
++ dot
++ }),
++ ])
++ }
+
+- Ok(ItemFn {
+- attrs: private::attrs(outer_attrs, inner_attrs),
+- vis,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Box::new(Block { brace_token, stmts }),
+- })
++ fn peek_signature(input: ParseStream) -> bool {
++ let fork = input.fork();
++ fork.parse::<Option<Token![const]>>().is_ok()
++ && fork.parse::<Option<Token![async]>>().is_ok()
++ && fork.parse::<Option<Token![unsafe]>>().is_ok()
++ && fork.parse::<Option<Abi>>().is_ok()
++ && fork.peek(Token![fn])
++ }
++
++ fn parse_signature(input: ParseStream) -> Result<Signature> {
++ let constness: Option<Token![const]> = input.parse()?;
++ let asyncness: Option<Token![async]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let abi: Option<Abi> = input.parse()?;
++ let fn_token: Token![fn] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let generics: Generics = input.parse()?;
++
++ let content;
++ let paren_token = parenthesized!(content in input);
++ let mut inputs = parse_fn_args(&content)?;
++ let variadic = pop_variadic(&mut inputs);
++
++ let output: ReturnType = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ Ok(Signature {
++ constness,
++ asyncness,
++ unsafety,
++ abi,
++ fn_token,
++ ident,
++ paren_token,
++ inputs,
++ output,
++ variadic,
++ generics: Generics {
++ where_clause,
++ ..generics
++ },
++ })
++ }
++
++ impl Parse for ItemFn {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ parse_rest_of_fn(input, outer_attrs, vis, sig)
+ }
+ }
+
++ fn parse_rest_of_fn(
++ input: ParseStream,
++ outer_attrs: Vec<Attribute>,
++ vis: Visibility,
++ sig: Signature,
++ ) -> Result<ItemFn> {
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let stmts = content.call(Block::parse_within)?;
++
++ Ok(ItemFn {
++ attrs: private::attrs(outer_attrs, inner_attrs),
++ vis,
++ sig,
++ block: Box::new(Block { brace_token, stmts }),
++ })
++ }
++
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1491,26 +1434,79 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
++ let mut args = Punctuated::new();
++ let mut has_receiver = false;
++
++ while !input.is_empty() {
++ let attrs = input.call(Attribute::parse_outer)?;
++
++ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
++ FnArg::Typed(PatType {
++ attrs,
++ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
++ colon_token: Token![:](dots.spans[0]),
++ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
++ })
++ } else {
++ let mut arg: FnArg = input.parse()?;
++ match &mut arg {
++ FnArg::Receiver(receiver) if has_receiver => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected second method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) if !args.is_empty() => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) => {
++ has_receiver = true;
++ receiver.attrs = attrs;
++ }
++ FnArg::Typed(arg) => arg.attrs = attrs,
++ }
++ arg
++ };
++ args.push_value(arg);
++
++ if input.is_empty() {
++ break;
++ }
++
++ let comma: Token![,] = input.parse()?;
++ args.push_punct(comma);
++ }
++
++ Ok(args)
++ }
++
+ fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
++ // Hack to parse pre-2018 syntax in
++ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
++ // because the rest of the test case is valuable.
++ if input.peek(Ident) && input.peek2(Token![<]) {
++ let span = input.fork().parse::<Ident>()?.span();
++ return Ok(PatType {
++ attrs: Vec::new(),
++ pat: Box::new(Pat::Wild(PatWild {
++ attrs: Vec::new(),
++ underscore_token: Token![_](span),
++ })),
++ colon_token: Token![:](span),
++ ty: input.parse()?,
++ });
++ }
++
+ Ok(PatType {
+ attrs: Vec::new(),
+- pat: input.parse()?,
++ pat: Box::new(pat::parsing::multi_pat(input)?),
+ colon_token: input.parse()?,
+ ty: Box::new(match input.parse::<Option<Token![...]>>()? {
+- Some(dot3) => {
+- let args = vec![
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
+- ];
+- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
+- |(mut arg, span)| {
+- arg.set_span(*span);
+- arg
+- },
+- ));
+- Type::Verbatim(tokens)
+- }
++ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
+ None => input.parse()?,
+ }),
+ })
+@@ -1581,22 +1577,60 @@ pub mod parsing {
+
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![fn]) {
+- input.parse().map(ForeignItem::Fn)
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(token::Brace) {
++ let content;
++ braced!(content in input);
++ content.call(Attribute::parse_inner)?;
++ content.call(Block::parse_within)?;
++
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Fn(ForeignItemFn {
++ attrs: Vec::new(),
++ vis,
++ sig,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(ForeignItem::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![=]) {
++ input.parse::<Token![=]>()?;
++ input.parse::<Expr>()?;
++ input.parse::<Token![;]>()?;
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Static(ForeignItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ForeignItem::Type)
++ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1605,17 +1639,16 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- ForeignItem::Fn(item) => &mut item.attrs,
+- ForeignItem::Static(item) => &mut item.attrs,
+- ForeignItem::Type(item) => &mut item.attrs,
+- ForeignItem::Macro(item) => &mut item.attrs,
+- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
+- }
++ let item_attrs = match &mut item {
++ ForeignItem::Fn(item) => &mut item.attrs,
++ ForeignItem::Static(item) => &mut item.attrs,
++ ForeignItem::Type(item) => &mut item.attrs,
++ ForeignItem::Macro(item) => &mut item.attrs,
++ ForeignItem::Verbatim(_) => return Ok(item),
++ ForeignItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+
+ Ok(item)
+ }
+@@ -1625,55 +1658,12 @@ pub mod parsing {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let mut inputs = Punctuated::new();
+- let mut variadic = None;
+- while !content.is_empty() {
+- let attrs = content.call(Attribute::parse_outer)?;
+-
+- if let Some(dots) = content.parse()? {
+- variadic = Some(Variadic { attrs, dots });
+- break;
+- }
+-
+- let mut arg = content.call(fn_arg_typed)?;
+- arg.attrs = attrs;
+- inputs.push_value(FnArg::Typed(arg));
+- if content.is_empty() {
+- break;
+- }
+-
+- inputs.push_punct(content.parse()?);
+- }
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+ let semi_token: Token![;] = input.parse()?;
+-
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+- sig: Signature {
+- constness: None,
+- asyncness: None,
+- unsafety: None,
+- abi: None,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ semi_token,
+ })
+ }
+@@ -1706,6 +1696,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some()
++ || generics.lt_token.is_some()
++ || generics.where_clause.is_some()
++ || colon_token.is_some()
++ || ty.is_some()
++ {
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Type(ForeignItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1742,6 +1763,36 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(Item::Type(ItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty: Box::new(ty),
++ semi_token,
++ }))
++ }
++ }
++
+ #[cfg(not(feature = "printing"))]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ Err(input.error("existential type is not supported"))
+@@ -1887,7 +1938,7 @@ pub mod parsing {
+
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let attrs = input.call(Attribute::parse_outer)?;
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+@@ -1896,7 +1947,7 @@ pub mod parsing {
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+- attrs,
++ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+@@ -1909,7 +1960,7 @@ pub mod parsing {
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+- attrs: Vec<Attribute>,
++ outer_attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+@@ -1937,13 +1988,14 @@ pub mod parsing {
+
+ let content;
+ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+- attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ vis,
+ unsafety,
+ auto_token,
+@@ -2014,14 +2066,19 @@ pub mod parsing {
+
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![const]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(TraitItem::Method)
++ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.parse().map(TraitItem::Const)
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+@@ -2032,18 +2089,11 @@ pub mod parsing {
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(TraitItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(TraitItem::Type)
++ parse_trait_item_type(begin.fork(), input)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ {
+@@ -2052,18 +2102,20 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- TraitItem::Const(item) => &mut item.attrs,
+- TraitItem::Method(item) => &mut item.attrs,
+- TraitItem::Type(item) => &mut item.attrs,
+- TraitItem::Macro(item) => &mut item.attrs,
+- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ match (vis, defaultness) {
++ (Visibility::Inherited, None) => {}
++ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ }
+
++ let item_attrs = match &mut item {
++ TraitItem::Const(item) => &mut item.attrs,
++ TraitItem::Method(item) => &mut item.attrs,
++ TraitItem::Type(item) => &mut item.attrs,
++ TraitItem::Macro(item) => &mut item.attrs,
++ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+@@ -2073,7 +2125,14 @@ pub mod parsing {
+ Ok(TraitItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ default: {
+@@ -2093,20 +2152,7 @@ pub mod parsing {
+ impl Parse for TraitItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
+@@ -2124,22 +2170,7 @@ pub mod parsing {
+
+ Ok(TraitItemMethod {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+@@ -2188,6 +2219,35 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || vis.is_some() {
++ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(TraitItem::Type(TraitItemType {
++ attrs: Vec::new(),
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ default: ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2207,52 +2267,67 @@ pub mod parsing {
+
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let defaultness: Option<Token![default]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let impl_token: Token![impl] = input.parse()?;
+-
+- let has_generics = input.peek(Token![<])
+- && (input.peek2(Token![>])
+- || input.peek2(Token![#])
+- || (input.peek2(Ident) || input.peek2(Lifetime))
+- && (input.peek3(Token![:])
+- || input.peek3(Token![,])
+- || input.peek3(Token![>])));
+- let generics: Generics = if has_generics {
+- input.parse()?
+- } else {
+- Generics::default()
+- };
+-
+- let trait_ = {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- if ahead.parse::<Option<Token![!]>>().is_ok()
+- && ahead.parse::<Path>().is_ok()
+- && ahead.parse::<Token![for]>().is_ok()
+- {
+- let polarity: Option<Token![!]> = input.parse()?;
+- let path: Path = input.parse()?;
+- let for_token: Token![for] = input.parse()?;
+- Some((polarity, path, for_token))
+- } else {
+- None
+- }
+- };
+- let self_ty: Type = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let allow_const_impl = false;
++ parse_impl(input, allow_const_impl).map(Option::unwrap)
++ }
++ }
++
++ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let impl_token: Token![impl] = input.parse()?;
++
++ let has_generics = input.peek(Token![<])
++ && (input.peek2(Token![>])
++ || input.peek2(Token![#])
++ || (input.peek2(Ident) || input.peek2(Lifetime))
++ && (input.peek3(Token![:])
++ || input.peek3(Token![,])
++ || input.peek3(Token![>]))
++ || input.peek2(Token![const]));
++ let generics: Generics = if has_generics {
++ input.parse()?
++ } else {
++ Generics::default()
++ };
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
++ let is_const_impl = allow_const_impl
++ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
++ if is_const_impl {
++ input.parse::<Option<Token![?]>>()?;
++ input.parse::<Token![const]>()?;
++ }
+
+- let mut items = Vec::new();
+- while !content.is_empty() {
+- items.push(content.parse()?);
++ let trait_ = (|| -> Option<_> {
++ let ahead = input.fork();
++ let polarity: Option<Token![!]> = ahead.parse().ok()?;
++ let mut path: Path = ahead.parse().ok()?;
++ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
++ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
++ path.segments.last_mut().unwrap().arguments = parenthesized;
+ }
++ let for_token: Token![for] = ahead.parse().ok()?;
++ input.advance_to(&ahead);
++ Some((polarity, path, for_token))
++ })();
++
++ let self_ty: Type = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++
++ let mut items = Vec::new();
++ while !content.is_empty() {
++ items.push(content.parse()?);
++ }
+
+- Ok(ItemImpl {
++ if is_const_impl {
++ Ok(None)
++ } else {
++ Ok(Some(ItemImpl {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+ defaultness,
+ unsafety,
+@@ -2265,12 +2340,13 @@ pub mod parsing {
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+- })
++ }))
+ }
+ }
+
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+@@ -2284,28 +2360,38 @@ pub mod parsing {
+ None
+ };
+
+- let mut item = if lookahead.peek(Token![const]) {
+- ahead.parse::<Token![const]>()?;
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(ImplItem::Method)
++ } else if lookahead.peek(Token![const]) {
++ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
+- input.parse().map(ImplItem::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.advance_to(&ahead);
++ let ident: Ident = input.call(Ident::parse_any)?;
++ let colon_token: Token![:] = input.parse()?;
++ let ty: Type = input.parse()?;
++ if let Some(eq_token) = input.parse()? {
++ return Ok(ImplItem::Const(ImplItemConst {
++ attrs,
++ vis,
++ defaultness,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }));
++ } else {
++ input.parse::<Token![;]>()?;
++ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ImplItem::Type)
++ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
+ input.call(item_existential).map(ImplItem::Verbatim)
+ } else if vis.is_inherited()
+@@ -2313,7 +2399,6 @@ pub mod parsing {
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -2346,7 +2431,14 @@ pub mod parsing {
+ vis: input.parse()?,
+ defaultness: input.parse()?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+@@ -2358,50 +2450,39 @@ pub mod parsing {
+
+ impl Parse for ImplItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ let sig = parse_signature(input)?;
++
++ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ // Accept methods without a body in an impl block because
++ // rustc's *parser* does not reject them (the compilation error
++ // is emitted later than parsing) and it can be useful for macro
++ // DSLs.
++ let mut punct = Punct::new(';', Spacing::Alone);
++ punct.set_span(semi.span);
++ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
++ Block {
++ brace_token: Brace::default(),
++ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
++ }
++ } else {
++ let content;
++ let brace_token = braced!(content in input);
++ attrs.extend(content.call(Attribute::parse_inner)?);
++ Block {
++ brace_token,
++ stmts: content.call(Block::parse_within)?,
++ }
++ };
+
+ Ok(ImplItemMethod {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ vis,
+ defaultness,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Block { brace_token, stmts },
++ sig,
++ block,
+ })
+ }
+ }
+@@ -2426,6 +2507,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if colon_token.is_some() || ty.is_none() {
++ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(ImplItem::Type(ImplItemType {
++ attrs: Vec::new(),
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2471,6 +2583,7 @@ mod printing {
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
++ use crate::punctuated::Pair;
+
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+@@ -2835,6 +2948,14 @@ mod printing {
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
++ if self.block.stmts.len() == 1 {
++ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
++ if verbatim.to_string() == ";" {
++ verbatim.to_tokens(tokens);
++ return;
++ }
++ }
++ }
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+@@ -2905,6 +3026,33 @@ mod printing {
+ }
+ }
+
++ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
++ let arg = match arg {
++ FnArg::Typed(arg) => arg,
++ FnArg::Receiver(receiver) => {
++ receiver.to_tokens(tokens);
++ return false;
++ }
++ };
++
++ match arg.ty.as_ref() {
++ Type::Verbatim(ty) if ty.to_string() == "..." => {
++ match arg.pat.as_ref() {
++ Pat::Verbatim(pat) if pat.to_string() == "..." => {
++ tokens.append_all(arg.attrs.outer());
++ pat.to_tokens(tokens);
++ }
++ _ => arg.to_tokens(tokens),
++ }
++ true
++ }
++ _ => {
++ arg.to_tokens(tokens);
++ false
++ }
++ }
++ }
++
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+@@ -2915,11 +3063,24 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+- self.inputs.to_tokens(tokens);
+- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
+- <Token![,]>::default().to_tokens(tokens);
++ let mut last_is_variadic = false;
++ for input in self.inputs.pairs() {
++ match input {
++ Pair::Punctuated(input, comma) => {
++ maybe_variadic_to_tokens(input, tokens);
++ comma.to_tokens(tokens);
++ }
++ Pair::End(input) => {
++ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
++ }
++ }
++ }
++ if self.variadic.is_some() && !last_is_variadic {
++ if !self.inputs.empty_or_trailing() {
++ <Token![,]>::default().to_tokens(tokens);
++ }
++ self.variadic.to_tokens(tokens);
+ }
+- self.variadic.to_tokens(tokens);
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+diff --git a/third_party/rust/syn/src/keyword.rs b/third_party/rust/syn/src/keyword.rs
+deleted file mode 100644
+index e69de29bb2..0000000000
+diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
+index c8ada7e638..3da506731e 100644
+--- mozilla-release/third_party/rust/syn/src/lib.rs
++++ mozilla-release/third_party/rust/syn/src/lib.rs
+@@ -1,3 +1,11 @@
++//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
++//!
++//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
++//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
++//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
++//!
++//! <br>
++//!
+ //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+ //! tree of Rust source code.
+ //!
+@@ -62,8 +70,8 @@
+ //! ```
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use quote::quote;
+ //! use syn::{parse_macro_input, DeriveInput};
+@@ -242,35 +250,48 @@
+ //! dynamic library libproc_macro from rustc toolchain.
+
+ // Syn types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
+ #![deny(clippy::all, clippy::pedantic)]
+ // Ignored clippy lints.
+ #![allow(
+- clippy::block_in_if_condition_stmt,
++ clippy::blocks_in_if_conditions,
+ clippy::cognitive_complexity,
+ clippy::doc_markdown,
+ clippy::eval_order_dependence,
+ clippy::inherent_to_string,
+ clippy::large_enum_variant,
++ clippy::manual_non_exhaustive,
++ clippy::match_like_matches_macro,
++ clippy::match_on_vec_items,
++ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::suspicious_op_assign_impl,
+ clippy::too_many_arguments,
+- clippy::trivially_copy_pass_by_ref
++ clippy::trivially_copy_pass_by_ref,
++ clippy::unnecessary_unwrap
+ )]
+ // Ignored clippy_pedantic lints.
+ #![allow(
+ clippy::cast_possible_truncation,
++ clippy::default_trait_access,
+ clippy::empty_enum,
++ clippy::expl_impl_clone_on_copy,
+ clippy::if_not_else,
+ clippy::items_after_statements,
++ clippy::match_same_arms,
++ clippy::missing_errors_doc,
+ clippy::module_name_repetitions,
++ clippy::must_use_candidate,
++ clippy::option_if_let_else,
+ clippy::shadow_unrelated,
+ clippy::similar_names,
+ clippy::single_match_else,
++ clippy::too_many_lines,
+ clippy::unseparated_literal_suffix,
+ clippy::use_self,
+- clippy::used_underscore_binding
++ clippy::used_underscore_binding,
++ clippy::wildcard_imports
+ )]
+
+ #[cfg(all(
+@@ -284,7 +305,6 @@ extern crate unicode_xid;
+ #[cfg(feature = "printing")]
+ extern crate quote;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[macro_use]
+ mod macros;
+
+@@ -307,7 +327,6 @@ pub use crate::attr::{
+ AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
+ };
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod bigint;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -364,9 +383,7 @@ pub use crate::file::File;
+ mod lifetime;
+ pub use crate::lifetime::Lifetime;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod lit;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
+ };
+@@ -441,6 +458,9 @@ pub mod parse_macro_input;
+ #[cfg(all(feature = "parsing", feature = "printing"))]
+ pub mod spanned;
+
++#[cfg(all(feature = "parsing", feature = "full"))]
++mod whitespace;
++
+ mod gen {
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+@@ -482,7 +502,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit"` feature.*
++ /// *This module is available only if Syn is built with the `"visit"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -603,7 +623,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit-mut"`
++ /// *This module is available only if Syn is built with the `"visit-mut"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -702,7 +722,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"fold"` feature.*
++ /// *This module is available only if Syn is built with the `"fold"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -744,6 +764,22 @@ mod gen {
+ #[rustfmt::skip]
+ pub mod fold;
+
++ #[cfg(feature = "clone-impls")]
++ #[rustfmt::skip]
++ mod clone;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod eq;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod hash;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod debug;
++
+ #[cfg(any(feature = "full", feature = "derive"))]
+ #[path = "../gen_helper.rs"]
+ mod helper;
+@@ -757,6 +793,8 @@ pub mod export;
+ mod custom_keyword;
+ mod custom_punctuation;
+ mod sealed;
++mod span;
++mod thread;
+
+ #[cfg(feature = "parsing")]
+ mod lookahead;
+@@ -764,13 +802,15 @@ mod lookahead;
+ #[cfg(feature = "parsing")]
+ pub mod parse;
+
+-mod span;
++#[cfg(feature = "full")]
++mod reserved;
++
++#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
++mod verbatim;
+
+ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+ mod print;
+
+-mod thread;
+-
+ ////////////////////////////////////////////////////////////////////////////////
+
+ #[allow(dead_code, non_camel_case_types)]
+@@ -800,14 +840,14 @@ pub use crate::error::{Error, Result};
+ ///
+ /// [`syn::parse2`]: parse2
+ ///
+-/// *This function is available if Syn is built with both the `"parsing"` and
++/// *This function is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use quote::quote;
+ /// use syn::DeriveInput;
+@@ -847,7 +887,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
+ ///
+ /// [`syn::parse`]: parse()
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+@@ -855,7 +895,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ ///
+ /// # Hygiene
+ ///
+@@ -874,9 +914,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(feature = "parsing")]
+ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+@@ -894,7 +932,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ ///
+ /// If present, either of these would be an error using `from_str`.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` and
++/// *This function is available only if Syn is built with the `"parsing"` and
+ /// `"full"` features.*
+ ///
+ /// # Examples
+@@ -918,9 +956,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub fn parse_file(mut content: &str) -> Result<File> {
+@@ -931,13 +967,16 @@ pub fn parse_file(mut content: &str) -> Result<File> {
+ }
+
+ let mut shebang = None;
+- if content.starts_with("#!") && !content.starts_with("#![") {
+- if let Some(idx) = content.find('\n') {
+- shebang = Some(content[..idx].to_string());
+- content = &content[idx..];
+- } else {
+- shebang = Some(content.to_string());
+- content = "";
++ if content.starts_with("#!") {
++ let rest = whitespace::skip(&content[2..]);
++ if !rest.starts_with('[') {
++ if let Some(idx) = content.find('\n') {
++ shebang = Some(content[..idx].to_string());
++ content = &content[idx..];
++ } else {
++ shebang = Some(content.to_string());
++ content = "";
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
+index d51c48e827..959cc5f9c6 100644
+--- mozilla-release/third_party/rust/syn/src/lifetime.rs
++++ mozilla-release/third_party/rust/syn/src/lifetime.rs
+@@ -18,10 +18,8 @@ use crate::lookahead;
+ /// - All following characters must be Unicode code points with the XID_Continue
+ /// property.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+-#[cfg_attr(feature = "extra-traits", derive(Debug))]
+-#[derive(Clone)]
+ pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+@@ -72,6 +70,15 @@ impl Display for Lifetime {
+ }
+ }
+
++impl Clone for Lifetime {
++ fn clone(&self) -> Self {
++ Lifetime {
++ apostrophe: self.apostrophe,
++ ident: self.ident.clone(),
++ }
++ }
++}
++
+ impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
+diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
+index f2209a2980..ee77e75bec 100644
+--- mozilla-release/third_party/rust/syn/src/lit.rs
++++ mozilla-release/third_party/rust/syn/src/lit.rs
+@@ -22,9 +22,6 @@ use crate::{Error, Result};
+ ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+- ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+@@ -33,7 +30,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Lit #manual_extra_traits {
++ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+@@ -64,61 +61,44 @@ ast_enum_of_structs! {
+
+ ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitStr #manual_extra_traits_debug {
+- repr: Box<LitStrRepr>,
++ pub struct LitStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+-struct LitStrRepr {
+- token: Literal,
+- suffix: Box<str>,
+-}
+-
+ ast_struct! {
+ /// A byte string literal: `b"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByteStr #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByteStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A byte literal: `b'f'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByte #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByte {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A character literal: `'a'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitChar #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitChar {
++ repr: Box<LitRepr>,
+ }
+ }
+
++struct LitRepr {
++ token: Literal,
++ suffix: Box<str>,
++}
++
+ ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitInt #manual_extra_traits_debug {
++ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -129,15 +109,11 @@ ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinte or NaN.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitFloat #manual_extra_traits_debug {
++ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -146,92 +122,27 @@ struct LitFloatRepr {
+
+ ast_struct! {
+ /// A boolean literal: `true` or `false`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitBool #manual_extra_traits_debug {
++ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Lit {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Lit {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Lit::Str(this), Lit::Str(other)) => this == other,
+- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
+- (Lit::Byte(this), Lit::Byte(other)) => this == other,
+- (Lit::Char(this), Lit::Char(other)) => this == other,
+- (Lit::Int(this), Lit::Int(other)) => this == other,
+- (Lit::Float(this), Lit::Float(other)) => this == other,
+- (Lit::Bool(this), Lit::Bool(other)) => this == other,
+- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Lit {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Lit::Str(lit) => {
+- hash.write_u8(0);
+- lit.hash(hash);
+- }
+- Lit::ByteStr(lit) => {
+- hash.write_u8(1);
+- lit.hash(hash);
+- }
+- Lit::Byte(lit) => {
+- hash.write_u8(2);
+- lit.hash(hash);
+- }
+- Lit::Char(lit) => {
+- hash.write_u8(3);
+- lit.hash(hash);
+- }
+- Lit::Int(lit) => {
+- hash.write_u8(4);
+- lit.hash(hash);
+- }
+- Lit::Float(lit) => {
+- hash.write_u8(5);
+- lit.hash(hash);
+- }
+- Lit::Bool(lit) => {
+- hash.write_u8(6);
+- lit.hash(hash);
+- }
+- Lit::Verbatim(lit) => {
+- hash.write_u8(7);
+- lit.to_string().hash(hash);
+- }
+- }
+- }
+-}
+-
+ impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+- let mut lit = Literal::string(value);
+- lit.set_span(span);
++ let mut token = Literal::string(value);
++ token.set_span(span);
+ LitStr {
+- repr: Box::new(LitStrRepr {
+- token: lit,
++ repr: Box::new(LitRepr {
++ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+@@ -311,7 +222,7 @@ impl LitStr {
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+- let stream = respan_token_stream(g.stream().clone(), span);
++ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+@@ -345,19 +256,30 @@ impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+- LitByteStr { token }
++ LitByteStr {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+- value::parse_lit_byte_str(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte_str(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -365,19 +287,30 @@ impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+- LitByte { token }
++ LitByte {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> u8 {
+- value::parse_lit_byte(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -385,36 +318,52 @@ impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+- LitChar { token }
++ LitChar {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> char {
+- value::parse_lit_char(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_char(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitInt {
+- repr: Box::new(LitIntRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not an integer literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_int(repr) {
++ Some(parse) => parse,
++ None => panic!("Not an integer literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported integer literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -492,18 +441,23 @@ impl Display for LitInt {
+
+ impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitFloat {
+- repr: Box::new(LitFloatRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not a float literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_float(repr) {
++ Some(parse) => parse,
++ None => panic!("Not a float literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported float literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -575,7 +529,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByteStr")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -584,7 +538,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByte")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -593,7 +547,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitChar")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -626,15 +580,53 @@ mod debug_impls {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl Clone for LitRepr {
++ fn clone(&self) -> Self {
++ LitRepr {
++ token: self.token.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitIntRepr {
++ fn clone(&self) -> Self {
++ LitIntRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitFloatRepr {
++ fn clone(&self) -> Self {
++ LitFloatRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
+ macro_rules! lit_extra_traits {
+- ($ty:ident, $($field:ident).+) => {
+- #[cfg(feature = "extra-traits")]
+- impl Eq for $ty {}
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $ty {
++ fn clone(&self) -> Self {
++ $ty {
++ repr: self.repr.clone(),
++ }
++ }
++ }
+
+ #[cfg(feature = "extra-traits")]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+- self.$($field).+.to_string() == other.$($field).+.to_string()
++ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+@@ -644,7 +636,7 @@ macro_rules! lit_extra_traits {
+ where
+ H: Hasher,
+ {
+- self.$($field).+.to_string().hash(state);
++ self.repr.token.to_string().hash(state);
+ }
+ }
+
+@@ -657,20 +649,23 @@ macro_rules! lit_extra_traits {
+ };
+ }
+
+-lit_extra_traits!(LitStr, repr.token);
+-lit_extra_traits!(LitByteStr, token);
+-lit_extra_traits!(LitByte, token);
+-lit_extra_traits!(LitChar, token);
+-lit_extra_traits!(LitInt, repr.token);
+-lit_extra_traits!(LitFloat, repr.token);
+-lit_extra_traits!(LitBool, value);
++lit_extra_traits!(LitStr);
++lit_extra_traits!(LitByteStr);
++lit_extra_traits!(LitByte);
++lit_extra_traits!(LitChar);
++lit_extra_traits!(LitInt);
++lit_extra_traits!(LitFloat);
++
++#[cfg(feature = "parsing")]
++#[doc(hidden)]
++#[allow(non_snake_case)]
++pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
++ match marker {}
++}
+
+ ast_enum! {
+ /// The style of a string literal, either plain quoted or a raw string like
+ /// `r##"data"##`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+ pub enum StrStyle #no_visit {
+ /// An ordinary string like `"data"`.
+ Cooked,
+@@ -691,7 +686,9 @@ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
++ use crate::buffer::Cursor;
+ use crate::parse::{Parse, ParseStream, Result};
++ use proc_macro2::Punct;
+
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -699,25 +696,73 @@ pub mod parsing {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+- while let Some((ident, rest)) = cursor.ident() {
+- let value = if ident == "true" {
+- true
+- } else if ident == "false" {
+- false
+- } else {
+- break;
+- };
+- let lit_bool = LitBool {
+- value,
+- span: ident.span(),
+- };
+- return Ok((Lit::Bool(lit_bool), rest));
++
++ if let Some((ident, rest)) = cursor.ident() {
++ let value = ident == "true";
++ if value || ident == "false" {
++ let lit_bool = LitBool {
++ value,
++ span: ident.span(),
++ };
++ return Ok((Lit::Bool(lit_bool), rest));
++ }
+ }
++
++ if let Some((punct, rest)) = cursor.punct() {
++ if punct.as_char() == '-' {
++ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
++ return Ok((lit, rest));
++ }
++ }
++ }
++
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
++ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
++ let (lit, rest) = cursor.literal()?;
++
++ let mut span = neg.span();
++ span = span.join(lit.span()).unwrap_or(span);
++
++ let mut repr = lit.to_string();
++ repr.insert(0, '-');
++
++ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
++ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
++ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
++ token.set_span(span);
++ return Some((
++ Lit::Int(LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ));
++ }
++ }
++ }
++
++ let (digits, suffix) = value::parse_lit_float(&repr)?;
++ let mut token = value::to_literal(&repr, &digits, &suffix)?;
++ token.set_span(span);
++ Some((
++ Lit::Float(LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ))
++ }
++
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+@@ -803,19 +848,19 @@ mod printing {
+
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+@@ -855,20 +900,29 @@ mod value {
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+- repr: Box::new(LitStrRepr { token, suffix }),
++ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+ b'"' | b'r' => {
+- return Lit::ByteStr(LitByteStr { token });
++ let (_, suffix) = parse_lit_byte_str(&repr);
++ return Lit::ByteStr(LitByteStr {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'\'' => {
+- return Lit::Byte(LitByte { token });
++ let (_, suffix) = parse_lit_byte(&repr);
++ return Lit::Byte(LitByte {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ _ => {}
+ },
+ b'\'' => {
+- return Lit::Char(LitChar { token });
++ let (_, suffix) = parse_lit_char(&repr);
++ return Lit::Char(LitChar {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'0'..=b'9' | b'-' => {
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
+@@ -905,6 +959,44 @@ mod value {
+
+ panic!("Unrecognized literal: `{}`", repr);
+ }
++
++ pub fn suffix(&self) -> &str {
++ match self {
++ Lit::Str(lit) => lit.suffix(),
++ Lit::ByteStr(lit) => lit.suffix(),
++ Lit::Byte(lit) => lit.suffix(),
++ Lit::Char(lit) => lit.suffix(),
++ Lit::Int(lit) => lit.suffix(),
++ Lit::Float(lit) => lit.suffix(),
++ Lit::Bool(_) | Lit::Verbatim(_) => "",
++ }
++ }
++
++ pub fn span(&self) -> Span {
++ match self {
++ Lit::Str(lit) => lit.span(),
++ Lit::ByteStr(lit) => lit.span(),
++ Lit::Byte(lit) => lit.span(),
++ Lit::Char(lit) => lit.span(),
++ Lit::Int(lit) => lit.span(),
++ Lit::Float(lit) => lit.span(),
++ Lit::Bool(lit) => lit.span,
++ Lit::Verbatim(lit) => lit.span(),
++ }
++ }
++
++ pub fn set_span(&mut self, span: Span) {
++ match self {
++ Lit::Str(lit) => lit.set_span(span),
++ Lit::ByteStr(lit) => lit.set_span(span),
++ Lit::Byte(lit) => lit.set_span(span),
++ Lit::Char(lit) => lit.set_span(span),
++ Lit::Int(lit) => lit.set_span(span),
++ Lit::Float(lit) => lit.set_span(span),
++ Lit::Bool(lit) => lit.span = span,
++ Lit::Verbatim(lit) => lit.set_span(span),
++ }
++ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+@@ -1004,19 +1096,18 @@ mod value {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
+- for end in s[s.len() - pounds..].bytes() {
++ let close = s.rfind('"').unwrap();
++ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+- let content = s[pounds + 1..s.len() - pounds - 1]
+- .to_owned()
+- .into_boxed_str();
+- let suffix = Box::<str>::default(); // todo
++ let content = s[pounds + 1..close].to_owned().into_boxed_str();
++ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
++ // Returns (content, suffix).
++ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+@@ -1028,25 +1119,25 @@ mod value {
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s.as_bytes();
++ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+- let byte = match byte(s, 0) {
++ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1057,10 +1148,10 @@ mod value {
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+- let byte = byte(s, 0);
++ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).unwrap();
+ if ch.is_whitespace() {
+- s = &s[1..];
++ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+@@ -1069,42 +1160,45 @@ mod value {
+ }
+ }
+ b'\r' => {
+- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
+- s = &s[2..];
++ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
++ v = &v[2..];
+ b'\n'
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+- assert_eq!(s, b"\"");
+- out
++ assert_eq!(byte(v, 0), b'"');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (out, suffix)
+ }
+
+- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
++ let (value, suffix) = parse_lit_str_raw(&s[1..]);
++ (String::from(value).into_bytes(), suffix)
+ }
+
+- pub fn parse_lit_byte(s: &str) -> u8 {
++ // Returns (value, suffix).
++ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s[2..].as_bytes();
++ let mut v = s[2..].as_bytes();
+
+- let b = match byte(s, 0) {
++ let b = match byte(v, 0) {
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1118,16 +1212,18 @@ mod value {
+ }
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+
+- assert_eq!(byte(s, 0), b'\'');
+- b
++ assert_eq!(byte(v, 0), b'\'');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (b, suffix)
+ }
+
+- pub fn parse_lit_char(mut s: &str) -> char {
++ // Returns (value, suffix).
++ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+@@ -1163,8 +1259,9 @@ mod value {
+ ch
+ }
+ };
+- assert_eq!(s, "\'", "Expected end of char literal");
+- ch
++ assert_eq!(byte(s, 0), b'\'');
++ let suffix = s[1..].to_owned().into_boxed_str();
++ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+@@ -1334,7 +1431,11 @@ mod value {
+ }
+ b'e' | b'E' => {
+ if has_e {
+- return None;
++ if has_exponent {
++ break;
++ } else {
++ return None;
++ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+@@ -1372,11 +1473,33 @@ mod value {
+ }
+ }
+
+- pub fn to_literal(s: &str) -> Literal {
+- let stream = s.parse::<TokenStream>().unwrap();
+- match stream.into_iter().next().unwrap() {
+- TokenTree::Literal(l) => l,
+- _ => unreachable!(),
++ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
++ if repr.starts_with('-') {
++ if suffix == "f64" {
++ digits.parse().ok().map(Literal::f64_suffixed)
++ } else if suffix == "f32" {
++ digits.parse().ok().map(Literal::f32_suffixed)
++ } else if suffix == "i64" {
++ digits.parse().ok().map(Literal::i64_suffixed)
++ } else if suffix == "i32" {
++ digits.parse().ok().map(Literal::i32_suffixed)
++ } else if suffix == "i16" {
++ digits.parse().ok().map(Literal::i16_suffixed)
++ } else if suffix == "i8" {
++ digits.parse().ok().map(Literal::i8_suffixed)
++ } else if !suffix.is_empty() {
++ None
++ } else if digits.contains('.') {
++ digits.parse().ok().map(Literal::f64_unsuffixed)
++ } else {
++ digits.parse().ok().map(Literal::i64_unsuffixed)
++ }
++ } else {
++ let stream = repr.parse::<TokenStream>().unwrap();
++ match stream.into_iter().next().unwrap() {
++ TokenTree::Literal(l) => Some(l),
++ _ => unreachable!(),
++ }
+ }
+ }
+ }
+diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
+index 6c3dcae92a..de288a34e1 100644
+--- mozilla-release/third_party/rust/syn/src/mac.rs
++++ mozilla-release/third_party/rust/syn/src/mac.rs
+@@ -2,21 +2,17 @@ use super::*;
+ use crate::token::{Brace, Bracket, Paren};
+ use proc_macro2::TokenStream;
+ #[cfg(feature = "parsing")]
+-use proc_macro2::{Delimiter, Span, TokenTree};
++use proc_macro2::{Delimiter, Group, Span, TokenTree};
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Macro #manual_extra_traits {
++ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+@@ -27,7 +23,7 @@ ast_struct! {
+ ast_enum! {
+ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum MacroDelimiter {
+ Paren(Paren),
+@@ -36,39 +32,20 @@ ast_enum! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Macro {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Macro {
+- fn eq(&self, other: &Self) -> bool {
+- self.path == other.path
+- && self.bang_token == other.bang_token
+- && self.delimiter == other.delimiter
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Macro {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.path.hash(state);
+- self.bang_token.hash(state);
+- self.delimiter.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
+- match delimiter {
++fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
++ let delimiter = match macro_delimiter {
++ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
++ MacroDelimiter::Brace(_) => Delimiter::Brace,
++ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
++ };
++ let mut group = Group::new(delimiter, TokenStream::new());
++ group.set_span(match macro_delimiter {
+ MacroDelimiter::Paren(token) => token.span,
+ MacroDelimiter::Brace(token) => token.span,
+ MacroDelimiter::Bracket(token) => token.span,
+- }
++ });
++ group.span_close()
+ }
+
+ impl Macro {
+@@ -163,9 +140,7 @@ impl Macro {
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+- // TODO: see if we can get a group.span_close() span in here as the
+- // scope, rather than the span of the whole group.
+- let scope = delimiter_span(&self.delimiter);
++ let scope = delimiter_span_close(&self.delimiter);
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+ }
+diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
+index 9cac5c15df..8060224381 100644
+--- mozilla-release/third_party/rust/syn/src/macros.rs
++++ mozilla-release/third_party/rust/syn/src/macros.rs
+@@ -4,15 +4,11 @@ macro_rules! ast_struct {
+ struct $name:ident #full $($rest:tt)*
+ ) => {
+ #[cfg(feature = "full")]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+
+ #[cfg(not(feature = "full"))]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name {
+- _noconstruct: (),
++ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+@@ -23,29 +19,10 @@ macro_rules! ast_struct {
+ }
+ };
+
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+ (
+ [$($attrs_pub:tt)*]
+ struct $name:ident $($rest:tt)*
+ ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+ };
+
+@@ -63,21 +40,10 @@ macro_rules! ast_enum {
+ ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
+ );
+
+- (
+- [$($attrs_pub:tt)*]
+- enum $name:ident #manual_extra_traits $($rest:tt)*
+- ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* enum $name $($rest)*
+- );
+-
+ (
+ [$($attrs_pub:tt)*]
+ enum $name:ident $($rest:tt)*
+ ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* enum $name $($rest)*
+ );
+
+@@ -120,15 +86,9 @@ macro_rules! ast_enum_of_structs_impl {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+- $(
+- $(
+- impl From<$member> for $name {
+- fn from(e: $member) -> $name {
+- $name::$variant(e)
+- }
+- }
+- )*
+- )*
++ $($(
++ ast_enum_from_struct!($name::$variant, $member);
++ )*)*
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens! {
+@@ -140,6 +100,19 @@ macro_rules! ast_enum_of_structs_impl {
+ };
+ }
+
++macro_rules! ast_enum_from_struct {
++ // No From<TokenStream> for verbatim variants.
++ ($name:ident::Verbatim, $member:ident) => {};
++
++ ($name:ident::$variant:ident, $member:ident) => {
++ impl From<$member> for $name {
++ fn from(e: $member) -> $name {
++ $name::$variant(e)
++ }
++ }
++ };
++}
++
+ #[cfg(feature = "printing")]
+ macro_rules! generate_to_tokens {
+ (do_not_generate_to_tokens $($foo:tt)*) => ();
+diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
+index 49fb853c79..d254673b40 100644
+--- mozilla-release/third_party/rust/syn/src/op.rs
++++ mozilla-release/third_party/rust/syn/src/op.rs
+@@ -1,9 +1,8 @@
+ ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+@@ -67,9 +66,8 @@ ast_enum! {
+ ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
+index 7c7b194308..abb4c4c14f 100644
+--- mozilla-release/third_party/rust/syn/src/parse.rs
++++ mozilla-release/third_party/rust/syn/src/parse.rs
+@@ -26,8 +26,8 @@
+ //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+ //! use syn::parse::{Parse, ParseStream};
+@@ -109,9 +109,7 @@
+ //! # Ok(())
+ //! # }
+ //! #
+-//! # fn main() {
+-//! # run_parser().unwrap();
+-//! # }
++//! # run_parser().unwrap();
+ //! ```
+ //!
+ //! The [`parse_quote!`] macro also uses this approach.
+@@ -155,8 +153,8 @@
+ //! [`Parser`]: trait.Parser.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::parse::Parser;
+ //! use syn::punctuated::Punctuated;
+@@ -186,7 +184,7 @@
+ //!
+ //! ---
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ #[path = "discouraged.rs"]
+ pub mod discouraged;
+@@ -217,6 +215,11 @@ pub use crate::lookahead::{Lookahead1, Peek};
+
+ /// Parsing interface implemented by all types that can be parsed in a default
+ /// way from a token stream.
++///
++/// Refer to the [module documentation] for details about implementing and using
++/// the `Parse` trait.
++///
++/// [module documentation]: self
+ pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+ }
+@@ -263,13 +266,16 @@ pub struct ParseBuffer<'a> {
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+ }
+
+ impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+- if !self.is_empty() && self.unexpected.get().is_none() {
+- self.unexpected.set(Some(self.cursor().span()));
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
++ let (inner, old_span) = inner_unexpected(self);
++ if old_span.is_none() {
++ inner.set(Unexpected::Some(unexpected_span));
++ }
+ }
+ }
+ }
+@@ -324,15 +330,12 @@ impl<'a> Debug for ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+-/// # fn main() {
+-/// # use syn::parse::Parser;
+-/// # let remainder = remainder_after_skipping_past_next_at
+-/// # .parse_str("a @ b c")
+-/// # .unwrap();
+-/// # assert_eq!(remainder.to_string(), "b c");
+-/// # }
++/// # use syn::parse::Parser;
++/// # let remainder = remainder_after_skipping_past_next_at
++/// # .parse_str("a @ b c")
++/// # .unwrap();
++/// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+-#[derive(Copy, Clone)]
+ pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+@@ -356,6 +359,14 @@ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ }
+ }
+
++impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
++
++impl<'c, 'a> Clone for StepCursor<'c, 'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
+ impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+@@ -375,36 +386,81 @@ pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+ }
+
+-fn skip(input: ParseStream) -> bool {
+- input
+- .step(|cursor| {
+- if let Some((_lifetime, rest)) = cursor.lifetime() {
+- Ok((true, rest))
+- } else if let Some((_token, rest)) = cursor.token_tree() {
+- Ok((true, rest))
+- } else {
+- Ok((false, *cursor))
+- }
+- })
+- .unwrap()
+-}
+-
+ pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Rc<Cell<Unexpected>>,
+ ) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+- unexpected,
++ unexpected: Cell::new(Some(unexpected)),
++ }
++}
++
++pub(crate) enum Unexpected {
++ None,
++ Some(Span),
++ Chain(Rc<Cell<Unexpected>>),
++}
++
++impl Default for Unexpected {
++ fn default() -> Self {
++ Unexpected::None
+ }
+ }
+
+-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
+- buffer.unexpected.clone()
++impl Clone for Unexpected {
++ fn clone(&self) -> Self {
++ match self {
++ Unexpected::None => Unexpected::None,
++ Unexpected::Some(span) => Unexpected::Some(*span),
++ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
++ }
++ }
++}
++
++// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
++// swapping in a None is cheap.
++fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
++ let prev = cell.take();
++ let ret = prev.clone();
++ cell.set(prev);
++ ret
++}
++
++fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
++ let mut unexpected = get_unexpected(buffer);
++ loop {
++ match cell_clone(&unexpected) {
++ Unexpected::None => return (unexpected, None),
++ Unexpected::Some(span) => return (unexpected, Some(span)),
++ Unexpected::Chain(next) => unexpected = next,
++ }
++ }
++}
++
++pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
++ cell_clone(&buffer.unexpected).unwrap()
++}
++
++fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
++ if cursor.eof() {
++ return None;
++ }
++ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
++ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
++ return Some(unexpected);
++ }
++ cursor = rest;
++ }
++ if cursor.eof() {
++ None
++ } else {
++ Some(cursor.span())
++ }
+ }
+
+ impl<'a> ParseBuffer<'a> {
+@@ -566,14 +622,17 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor().skip().map_or(false, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor()
++ .skip()
++ .and_then(Cursor::skip)
++ .map_or(false, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+@@ -615,12 +674,10 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// }
+ /// #
+- /// # fn main() {
+- /// # let input = quote! {
+- /// # struct S(A, B);
+- /// # };
+- /// # syn::parse2::<TupleStruct>(input).unwrap();
+- /// # }
++ /// # let input = quote! {
++ /// # struct S(A, B);
++ /// # };
++ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ pub fn parse_terminated<T, P: Parse>(
+ &self,
+@@ -847,8 +904,8 @@ impl<'a> ParseBuffer<'a> {
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+- // parses all the way.
+- unexpected: Rc::new(Cell::new(None)),
++ // parses all the way unless we `advance_to`.
++ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+@@ -923,13 +980,11 @@ impl<'a> ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+- /// # fn main() {
+- /// # use syn::parse::Parser;
+- /// # let remainder = remainder_after_skipping_past_next_at
+- /// # .parse_str("a @ b c")
+- /// # .unwrap();
+- /// # assert_eq!(remainder.to_string(), "b c");
+- /// # }
++ /// # use syn::parse::Parser;
++ /// # let remainder = remainder_after_skipping_past_next_at
++ /// # .parse_str("a @ b c")
++ /// # .unwrap();
++ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+@@ -961,6 +1016,18 @@ impl<'a> ParseBuffer<'a> {
+ Ok(node)
+ }
+
++ /// Returns the `Span` of the next token in the parse stream, or
++ /// `Span::call_site()` if this parse stream has completely exhausted its
++ /// input `TokenStream`.
++ pub fn span(&self) -> Span {
++ let cursor = self.cursor();
++ if cursor.eof() {
++ self.scope
++ } else {
++ crate::buffer::open_span_of_group(cursor)
++ }
++ }
++
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+@@ -971,7 +1038,7 @@ impl<'a> ParseBuffer<'a> {
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+- match self.unexpected.get() {
++ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+@@ -1048,7 +1115,7 @@ impl Parse for Literal {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait Parser: Sized {
+ type Output;
+
+@@ -1063,7 +1130,7 @@ pub trait Parser: Sized {
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -1088,6 +1155,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+@@ -1095,6 +1163,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ input.parse().and_then(|tokens| self.parse2(tokens))
+ }
+@@ -1103,7 +1172,7 @@ pub trait Parser: Sized {
+ fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+ }
+
+@@ -1118,38 +1187,42 @@ where
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ self(input)
+ }
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
+ f.__parse_stream(input)
+ }
+@@ -1160,8 +1233,8 @@ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Out
+ /// provided any attribute args.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::parse_macro_input;
+ /// use syn::parse::Nothing;
+diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
+index d6e0725c17..c8fc1cea37 100644
+--- mozilla-release/third_party/rust/syn/src/parse_macro_input.rs
++++ mozilla-release/third_party/rust/syn/src/parse_macro_input.rs
+@@ -16,8 +16,8 @@
+ /// #\[proc_macro_attribute\] attribute.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, Result};
+ /// use syn::parse::{Parse, ParseStream};
+@@ -43,7 +43,31 @@
+ /// # "".parse().unwrap()
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++///
++/// <br>
++///
++/// # Expansion
++///
++/// `parse_macro_input!($variable as $Type)` expands to something like:
++///
++/// ```no_run
++/// # extern crate proc_macro;
++/// #
++/// # macro_rules! doc_test {
++/// # ($variable:ident as $Type:ty) => {
++/// match syn::parse::<$Type>($variable) {
++/// Ok(syntax_tree) => syntax_tree,
++/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
++/// }
++/// # };
++/// # }
++/// #
++/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
++/// # let _ = doc_test!(input as syn::Ident);
++/// # proc_macro::TokenStream::new()
++/// # }
++/// ```
++#[macro_export]
+ macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+@@ -54,7 +78,7 @@ macro_rules! parse_macro_input {
+ }
+ };
+ ($tokenstream:ident) => {
+- parse_macro_input!($tokenstream as _)
++ $crate::parse_macro_input!($tokenstream as _)
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
+index 18a47b95c7..66aa818cd0 100644
+--- mozilla-release/third_party/rust/syn/src/parse_quote.rs
++++ mozilla-release/third_party/rust/syn/src/parse_quote.rs
+@@ -24,7 +24,7 @@
+ /// }
+ /// ```
+ ///
+-/// *This macro is available if Syn is built with the `"parsing"` feature,
++/// *This macro is available only if Syn is built with the `"parsing"` feature,
+ /// although interpolation of syntax tree nodes into the quoted tokens is only
+ /// supported if Syn is built with the `"printing"` feature as well.*
+ ///
+@@ -56,8 +56,10 @@
+ /// or inner like `#![...]`
+ /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+ /// `P` with optional trailing punctuation
++/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+ ///
+ /// [`Punctuated<T, P>`]: punctuated::Punctuated
++/// [`Vec<Stmt>`]: Block::parse_within
+ ///
+ /// # Panics
+ ///
+@@ -67,7 +69,7 @@
+ //
+ // TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
+ // https://github.com/rust-lang/rust/issues/62834
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::parse_quote::parse(
+@@ -112,6 +114,8 @@ impl<T: Parse> ParseQuote for T {
+ use crate::punctuated::Punctuated;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::{attr, Attribute};
++#[cfg(feature = "full")]
++use crate::{Block, Stmt};
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl ParseQuote for Attribute {
+@@ -129,3 +133,10 @@ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ Self::parse_terminated(input)
+ }
+ }
++
++#[cfg(feature = "full")]
++impl ParseQuote for Vec<Stmt> {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Block::parse_within(input)
++ }
++}
+diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
+index 9371e05493..e9576a2361 100644
+--- mozilla-release/third_party/rust/syn/src/pat.rs
++++ mozilla-release/third_party/rust/syn/src/pat.rs
+@@ -1,16 +1,12 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Pat #manual_extra_traits {
++ pub enum Pat {
+ /// A box pattern: `box v`.
+ Box(PatBox),
+
+@@ -86,7 +82,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A box pattern: `box v`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatBox {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -97,7 +93,10 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// It may also be a unit struct or struct variant (e.g. `None`), or a
++ /// constant; these cannot be distinguished syntactically.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+@@ -113,7 +112,7 @@ ast_struct! {
+ /// This holds an `Expr` rather than a `Lit` because negative numbers
+ /// are represented as an `Expr::Unary`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatLit {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -123,7 +122,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in pattern position.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -133,7 +132,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+@@ -150,7 +149,7 @@ ast_struct! {
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+@@ -161,7 +160,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range pattern: `1..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRange {
+ pub attrs: Vec<Attribute>,
+ pub lo: Box<Expr>,
+@@ -173,7 +172,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference pattern: `&mut var`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -185,7 +184,7 @@ ast_struct! {
+ ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+@@ -195,7 +194,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -206,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -219,7 +218,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -230,7 +229,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -241,7 +240,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+@@ -253,7 +252,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any value: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+@@ -266,7 +265,7 @@ ast_struct! {
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+@@ -275,122 +274,17 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Pat {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Pat {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Pat::Box(this), Pat::Box(other)) => this == other,
+- (Pat::Ident(this), Pat::Ident(other)) => this == other,
+- (Pat::Lit(this), Pat::Lit(other)) => this == other,
+- (Pat::Macro(this), Pat::Macro(other)) => this == other,
+- (Pat::Or(this), Pat::Or(other)) => this == other,
+- (Pat::Path(this), Pat::Path(other)) => this == other,
+- (Pat::Range(this), Pat::Range(other)) => this == other,
+- (Pat::Reference(this), Pat::Reference(other)) => this == other,
+- (Pat::Rest(this), Pat::Rest(other)) => this == other,
+- (Pat::Slice(this), Pat::Slice(other)) => this == other,
+- (Pat::Struct(this), Pat::Struct(other)) => this == other,
+- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
+- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
+- (Pat::Type(this), Pat::Type(other)) => this == other,
+- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Pat::Wild(this), Pat::Wild(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Pat {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Pat::Box(pat) => {
+- hash.write_u8(0);
+- pat.hash(hash);
+- }
+- Pat::Ident(pat) => {
+- hash.write_u8(1);
+- pat.hash(hash);
+- }
+- Pat::Lit(pat) => {
+- hash.write_u8(2);
+- pat.hash(hash);
+- }
+- Pat::Macro(pat) => {
+- hash.write_u8(3);
+- pat.hash(hash);
+- }
+- Pat::Or(pat) => {
+- hash.write_u8(4);
+- pat.hash(hash);
+- }
+- Pat::Path(pat) => {
+- hash.write_u8(5);
+- pat.hash(hash);
+- }
+- Pat::Range(pat) => {
+- hash.write_u8(6);
+- pat.hash(hash);
+- }
+- Pat::Reference(pat) => {
+- hash.write_u8(7);
+- pat.hash(hash);
+- }
+- Pat::Rest(pat) => {
+- hash.write_u8(8);
+- pat.hash(hash);
+- }
+- Pat::Slice(pat) => {
+- hash.write_u8(9);
+- pat.hash(hash);
+- }
+- Pat::Struct(pat) => {
+- hash.write_u8(10);
+- pat.hash(hash);
+- }
+- Pat::Tuple(pat) => {
+- hash.write_u8(11);
+- pat.hash(hash);
+- }
+- Pat::TupleStruct(pat) => {
+- hash.write_u8(12);
+- pat.hash(hash);
+- }
+- Pat::Type(pat) => {
+- hash.write_u8(13);
+- pat.hash(hash);
+- }
+- Pat::Verbatim(pat) => {
+- hash.write_u8(14);
+- TokenStreamHelper(pat).hash(hash);
+- }
+- Pat::Wild(pat) => {
+- hash.write_u8(15);
+- pat.hash(hash);
+- }
+- Pat::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-mod parsing {
++pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
+ use crate::path;
+
+ impl Parse for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && ({
+@@ -411,7 +305,6 @@ mod parsing {
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+@@ -434,7 +327,7 @@ mod parsing {
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+- input.call(pat_rest).map(Pat::Rest)
++ pat_range_half_open(input, begin)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -442,10 +335,11 @@ mod parsing {
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
++ let begin = input.fork();
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if input.peek(Token![..]) {
+- return pat_range(input, qself, path).map(Pat::Range);
++ return pat_range(input, begin, qself, path);
+ }
+
+ if qself.is_some() {
+@@ -487,7 +381,7 @@ mod parsing {
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+- pat_range(input, qself, path).map(Pat::Range)
++ pat_range(input, begin, qself, path)
+ } else {
+ Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+@@ -546,7 +440,7 @@ mod parsing {
+ while !content.is_empty() && !content.peek(Token![..]) {
+ let value = content.call(field_pat)?;
+ fields.push_value(value);
+- if !content.peek(Token![,]) {
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+@@ -578,6 +472,7 @@ mod parsing {
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+@@ -587,10 +482,10 @@ mod parsing {
+ || member.is_unnamed()
+ {
+ return Ok(FieldPat {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token: input.parse()?,
+- pat: input.parse()?,
++ pat: Box::new(multi_pat(input)?),
+ });
+ }
+
+@@ -610,30 +505,57 @@ mod parsing {
+ if let Some(boxed) = boxed {
+ pat = Pat::Box(PatBox {
+ attrs: Vec::new(),
+- pat: Box::new(pat),
+ box_token: boxed,
++ pat: Box::new(pat),
+ });
+ }
+
+ Ok(FieldPat {
++ attrs,
+ member: Member::Named(ident),
+- pat: Box::new(pat),
+- attrs: Vec::new(),
+ colon_token: None,
++ pat: Box::new(pat),
+ })
+ }
+
+- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
+- Ok(PatRange {
+- attrs: Vec::new(),
+- lo: Box::new(Expr::Path(ExprPath {
++ fn pat_range(
++ input: ParseStream,
++ begin: ParseBuffer,
++ qself: Option<QSelf>,
++ path: Path,
++ ) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
+ attrs: Vec::new(),
+- qself,
+- path,
+- })),
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- })
++ lo: Box::new(Expr::Path(ExprPath {
++ attrs: Vec::new(),
++ qself,
++ path,
++ })),
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
++ }
++
++ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if hi.is_some() {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ } else {
++ match limits {
++ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
++ attrs: Vec::new(),
++ dot2_token,
++ })),
++ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
++ }
++ }
+ }
+
+ fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
+@@ -642,7 +564,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -668,14 +590,21 @@ mod parsing {
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+- let lo = input.call(pat_lit_expr)?;
++ let begin = input.fork();
++ let lo = input.call(pat_lit_expr)?.unwrap();
+ if input.peek(Token![..]) {
+- Ok(Pat::Range(PatRange {
+- attrs: Vec::new(),
+- lo,
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- }))
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
++ attrs: Vec::new(),
++ lo,
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Ok(Pat::Lit(PatLit {
+ attrs: Vec::new(),
+@@ -684,7 +613,17 @@ mod parsing {
+ }
+ }
+
+- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
++ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
++ if input.is_empty()
++ || input.peek(Token![|])
++ || input.peek(Token![=>])
++ || input.peek(Token![:]) && !input.peek(Token![::])
++ || input.peek(Token![,])
++ || input.peek(Token![;])
++ {
++ return Ok(None);
++ }
++
+ let neg: Option<Token![-]> = input.parse()?;
+
+ let lookahead = input.lookahead1();
+@@ -696,7 +635,6 @@ mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ {
+ Expr::Path(input.parse()?)
+@@ -704,7 +642,7 @@ mod parsing {
+ return Err(lookahead.error());
+ };
+
+- Ok(Box::new(if let Some(neg) = neg {
++ Ok(Some(Box::new(if let Some(neg) = neg {
+ Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: UnOp::Neg(neg),
+@@ -712,7 +650,7 @@ mod parsing {
+ })
+ } else {
+ expr
+- }))
++ })))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+@@ -721,7 +659,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -737,11 +675,35 @@ mod parsing {
+ })
+ }
+
+- fn pat_rest(input: ParseStream) -> Result<PatRest> {
+- Ok(PatRest {
+- attrs: Vec::new(),
+- dot2_token: input.parse()?,
+- })
++ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
++ multi_pat_impl(input, None)
++ }
++
++ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
++ let leading_vert: Option<Token![|]> = input.parse()?;
++ multi_pat_impl(input, leading_vert)
++ }
++
++ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
++ let mut pat: Pat = input.parse()?;
++ if leading_vert.is_some()
++ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
++ {
++ let mut cases = Punctuated::new();
++ cases.push_value(pat);
++ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
++ let punct = input.parse()?;
++ cases.push_punct(punct);
++ let pat: Pat = input.parse()?;
++ cases.push_value(pat);
++ }
++ pat = Pat::Or(PatOr {
++ attrs: Vec::new(),
++ leading_vert,
++ cases,
++ });
++ }
++ Ok(pat)
+ }
+ }
+
+@@ -756,12 +718,14 @@ mod printing {
+
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+@@ -774,6 +738,7 @@ mod printing {
+
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+@@ -788,6 +753,7 @@ mod printing {
+
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -804,12 +770,14 @@ mod printing {
+
+ impl ToTokens for PatPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ private::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -818,6 +786,7 @@ mod printing {
+
+ impl ToTokens for PatBox {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.box_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -825,6 +794,7 @@ mod printing {
+
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+@@ -833,18 +803,21 @@ mod printing {
+
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.expr.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.lo.to_tokens(tokens);
+ match &self.limits {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+@@ -856,6 +829,7 @@ mod printing {
+
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -864,12 +838,14 @@ mod printing {
+
+ impl ToTokens for PatMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+@@ -877,6 +853,7 @@ mod printing {
+
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
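
[Editor's note, not part of the patch: a minimal sketch of the pattern-parsing behaviour the pat.rs hunks above implement, assuming a test crate depending on syn 1.0 (at a release containing these changes) with the "full" feature enabled.]

use syn::Pat;

fn main() -> syn::Result<()> {
    // A bare `..` is a rest pattern; pat_range_half_open above falls back
    // to Pat::Rest when no upper bound follows the `..`.
    let rest: Pat = syn::parse_str("..")?;
    assert!(matches!(rest, Pat::Rest(_)));

    // A bounded range still parses as Pat::Range.
    let range: Pat = syn::parse_str("1..=5")?;
    assert!(matches!(range, Pat::Range(_)));
    Ok(())
}
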
+diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
+index 8dda43ee67..15c0fcc664 100644
+--- mozilla-release/third_party/rust/syn/src/path.rs
++++ mozilla-release/third_party/rust/syn/src/path.rs
+@@ -2,9 +2,9 @@ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+- /// A path at which a named item is exported: `std::collections::HashMap`.
++ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+@@ -29,7 +29,7 @@ where
+ ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct PathSegment {
+ pub ident: Ident,
+@@ -52,7 +52,7 @@ where
+ ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Angle bracketed
+@@ -98,7 +98,7 @@ impl PathArguments {
+ ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum GenericArgument {
+ /// A lifetime argument.
+@@ -122,7 +122,7 @@ ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+@@ -135,7 +135,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binding (equality constraint) on an associated type: `Item = u8`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Binding {
+ pub ident: Ident,
+@@ -147,7 +147,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Constraint {
+ pub ident: Ident,
+@@ -160,7 +160,7 @@ ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+@@ -189,7 +189,7 @@ ast_struct! {
+ /// ty position = 0
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct QSelf {
+ pub lt_token: Token![<],
+@@ -291,11 +291,7 @@ pub mod parsing {
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+- if input.peek(Token![super])
+- || input.peek(Token![self])
+- || input.peek(Token![crate])
+- || input.peek(Token![extern])
+- {
++ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+@@ -358,7 +354,7 @@ pub mod parsing {
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -400,7 +396,6 @@ pub mod parsing {
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+- && !input.peek(Token![extern])
+ {
+ break;
+ }
+@@ -433,7 +428,7 @@ pub mod parsing {
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -472,7 +467,7 @@ pub mod parsing {
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
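
[Editor's note, not part of the patch: a short illustration of the Path helpers whose doc comments are touched above, assuming syn 1.0 with default features.]

use syn::Path;

fn main() -> syn::Result<()> {
    let long: Path = syn::parse_str("std::collections::HashMap")?;
    // get_ident() returns None for multi-segment paths.
    assert!(long.get_ident().is_none());

    let short: Path = syn::parse_str("HashMap")?;
    assert!(short.is_ident("HashMap"));
    Ok(())
}
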
+diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
+index 38c7bf4e82..46c82a65b1 100644
+--- mozilla-release/third_party/rust/syn/src/punctuated.rs
++++ mozilla-release/third_party/rust/syn/src/punctuated.rs
+@@ -22,6 +22,8 @@
+
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
++#[cfg(feature = "extra-traits")]
++use std::hash::{Hash, Hasher};
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use std::iter;
+ use std::iter::FromIterator;
+@@ -41,8 +43,6 @@ use crate::token::Token;
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+@@ -76,22 +76,19 @@ impl<T, P> Punctuated<T, P> {
+ self.iter().next()
+ }
+
++ /// Mutably borrows the first element in this sequence.
++ pub fn first_mut(&mut self) -> Option<&mut T> {
++ self.iter_mut().next()
++ }
++
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+- if self.last.is_some() {
+- self.last.as_ref().map(Box::as_ref)
+- } else {
+- self.inner.last().map(|pair| &pair.0)
+- }
++ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+- if self.last.is_some() {
+- self.last.as_mut().map(Box::as_mut)
+- } else {
+- self.inner.last_mut().map(|pair| &mut pair.0)
+- }
++ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+@@ -230,13 +227,19 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++ /// Clears the sequence of all values and punctuation, making it empty.
++ pub fn clear(&mut self) {
++ self.inner.clear();
++ self.last = None;
++ }
++
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+@@ -256,7 +259,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated_with(
+@@ -292,7 +295,7 @@ impl<T, P> Punctuated<T, P> {
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+@@ -312,7 +315,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty_with(
+@@ -338,6 +341,53 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Punctuated<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ Punctuated {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Eq for Punctuated<T, P>
++where
++ T: Eq,
++ P: Eq,
++{
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> PartialEq for Punctuated<T, P>
++where
++ T: PartialEq,
++ P: PartialEq,
++{
++ fn eq(&self, other: &Self) -> bool {
++ let Punctuated { inner, last } = self;
++ *inner == other.inner && *last == other.last
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Hash for Punctuated<T, P>
++where
++ T: Hash,
++ P: Hash,
++{
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ let Punctuated { inner, last } = self;
++ inner.hash(state);
++ last.hash(state);
++ }
++}
++
+ #[cfg(feature = "extra-traits")]
+ impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -536,7 +586,6 @@ impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+@@ -572,12 +621,24 @@ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ }
+ }
+
++impl<T, P> Clone for IntoPairs<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoPairs {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
+ /// An iterator over owned values of type `T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+ }
+@@ -606,6 +667,17 @@ impl<T> ExactSizeIterator for IntoIter<T> {
+ }
+ }
+
++impl<T> Clone for IntoIter<T>
++where
++ T: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoIter {
++ inner: self.inner.clone(),
++ }
++ }
++}
++
+ /// An iterator over borrowed values of type `&T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+@@ -799,7 +871,6 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+@@ -856,6 +927,20 @@ impl<T, P> Pair<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Pair<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ match self {
++ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
++ Pair::End(t) => Pair::End(t.clone()),
++ }
++ }
++}
++
+ impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
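
[Editor's note, not part of the patch: a sketch of the Punctuated helpers added above (first_mut, clear), assuming syn 1.0 at a version that includes them, with default features.]

use syn::punctuated::Punctuated;
use syn::{parse_quote, Token, Type};

fn main() {
    let mut elems: Punctuated<Type, Token![,]> = Punctuated::new();
    elems.push(parse_quote!(u8));
    elems.push(parse_quote!(Vec<String>));
    assert_eq!(elems.len(), 2);

    // first_mut() is the new mutable counterpart of first().
    if let Some(first) = elems.first_mut() {
        *first = parse_quote!(u16);
    }

    // clear() drops both the values and the separating punctuation.
    elems.clear();
    assert!(elems.is_empty());
}
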
+diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
+new file mode 100644
+index 0000000000..ccfb8b5ad0
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/reserved.rs
+@@ -0,0 +1,42 @@
++// Type for a syntax tree node that is reserved for future use.
++//
++// For example ExprReference contains a field `raw` of type Reserved. If `&raw
++// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
++// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
++// the possibility of breaking any code.
++
++use proc_macro2::Span;
++use std::marker::PhantomData;
++
++#[cfg(feature = "extra-traits")]
++use std::fmt::{self, Debug};
++
++ast_struct! {
++ pub struct Reserved {
++ _private: PhantomData<Span>,
++ }
++}
++
++impl Default for Reserved {
++ fn default() -> Self {
++ Reserved {
++ _private: PhantomData,
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for Reserved {
++ fn clone(&self) -> Self {
++ Reserved {
++ _private: self._private,
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl Debug for Reserved {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter.debug_struct("Reserved").finish()
++ }
++}
+diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
+index 71ffe26b81..01591cedcb 100644
+--- mozilla-release/third_party/rust/syn/src/spanned.rs
++++ mozilla-release/third_party/rust/syn/src/spanned.rs
+@@ -1,7 +1,7 @@
+ //! A trait that can provide the `Span` of the complete contents of a syntax
+ //! tree node.
+ //!
+-//! *This module is available if Syn is built with both the `"parsing"` and
++//! *This module is available only if Syn is built with both the `"parsing"` and
+ //! `"printing"` features.*
+ //!
+ //! <br>
+@@ -97,7 +97,7 @@ use quote::spanned::Spanned as ToTokens;
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with both the `"parsing"` and
++/// *This trait is available only if Syn is built with both the `"parsing"` and
+ /// `"printing"` features.*
+ pub trait Spanned {
+ /// Returns a `Span` covering the complete contents of this syntax tree
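
[Editor's note, not part of the patch: the Spanned trait documented above in use, assuming syn 1.0 with the default "parsing" and "printing" features.]

use syn::spanned::Spanned;
use syn::Type;

fn main() -> syn::Result<()> {
    let ty: Type = syn::parse_str("Vec<String>")?;
    // span() covers the complete tokens of the node; error reporting via
    // syn::Error::new_spanned builds on this.
    let _span = ty.span();
    Ok(())
}
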
+diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
+index e4277fdbaa..b06e843d75 100644
+--- mozilla-release/third_party/rust/syn/src/stmt.rs
++++ mozilla-release/third_party/rust/syn/src/stmt.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A braced block containing Rust statements.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+@@ -14,7 +14,7 @@ ast_struct! {
+ ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+@@ -33,7 +33,7 @@ ast_enum! {
+ ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -47,14 +47,15 @@ ast_struct! {
+ pub mod parsing {
+ use super::*;
+
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+- use crate::punctuated::Punctuated;
++ use proc_macro2::TokenStream;
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -106,8 +107,8 @@ pub mod parsing {
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+- while input.peek(Token![;]) {
+- input.parse::<Token![;]>()?;
++ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+@@ -146,55 +147,55 @@ pub mod parsing {
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- // TODO: optimize using advance_to
++ let mut attrs = input.call(Attribute::parse_outer)?;
++
++ // brace-style macros; paren and bracket macros get parsed as
++ // expression statements.
+ let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
++ if let Ok(path) = ahead.call(Path::parse_mod_style) {
++ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
++ input.advance_to(&ahead);
++ return stmt_mac(input, attrs, path);
++ }
++ }
+
+- if {
+- let ahead = ahead.fork();
+- // Only parse braces here; paren and bracket will get parsed as
+- // expression statements
+- ahead.call(Path::parse_mod_style).is_ok()
+- && ahead.parse::<Token![!]>().is_ok()
+- && (ahead.peek(token::Brace) || ahead.peek(Ident))
+- } {
+- stmt_mac(input)
+- } else if ahead.peek(Token![let]) {
+- stmt_local(input).map(Stmt::Local)
+- } else if ahead.peek(Token![pub])
+- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![use])
+- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
+- || ahead.peek(Token![const])
+- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
+- || ahead.peek(Token![async])
+- && (ahead.peek2(Token![unsafe])
+- || ahead.peek2(Token![extern])
+- || ahead.peek2(Token![fn]))
+- || ahead.peek(Token![fn])
+- || ahead.peek(Token![mod])
+- || ahead.peek(Token![type])
+- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
+- || ahead.peek(Token![struct])
+- || ahead.peek(Token![enum])
+- || ahead.peek(Token![union]) && ahead.peek2(Ident)
+- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+- || ahead.peek(Token![trait])
+- || ahead.peek(Token![default])
+- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
+- || ahead.peek(Token![impl])
+- || ahead.peek(Token![macro])
++ if input.peek(Token![let]) {
++ stmt_local(input, attrs).map(Stmt::Local)
++ } else if input.peek(Token![pub])
++ || input.peek(Token![crate]) && !input.peek2(Token![::])
++ || input.peek(Token![extern])
++ || input.peek(Token![use])
++ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
++ || input.peek(Token![const])
++ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
++ || input.peek(Token![async])
++ && (input.peek2(Token![unsafe])
++ || input.peek2(Token![extern])
++ || input.peek2(Token![fn]))
++ || input.peek(Token![fn])
++ || input.peek(Token![mod])
++ || input.peek(Token![type])
++ || input.peek(item::parsing::existential) && input.peek2(Token![type])
++ || input.peek(Token![struct])
++ || input.peek(Token![enum])
++ || input.peek(Token![union]) && input.peek2(Ident)
++ || input.peek(Token![auto]) && input.peek2(Token![trait])
++ || input.peek(Token![trait])
++ || input.peek(Token![default])
++ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
++ || input.peek(Token![impl])
++ || input.peek(Token![macro])
+ {
+- input.parse().map(Stmt::Item)
++ let mut item: Item = input.parse()?;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(Stmt::Item(item))
+ } else {
+- stmt_expr(input, allow_nosemi)
++ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
+- let attrs = input.call(Attribute::parse_outer)?;
+- let path = input.call(Path::parse_mod_style)?;
++ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+@@ -213,33 +214,12 @@ pub mod parsing {
+ })))
+ }
+
+- fn stmt_local(input: ParseStream) -> Result<Local> {
++ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ Ok(Local {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ let_token: input.parse()?,
+ pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+@@ -265,12 +245,19 @@ pub mod parsing {
+ })
+ }
+
+- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ fn stmt_expr(
++ input: ParseStream,
++ allow_nosemi: bool,
++ mut attrs: Vec<Attribute>,
++ ) -> Result<Stmt> {
+ let mut e = expr::parsing::expr_early(input)?;
+
+- attrs.extend(e.replace_attrs(Vec::new()));
+- e.replace_attrs(attrs);
++ let mut attr_target = &mut e;
++ while let Expr::Binary(e) = attr_target {
++ attr_target = &mut e.left;
++ }
++ attrs.extend(attr_target.replace_attrs(Vec::new()));
++ attr_target.replace_attrs(attrs);
+
+ if input.peek(Token![;]) {
+ return Ok(Stmt::Semi(e, input.parse()?));
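
[Editor's note, not part of the patch: with the reworked parse_within above, stray semicolons inside a block are kept as empty Stmt::Semi statements instead of being dropped. The sketch assumes syn 1.0 (at a version containing this change) with the "full" feature.]

use syn::{Block, Stmt};

fn main() -> syn::Result<()> {
    let block: Block = syn::parse_str("{ ; let x = 1; x }")?;
    // leading `;`, the `let` binding, and the trailing expression
    assert_eq!(block.stmts.len(), 3);
    assert!(matches!(block.stmts[0], Stmt::Semi(..)));
    Ok(())
}
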
+diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
+index 0b8c18192f..8539378c5e 100644
+--- mozilla-release/third_party/rust/syn/src/token.rs
++++ mozilla-release/third_party/rust/syn/src/token.rs
+@@ -88,7 +88,6 @@
+ //! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+ //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+-use std;
+ #[cfg(feature = "extra-traits")]
+ use std::cmp;
+ #[cfg(feature = "extra-traits")]
+@@ -97,13 +96,13 @@ use std::fmt::{self, Debug};
+ use std::hash::{Hash, Hasher};
+ use std::ops::{Deref, DerefMut};
+
+-#[cfg(feature = "parsing")]
+-use proc_macro2::Delimiter;
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ use proc_macro2::Ident;
+ use proc_macro2::Span;
+ #[cfg(feature = "printing")]
+ use proc_macro2::TokenStream;
++#[cfg(feature = "parsing")]
++use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
+ #[cfg(feature = "printing")]
+ use quote::{ToTokens, TokenStreamExt};
+
+@@ -112,10 +111,8 @@ use self::private::WithSpan;
+ use crate::buffer::Cursor;
+ #[cfg(feature = "parsing")]
+ use crate::error::Result;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lifetime::Lifetime;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+ #[cfg(feature = "parsing")]
+@@ -155,21 +152,20 @@ mod private {
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for Ident {}
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
++ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! impl_token {
+- ($name:ident $display:expr) => {
++ ($display:tt $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+@@ -189,24 +185,38 @@ macro_rules! impl_token {
+ };
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lifetime "lifetime");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lit "literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitStr "string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByteStr "byte string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByte "byte literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitChar "character literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitInt "integer literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitFloat "floating point literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitBool "boolean literal");
++impl_token!("lifetime" Lifetime);
++impl_token!("literal" Lit);
++impl_token!("string literal" LitStr);
++impl_token!("byte string literal" LitByteStr);
++impl_token!("byte literal" LitByte);
++impl_token!("character literal" LitChar);
++impl_token!("integer literal" LitInt);
++impl_token!("floating point literal" LitFloat);
++impl_token!("boolean literal" LitBool);
++impl_token!("group token" proc_macro2::Group);
++
++macro_rules! impl_low_level_token {
++ ($display:tt $ty:ident $get:ident) => {
++ #[cfg(feature = "parsing")]
++ impl Token for $ty {
++ fn peek(cursor: Cursor) -> bool {
++ cursor.$get().is_some()
++ }
++
++ fn display() -> &'static str {
++ $display
++ }
++ }
++
++ #[cfg(feature = "parsing")]
++ impl private::Sealed for $ty {}
++ };
++}
++
++impl_low_level_token!("punctuation token" Punct punct);
++impl_low_level_token!("literal" Literal literal);
++impl_low_level_token!("token" TokenTree token_tree);
+
+ // Not public API.
+ #[doc(hidden)]
+@@ -233,7 +243,6 @@ impl<T: CustomToken> Token for T {
+ macro_rules! define_keywords {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+@@ -260,6 +269,16 @@ macro_rules! define_keywords {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -338,7 +357,6 @@ macro_rules! impl_deref_if_len_is_1 {
+ macro_rules! define_punctuation_structs {
+ ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[repr(C)]
+ #[$doc]
+ ///
+@@ -366,6 +384,16 @@ macro_rules! define_punctuation_structs {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -436,7 +464,6 @@ macro_rules! define_punctuation {
+ macro_rules! define_delimiters {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ pub struct $name {
+ pub span: Span,
+@@ -458,6 +485,16 @@ macro_rules! define_delimiters {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -855,7 +892,7 @@ pub mod parsing {
+ }
+
+ pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
+- let mut spans = [input.cursor().span(); 3];
++ let mut spans = [input.span(); 3];
+ punct_helper(input, token, &mut spans)?;
+ Ok(S::from_spans(&spans))
+ }
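
[Editor's note, not part of the patch: the Copy/Clone impls for token types are now written out explicitly instead of derived, but behave the same under the default "clone-impls" feature; assuming syn 1.0.]

use syn::Token;

fn main() {
    let plus: Token![+] = Default::default();
    let a = plus; // Copy: the original binding stays usable
    let b = plus;
    let _ = (a, b);
}
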
+diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
+index f860eebb4f..8dba0627cd 100644
+--- mozilla-release/third_party/rust/syn/src/tt.rs
++++ mozilla-release/third_party/rust/syn/src/tt.rs
+@@ -18,8 +18,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
+ _ => return false,
+ }
+
+- let s1 = g1.stream().clone().into_iter();
+- let mut s2 = g2.stream().clone().into_iter();
++ let s1 = g1.stream().into_iter();
++ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+@@ -60,7 +60,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
+ Delimiter::None => 3u8.hash(h),
+ }
+
+- for item in g.stream().clone() {
++ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
+diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
+index 4ee59bda2a..fd7c97eab7 100644
+--- mozilla-release/third_party/rust/syn/src/ty.rs
++++ mozilla-release/third_party/rust/syn/src/ty.rs
+@@ -1,15 +1,11 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Type #manual_extra_traits {
++ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+@@ -77,7 +73,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+@@ -90,7 +86,7 @@ ast_struct! {
+ ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+@@ -107,7 +103,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type contained within invisible delimiters.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+@@ -119,7 +115,7 @@ ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+@@ -130,7 +126,7 @@ ast_struct! {
+ ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+@@ -140,7 +136,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in the type position.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeMacro {
+ pub mac: Macro,
+@@ -150,7 +146,7 @@ ast_struct! {
+ ast_struct! {
+ /// The never type: `!`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+@@ -160,7 +156,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+@@ -172,7 +168,7 @@ ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+@@ -183,7 +179,7 @@ ast_struct! {
+ ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePtr {
+ pub star_token: Token![*],
+@@ -196,7 +192,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeReference {
+ pub and_token: Token![&],
+@@ -209,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+@@ -221,7 +217,7 @@ ast_struct! {
+ /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+@@ -232,7 +228,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+@@ -240,111 +236,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Type {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Type {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Type::Array(this), Type::Array(other)) => this == other,
+- (Type::BareFn(this), Type::BareFn(other)) => this == other,
+- (Type::Group(this), Type::Group(other)) => this == other,
+- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
+- (Type::Infer(this), Type::Infer(other)) => this == other,
+- (Type::Macro(this), Type::Macro(other)) => this == other,
+- (Type::Never(this), Type::Never(other)) => this == other,
+- (Type::Paren(this), Type::Paren(other)) => this == other,
+- (Type::Path(this), Type::Path(other)) => this == other,
+- (Type::Ptr(this), Type::Ptr(other)) => this == other,
+- (Type::Reference(this), Type::Reference(other)) => this == other,
+- (Type::Slice(this), Type::Slice(other)) => this == other,
+- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
+- (Type::Tuple(this), Type::Tuple(other)) => this == other,
+- (Type::Verbatim(this), Type::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Type {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Type::Array(ty) => {
+- hash.write_u8(0);
+- ty.hash(hash);
+- }
+- Type::BareFn(ty) => {
+- hash.write_u8(1);
+- ty.hash(hash);
+- }
+- Type::Group(ty) => {
+- hash.write_u8(2);
+- ty.hash(hash);
+- }
+- Type::ImplTrait(ty) => {
+- hash.write_u8(3);
+- ty.hash(hash);
+- }
+- Type::Infer(ty) => {
+- hash.write_u8(4);
+- ty.hash(hash);
+- }
+- Type::Macro(ty) => {
+- hash.write_u8(5);
+- ty.hash(hash);
+- }
+- Type::Never(ty) => {
+- hash.write_u8(6);
+- ty.hash(hash);
+- }
+- Type::Paren(ty) => {
+- hash.write_u8(7);
+- ty.hash(hash);
+- }
+- Type::Path(ty) => {
+- hash.write_u8(8);
+- ty.hash(hash);
+- }
+- Type::Ptr(ty) => {
+- hash.write_u8(9);
+- ty.hash(hash);
+- }
+- Type::Reference(ty) => {
+- hash.write_u8(10);
+- ty.hash(hash);
+- }
+- Type::Slice(ty) => {
+- hash.write_u8(11);
+- ty.hash(hash);
+- }
+- Type::TraitObject(ty) => {
+- hash.write_u8(12);
+- ty.hash(hash);
+- }
+- Type::Tuple(ty) => {
+- hash.write_u8(13);
+- ty.hash(hash);
+- }
+- Type::Verbatim(ty) => {
+- hash.write_u8(14);
+- TokenStreamHelper(ty).hash(hash);
+- }
+- Type::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Abi {
+ pub extern_token: Token![extern],
+@@ -355,7 +250,7 @@ ast_struct! {
+ ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+@@ -377,7 +272,7 @@ ast_struct! {
+ /// }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+@@ -388,7 +283,7 @@ ast_struct! {
+ ast_enum! {
+ /// Return type of a function signature.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum ReturnType {
+ /// Return type is not specified.
+@@ -407,10 +302,13 @@ pub mod parsing {
+ use crate::ext::IdentExt;
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use proc_macro2::{Punct, Spacing, TokenTree};
++ use std::iter::FromIterator;
+
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, true)
++ let allow_plus = true;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+@@ -421,15 +319,17 @@ pub mod parsing {
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, false)
++ let allow_plus = false;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
+ return input.parse().map(Type::Group);
+ }
+
++ let begin = input.fork();
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+@@ -524,7 +424,7 @@ pub mod parsing {
+ ..trait_bound
+ })
+ }
+- other => other,
++ other @ TypeParamBound::Lifetime(_) => other,
+ }
+ }
+ _ => break,
+@@ -549,17 +449,20 @@ pub mod parsing {
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
++ || lookahead.peek(Token![extern])
+ {
+- let mut bare_fn: TypeBareFn = input.parse()?;
+- bare_fn.lifetimes = lifetimes;
+- Ok(Type::BareFn(bare_fn))
++ let allow_mut_self = true;
++ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
++ bare_fn.lifetimes = lifetimes;
++ Ok(Type::BareFn(bare_fn))
++ } else {
++ Ok(Type::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+- || input.peek(Token![extern])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+@@ -722,38 +625,58 @@ pub mod parsing {
+
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let args;
+- let allow_variadic;
+- Ok(TypeBareFn {
+- lifetimes: input.parse()?,
+- unsafety: input.parse()?,
+- abi: input.parse()?,
+- fn_token: input.parse()?,
+- paren_token: parenthesized!(args in input),
+- inputs: {
+- let mut inputs = Punctuated::new();
+- while !args.is_empty() && !args.peek(Token![...]) {
+- inputs.push_value(args.parse()?);
+- if args.is_empty() {
+- break;
+- }
+- inputs.push_punct(args.parse()?);
+- }
+- allow_variadic = inputs.empty_or_trailing();
+- inputs
+- },
+- variadic: {
+- if allow_variadic && args.peek(Token![...]) {
+- Some(Variadic {
+- attrs: Vec::new(),
++ let allow_mut_self = false;
++ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
++ let args;
++ let mut variadic = None;
++ let mut has_mut_self = false;
++
++ let bare_fn = TypeBareFn {
++ lifetimes: input.parse()?,
++ unsafety: input.parse()?,
++ abi: input.parse()?,
++ fn_token: input.parse()?,
++ paren_token: parenthesized!(args in input),
++ inputs: {
++ let mut inputs = Punctuated::new();
++
++ while !args.is_empty() {
++ let attrs = args.call(Attribute::parse_outer)?;
++
++ if inputs.empty_or_trailing() && args.peek(Token![...]) {
++ variadic = Some(Variadic {
++ attrs,
+ dots: args.parse()?,
+- })
++ });
++ break;
++ }
++
++ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
++ inputs.push_value(BareFnArg { attrs, ..arg });
+ } else {
+- None
++ has_mut_self = true;
+ }
+- },
+- output: input.call(ReturnType::without_plus)?,
+- })
++ if args.is_empty() {
++ break;
++ }
++
++ inputs.push_punct(args.parse()?);
++ }
++
++ inputs
++ },
++ variadic,
++ output: input.call(ReturnType::without_plus)?,
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(bare_fn))
+ }
+ }
+
+@@ -776,9 +699,27 @@ pub mod parsing {
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
++ let paren_token = parenthesized!(content in input);
++
++ if content.is_empty() {
++ return Ok(TypeTuple {
++ paren_token,
++ elems: Punctuated::new(),
++ });
++ }
++
++ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+- paren_token: parenthesized!(content in input),
+- elems: content.parse_terminated(Type::parse)?,
++ paren_token,
++ elems: {
++ let mut elems = Punctuated::new();
++ elems.push_value(first);
++ elems.push_punct(content.parse()?);
++ let rest: Punctuated<Type, Token![,]> =
++ content.parse_terminated(Parse::parse)?;
++ elems.extend(rest);
++ elems
++ },
+ })
+ }
+ }
+@@ -807,9 +748,11 @@ pub mod parsing {
+
+ impl ReturnType {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+@@ -844,10 +787,12 @@ pub mod parsing {
+
+ impl TypeTraitObject {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ Ok(TypeTraitObject {
+ dyn_token: input.parse()?,
+@@ -910,7 +855,8 @@ pub mod parsing {
+
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+ }
+
+@@ -926,22 +872,72 @@ pub mod parsing {
+
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Ok(BareFnArg {
+- attrs: input.call(Attribute::parse_outer)?,
+- name: {
+- if (input.peek(Ident) || input.peek(Token![_]))
+- && input.peek2(Token![:])
+- && !input.peek2(Token![::])
+- {
+- let name = input.call(Ident::parse_any)?;
+- let colon: Token![:] = input.parse()?;
+- Some((name, colon))
+- } else {
+- None
+- }
+- },
+- ty: input.parse()?,
+- })
++ let allow_mut_self = false;
++ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn_arg(
++ input: ParseStream,
++ mut allow_mut_self: bool,
++ ) -> Result<Option<BareFnArg>> {
++ let mut has_mut_self = false;
++ let arg = BareFnArg {
++ attrs: input.call(Attribute::parse_outer)?,
++ name: {
++ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
++ && input.peek2(Token![:])
++ && !input.peek2(Token![::])
++ {
++ let name = input.call(Ident::parse_any)?;
++ let colon: Token![:] = input.parse()?;
++ Some((name, colon))
++ } else if allow_mut_self
++ && input.peek(Token![mut])
++ && input.peek2(Token![self])
++ && input.peek3(Token![:])
++ && !input.peek3(Token![::])
++ {
++ has_mut_self = true;
++ allow_mut_self = false;
++ input.parse::<Token![mut]>()?;
++ input.parse::<Token![self]>()?;
++ input.parse::<Token![:]>()?;
++ None
++ } else {
++ None
++ }
++ },
++ ty: if !has_mut_self && input.peek(Token![...]) {
++ let dot3 = input.parse::<Token![...]>()?;
++ let args = vec![
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
++ ];
++ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
++ |(mut arg, span)| {
++ arg.set_span(*span);
++ arg
++ },
++ ));
++ Type::Verbatim(tokens)
++ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
++ has_mut_self = true;
++ input.parse::<Token![mut]>()?;
++ Type::Path(TypePath {
++ qself: None,
++ path: input.parse::<Token![self]>()?.into(),
++ })
++ } else {
++ input.parse()?
++ },
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(arg))
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
+new file mode 100644
+index 0000000000..0686352f7a
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/verbatim.rs
+@@ -0,0 +1,15 @@
++use crate::parse::{ParseBuffer, ParseStream};
++use proc_macro2::TokenStream;
++use std::iter;
++
++pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
++ let end = end.cursor();
++ let mut cursor = begin.cursor();
++ let mut tokens = TokenStream::new();
++ while cursor != end {
++ let (tt, next) = cursor.token_tree().unwrap();
++ tokens.extend(iter::once(tt));
++ cursor = next;
++ }
++ tokens
++}
+diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
+new file mode 100644
+index 0000000000..7be082e1a2
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/src/whitespace.rs
+@@ -0,0 +1,65 @@
++pub fn skip(mut s: &str) -> &str {
++ 'skip: while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ if let Some(i) = s.find('\n') {
++ s = &s[i + 1..];
++ continue;
++ } else {
++ return "";
++ }
++ } else if s.starts_with("/**/") {
++ s = &s[4..];
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ let mut depth = 0;
++ let bytes = s.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ s = &s[i + 2..];
++ continue 'skip;
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++ return s;
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = &s[1..];
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = &s[ch.len_utf8()..];
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
+diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
+deleted file mode 100755
+index 8e0863cba6..0000000000
+--- mozilla-release/third_party/rust/syn/tests/clone.sh
++++ mozilla-release//dev/null
+@@ -1,16 +0,0 @@
+-#!/bin/bash
+-
+-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
+-
+-set -euo pipefail
+-cd "$(dirname "${BASH_SOURCE[0]}")"
+-mkdir -p rust
+-touch rust/COMMIT
+-
+-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
+- rm -rf rust
+- mkdir rust
+- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
+- | tar xz --directory rust --strip-components 1
+- echo "$REV" > rust/COMMIT
+-fi
+diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
+index 13a6c36ae5..7589a07573 100644
+--- mozilla-release/third_party/rust/syn/tests/common/eq.rs
++++ mozilla-release/third_party/rust/syn/tests/common/eq.rs
+@@ -1,36 +1,35 @@
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
++extern crate rustc_span;
+ extern crate rustc_target;
+-extern crate syntax;
+-extern crate syntax_pos;
+
+ use std::mem;
+
+-use self::rustc_data_structures::sync::Lrc;
+-use self::rustc_data_structures::thin_vec::ThinVec;
+-use self::rustc_target::abi::FloatTy;
+-use self::rustc_target::spec::abi::Abi;
+-use self::syntax::ast::{
+- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
+- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
+- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
+- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
+- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
+- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
+- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
+- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
+- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
+- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
+- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
+- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
+- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
++use rustc_ast::ast::{
++ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
++ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
++ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
++ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
++ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
++ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
++ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
++ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
++ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
++ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
++ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
++ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
++ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
++ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
++ WhereRegionPredicate,
+ };
+-use self::syntax::parse::lexer::comments;
+-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::Spanned;
+-use self::syntax::symbol::{sym, Symbol};
+-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
+-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
++use rustc_ast::ptr::P;
++use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
++use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
++use rustc_data_structures::sync::Lrc;
++use rustc_data_structures::thin_vec::ThinVec;
++use rustc_span::source_map::Spanned;
++use rustc_span::symbol::Ident;
++use rustc_span::{Span, Symbol, SyntaxContext};
+
+ pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+@@ -86,14 +85,6 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ }
+ }
+
+-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+- fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&self.0, &other.0)
+- && SpanlessEq::eq(&self.1, &other.1)
+- && SpanlessEq::eq(&self.2, &other.2)
+- }
+-}
+-
+ macro_rules! spanless_eq_true {
+ ($name:ident) => {
+ impl SpanlessEq for $name {
+@@ -126,17 +117,19 @@ spanless_eq_partial_eq!(u16);
+ spanless_eq_partial_eq!(u128);
+ spanless_eq_partial_eq!(usize);
+ spanless_eq_partial_eq!(char);
++spanless_eq_partial_eq!(String);
+ spanless_eq_partial_eq!(Symbol);
+-spanless_eq_partial_eq!(Abi);
++spanless_eq_partial_eq!(CommentKind);
+ spanless_eq_partial_eq!(DelimToken);
++spanless_eq_partial_eq!(InlineAsmOptions);
+
+ macro_rules! spanless_eq_struct {
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ } => {
+- impl SpanlessEq for $name {
++ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $name { $($field,)* $($ignore: _,)* } = self;
+ let $name { $($field: $other,)* $($ignore: _,)* } = other;
+@@ -146,14 +139,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $next:ident
+ $($rest:ident)*
+ $(!$ignore:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ [$next other]
+ $($rest)*
+@@ -162,14 +155,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ !$next:ident
+ $(!$rest:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ $(![$ignore])*
+ ![$next]
+@@ -263,119 +256,131 @@ macro_rules! spanless_eq_enum {
+ };
+ }
+
+-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
++spanless_eq_struct!(AngleBracketedArgs; span args);
+ spanless_eq_struct!(AnonConst; id value);
+-spanless_eq_struct!(Arg; attrs ty pat id span);
+-spanless_eq_struct!(Arm; attrs pats guard body span id);
++spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+ spanless_eq_struct!(AssocTyConstraint; id ident kind span);
+-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
+-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
++spanless_eq_struct!(AttrItem; path args);
++spanless_eq_struct!(Attribute; kind id style span);
++spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+ spanless_eq_struct!(Block; stmts id rules span);
+-spanless_eq_struct!(Crate; module attrs span);
++spanless_eq_struct!(Crate; module attrs span proc_macros);
+ spanless_eq_struct!(EnumDef; variants);
+-spanless_eq_struct!(Expr; id node span attrs);
+-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
+-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
+-spanless_eq_struct!(FnDecl; inputs output c_variadic);
+-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
+-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
++spanless_eq_struct!(Expr; id kind span attrs !tokens);
++spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
++spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
++spanless_eq_struct!(FnDecl; inputs output);
++spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
++spanless_eq_struct!(FnSig; header decl span);
+ spanless_eq_struct!(ForeignMod; abi items);
+-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
++spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+ spanless_eq_struct!(Generics; params where_clause span);
+ spanless_eq_struct!(GlobalAsm; asm);
+-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
+-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
+-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
+-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
++spanless_eq_struct!(InlineAsm; template operands options line_spans);
++spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+ spanless_eq_struct!(Label; ident);
+ spanless_eq_struct!(Lifetime; id ident);
+-spanless_eq_struct!(Lit; token node span);
++spanless_eq_struct!(Lit; token kind span);
++spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
++spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
+ spanless_eq_struct!(Local; pat ty init id span attrs);
+-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
+-spanless_eq_struct!(MacroDef; tokens legacy);
+-spanless_eq_struct!(MethodSig; header decl);
++spanless_eq_struct!(MacCall; path args prior_type_ascription);
++spanless_eq_struct!(MacCallStmt; mac style attrs);
++spanless_eq_struct!(MacroDef; body macro_rules);
+ spanless_eq_struct!(Mod; inner items inline);
+ spanless_eq_struct!(MutTy; ty mutbl);
++spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
+ spanless_eq_struct!(ParenthesizedArgs; span inputs output);
+-spanless_eq_struct!(Pat; id node span);
++spanless_eq_struct!(Pat; id kind span tokens);
+ spanless_eq_struct!(Path; span segments);
+ spanless_eq_struct!(PathSegment; ident id args);
+ spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+ spanless_eq_struct!(QSelf; ty path_span position);
+-spanless_eq_struct!(Stmt; id node span);
+-spanless_eq_struct!(StructField; span ident vis id ty attrs);
++spanless_eq_struct!(Stmt; id kind span);
++spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
++spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
+ spanless_eq_struct!(Token; kind span);
+-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
+ spanless_eq_struct!(TraitRef; path ref_id);
+-spanless_eq_struct!(Ty; id node span);
++spanless_eq_struct!(Ty; id kind span);
+ spanless_eq_struct!(UseTree; prefix kind span);
+-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
++spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
+ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+-spanless_eq_struct!(WhereClause; predicates span);
++spanless_eq_struct!(WhereClause; has_where_token predicates span);
+ spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+ spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+-spanless_eq_enum!(AsmDialect; Att Intel);
++spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
++spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
++spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
++spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
+ spanless_eq_enum!(AttrStyle; Outer Inner);
+ spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
+ spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
++spanless_eq_enum!(BorrowKind; Ref Raw);
+ spanless_eq_enum!(CaptureBy; Value Ref);
+-spanless_eq_enum!(Constness; Const NotConst);
++spanless_eq_enum!(Const; Yes(0) No);
+ spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
+-spanless_eq_enum!(Defaultness; Default Final);
++spanless_eq_enum!(Defaultness; Default(0) Final);
++spanless_eq_enum!(Extern; None Implicit Explicit(0));
+ spanless_eq_enum!(FloatTy; F32 F64);
+-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
+-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
++spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
++spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+ spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+ spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
+-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
+-spanless_eq_enum!(ImplPolarity; Positive Negative);
++spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
++spanless_eq_enum!(ImplPolarity; Positive Negative(0));
++spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
++spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+ spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
+ spanless_eq_enum!(IsAuto; Yes No);
++spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
++spanless_eq_enum!(LlvmAsmDialect; Att Intel);
++spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
+ spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
+ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+ spanless_eq_enum!(Movability; Static Movable);
+-spanless_eq_enum!(Mutability; Mutable Immutable);
++spanless_eq_enum!(Mutability; Mut Not);
+ spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+ spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
++spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+ spanless_eq_enum!(StrStyle; Cooked Raw(0));
+ spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+-spanless_eq_enum!(TraitBoundModifier; None Maybe);
+-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
++spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
+ spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+ spanless_eq_enum!(UnOp; Deref Not Neg);
++spanless_eq_enum!(Unsafe; Yes(0) No);
+ spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+-spanless_eq_enum!(Unsafety; Unsafe Normal);
+ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
+ spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
++spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
+ Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
+ While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
+- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
+- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
+- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
+- Try(0) Yield(0) Err);
+-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
+- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
+- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
+- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
++ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
++ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
++ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
++ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
++spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
++ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
++ Sym(expr));
++spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
++ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
++ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
++ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
++ MacCall(0) MacroDef(0));
+ spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
++ Float(0 1) Bool(0) Err(0));
+ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
+ Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+- Paren(0) Mac(0));
++ Paren(0) MacCall(0));
+ spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+ Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+- ImplicitSelf Mac(0) Err CVarArgs);
++ ImplicitSelf MacCall(0) Err CVarArgs);
+
+ impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+@@ -414,44 +419,20 @@ impl SpanlessEq for TokenKind {
+
+ impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
+- }
+-}
+-
+-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
+- let mut tokens = Vec::new();
+- for tt in tts.clone().into_trees() {
+- let c = match tt {
+- TokenTree::Token(Token {
+- kind: TokenKind::DocComment(c),
+- ..
+- }) => c,
+- _ => {
+- tokens.push(tt);
+- continue;
++ let mut this = self.clone().into_trees();
++ let mut other = other.clone().into_trees();
++ loop {
++ let this = match this.next() {
++ None => return other.next().is_none(),
++ Some(val) => val,
++ };
++ let other = match other.next() {
++ None => return false,
++ Some(val) => val,
++ };
++ if !SpanlessEq::eq(&this, &other) {
++ return false;
+ }
+- };
+- let contents = comments::strip_doc_comment_decoration(&c.as_str());
+- let style = comments::doc_comment_style(&c.as_str());
+- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
+- if style == AttrStyle::Inner {
+- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
+ }
+- let lit = token::Lit {
+- kind: token::LitKind::Str,
+- symbol: Symbol::intern(&contents),
+- suffix: None,
+- };
+- let tts = vec![
+- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
+- TokenTree::token(TokenKind::Eq, DUMMY_SP),
+- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
+- ];
+- tokens.push(TokenTree::Delimited(
+- DelimSpan::dummy(),
+- DelimToken::Bracket,
+- tts.into_iter().collect::<TokenStream>().into(),
+- ));
+ }
+- tokens
+ }
+diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
+index 8b784beed7..a1cc80a16f 100644
+--- mozilla-release/third_party/rust/syn/tests/common/mod.rs
++++ mozilla-release/third_party/rust/syn/tests/common/mod.rs
+@@ -1,5 +1,6 @@
+ #![allow(dead_code)]
+
++use rayon::ThreadPoolBuilder;
+ use std::env;
+
+ pub mod eq;
+@@ -12,3 +13,15 @@ pub fn abort_after() -> usize {
+ Err(_) => usize::max_value(),
+ }
+ }
++
++/// Configure Rayon threadpool.
++pub fn rayon_init() {
++ let stack_size = match env::var("RUST_MIN_STACK") {
++ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
++ Err(_) => 20 * 1024 * 1024,
++ };
++ ThreadPoolBuilder::new()
++ .stack_size(stack_size)
++ .build_global()
++ .unwrap();
++}
+diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
+index 41d192f6fb..192828fedd 100644
+--- mozilla-release/third_party/rust/syn/tests/common/parse.rs
++++ mozilla-release/third_party/rust/syn/tests/common/parse.rs
+@@ -1,20 +1,20 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
++extern crate rustc_ast;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+-use self::syntax::ast;
+-use self::syntax::parse::{self, ParseSess};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::FilePathMapping;
+-use self::syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+
+ use std::panic;
+
+-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
++pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- sess.span_diagnostic.set_continue_after_error(false);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+@@ -32,7 +32,7 @@ pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+- errorf!("libsyntax panicked\n");
++ errorf!("librustc panicked\n");
+ None
+ }
+ }
+diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
+index 8450c09ecf..85a1a39079 100644
+--- mozilla-release/third_party/rust/syn/tests/debug/gen.rs
++++ mozilla-release/third_party/rust/syn/tests/debug/gen.rs
+@@ -2,7 +2,7 @@
+ // It is not intended for manual editing.
+
+ use super::{Lite, RefCast};
+-use std::fmt::{self, Debug};
++use std::fmt::{self, Debug, Display};
+ impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+@@ -1039,9 +1039,9 @@ impl Debug for Lite<syn::Expr> {
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+@@ -2116,9 +2116,9 @@ impl Debug for Lite<syn::ForeignItem> {
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2432,9 +2432,9 @@ impl Debug for Lite<syn::ImplItem> {
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2940,9 +2940,9 @@ impl Debug for Lite<syn::Item> {
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -3437,9 +3437,9 @@ impl Debug for Lite<syn::Lit> {
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ }
+@@ -3878,9 +3878,9 @@ impl Debug for Lite<syn::Pat> {
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+@@ -4674,9 +4674,9 @@ impl Debug for Lite<syn::TraitItem> {
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -5040,9 +5040,9 @@ impl Debug for Lite<syn::Type> {
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
+index c1180532ec..cefebacef7 100644
+--- mozilla-release/third_party/rust/syn/tests/debug/mod.rs
++++ mozilla-release/third_party/rust/syn/tests/debug/mod.rs
+@@ -1,10 +1,7 @@
+-extern crate proc_macro2;
+-extern crate ref_cast;
+-
+ mod gen;
+
+-use self::proc_macro2::{Ident, Literal, TokenStream};
+-use self::ref_cast::RefCast;
++use proc_macro2::{Ident, Literal, TokenStream};
++use ref_cast::RefCast;
+ use std::fmt::{self, Debug};
+ use std::ops::Deref;
+ use syn::punctuated::Punctuated;
+@@ -66,7 +63,15 @@ impl Debug for Lite<Literal> {
+
+ impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- write!(formatter, "`{}`", self.value)
++ let string = self.value.to_string();
++ if string.len() <= 80 {
++ write!(formatter, "TokenStream(`{}`)", self.value)
++ } else {
++ formatter
++ .debug_tuple("TokenStream")
++ .field(&format_args!("`{}`", string))
++ .finish()
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
+deleted file mode 100644
+index 10ac88965d..0000000000
+--- mozilla-release/third_party/rust/syn/tests/features/error.rs
++++ mozilla-release//dev/null
+@@ -1 +0,0 @@
+-"Hello! You want: cargo test --release --all-features"
+diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
+deleted file mode 100644
+index 83fbe13e7e..0000000000
+--- mozilla-release/third_party/rust/syn/tests/features/mod.rs
++++ mozilla-release//dev/null
+@@ -1,22 +0,0 @@
+-#[allow(unused_macros)]
+-macro_rules! hide_from_rustfmt {
+- ($mod:item) => {
+- $mod
+- };
+-}
+-
+-#[cfg(not(all(
+- feature = "derive",
+- feature = "full",
+- feature = "parsing",
+- feature = "printing",
+- feature = "visit",
+- feature = "visit-mut",
+- feature = "fold",
+- feature = "clone-impls",
+- feature = "extra-traits",
+- feature = "proc-macro",
+-)))]
+-hide_from_rustfmt! {
+- mod error;
+-}
+diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
+index c72fd01058..3994615fc4 100644
+--- mozilla-release/third_party/rust/syn/tests/macros/mod.rs
++++ mozilla-release/third_party/rust/syn/tests/macros/mod.rs
+@@ -1,5 +1,3 @@
+-extern crate proc_macro2;
+-
+ #[path = "../debug/mod.rs"]
+ pub mod debug;
+
+@@ -42,18 +40,18 @@ macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
+ let debug = crate::macros::debug::Lite(&$expr);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
+index c22cb03758..1d3e1f0e74 100644
+--- mozilla-release/third_party/rust/syn/tests/repo/mod.rs
++++ mozilla-release/third_party/rust/syn/tests/repo/mod.rs
+@@ -1,8 +1,37 @@
+-extern crate walkdir;
++mod progress;
+
+-use std::process::Command;
++use self::progress::Progress;
++use anyhow::Result;
++use flate2::read::GzDecoder;
++use std::fs;
++use std::path::Path;
++use tar::Archive;
++use walkdir::DirEntry;
+
+-use self::walkdir::DirEntry;
++const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
++
++#[rustfmt::skip]
++static EXCLUDE: &[&str] = &[
++ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
++ "test/ui/const-generics/const-expression-parameter.rs",
++
++ // Deprecated anonymous parameter syntax in traits
++ "test/ui/issues/issue-13105.rs",
++ "test/ui/issues/issue-13775.rs",
++ "test/ui/issues/issue-34074.rs",
++ "test/ui/proc-macro/trait-fn-args-2015.rs",
++
++ // Not actually test cases
++ "test/rustdoc-ui/test-compile-fail2.rs",
++ "test/rustdoc-ui/test-compile-fail3.rs",
++ "test/ui/include-single-expr-helper.rs",
++ "test/ui/include-single-expr-helper-1.rs",
++ "test/ui/issues/auxiliary/issue-21146-inc.rs",
++ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
++ "test/ui/lint/expansion-time-include.rs",
++ "test/ui/macros/auxiliary/macro-comma-support.rs",
++ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
++];
+
+ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+@@ -12,49 +41,95 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ if path.extension().map(|e| e != "rs").unwrap_or(true) {
+ return false;
+ }
+- let path_string = path.to_string_lossy();
+- let path_string = if cfg!(windows) {
+- path_string.replace('\\', "/").into()
++
++ let mut path_string = path.to_string_lossy();
++ if cfg!(windows) {
++ path_string = path_string.replace('\\', "/").into();
++ }
++ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
++ path
++ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
++ path
+ } else {
+- path_string
++ panic!("unexpected path in Rust dist: {}", path_string);
+ };
++
+ // TODO assert that parsing fails on the parse-fail cases
+- if path_string.starts_with("tests/rust/src/test/parse-fail")
+- || path_string.starts_with("tests/rust/src/test/compile-fail")
+- || path_string.starts_with("tests/rust/src/test/rustfix")
++ if path.starts_with("test/parse-fail")
++ || path.starts_with("test/compile-fail")
++ || path.starts_with("test/rustfix")
+ {
+ return false;
+ }
+
+- if path_string.starts_with("tests/rust/src/test/ui") {
+- let stderr_path = path.with_extension("stderr");
++ if path.starts_with("test/ui") {
++ let stderr_path = entry.path().with_extension("stderr");
+ if stderr_path.exists() {
+ // Expected to fail in some way
+ return false;
+ }
+ }
+
+- match path_string.as_ref() {
+- // Deprecated placement syntax
+- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
+- // Deprecated anonymous parameter syntax in traits
+- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
+- "tests/rust/src/test/ui/issues/issue-13105.rs" |
+- "tests/rust/src/test/ui/issues/issue-13775.rs" |
+- "tests/rust/src/test/ui/issues/issue-34074.rs" |
+- // Deprecated await macro syntax
+- "tests/rust/src/test/ui/async-await/await-macro.rs" |
+- // 2015-style dyn that libsyntax rejects
+- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
+- // not actually test cases
+- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
+- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
+- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
+- _ => true,
++ !EXCLUDE.contains(&path)
++}
++
++#[allow(dead_code)]
++pub fn edition(path: &Path) -> &'static str {
++ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
++ "2015"
++ } else {
++ "2018"
+ }
+ }
+
+ pub fn clone_rust() {
+- let result = Command::new("tests/clone.sh").status().unwrap();
+- assert!(result.success());
++ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
++ Err(_) => true,
++ Ok(contents) => contents.trim() != REVISION,
++ };
++ if needs_clone {
++ download_and_unpack().unwrap();
++ }
++ let mut missing = String::new();
++ let test_src = Path::new("tests/rust/src");
++ for exclude in EXCLUDE {
++ if !test_src.join(exclude).exists() {
++ missing += "\ntests/rust/src/";
++ missing += exclude;
++ }
++ }
++ if !missing.is_empty() {
++ panic!("excluded test file does not exist:{}\n", missing);
++ }
++}
++
++fn download_and_unpack() -> Result<()> {
++ let url = format!(
++ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
++ REVISION
++ );
++ let response = reqwest::blocking::get(&url)?.error_for_status()?;
++ let progress = Progress::new(response);
++ let decoder = GzDecoder::new(progress);
++ let mut archive = Archive::new(decoder);
++ let prefix = format!("rust-{}", REVISION);
++
++ let tests_rust = Path::new("tests/rust");
++ if tests_rust.exists() {
++ fs::remove_dir_all(tests_rust)?;
++ }
++
++ for entry in archive.entries()? {
++ let mut entry = entry?;
++ let path = entry.path()?;
++ if path == Path::new("pax_global_header") {
++ continue;
++ }
++ let relative = path.strip_prefix(&prefix)?;
++ let out = tests_rust.join(relative);
++ entry.unpack(&out)?;
++ }
++
++ fs::write("tests/rust/COMMIT", REVISION)?;
++ Ok(())
+ }
+diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
+new file mode 100644
+index 0000000000..28c8a44b12
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/repo/progress.rs
+@@ -0,0 +1,37 @@
++use std::io::{Read, Result};
++use std::time::{Duration, Instant};
++
++pub struct Progress<R> {
++ bytes: usize,
++ tick: Instant,
++ stream: R,
++}
++
++impl<R> Progress<R> {
++ pub fn new(stream: R) -> Self {
++ Progress {
++ bytes: 0,
++ tick: Instant::now() + Duration::from_millis(2000),
++ stream,
++ }
++ }
++}
++
++impl<R: Read> Read for Progress<R> {
++ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
++ let num = self.stream.read(buf)?;
++ self.bytes += num;
++ let now = Instant::now();
++ if now > self.tick {
++ self.tick = now + Duration::from_millis(500);
++ errorf!("downloading... {} bytes\n", self.bytes);
++ }
++ Ok(num)
++ }
++}
++
++impl<R> Drop for Progress<R> {
++ fn drop(&mut self) {
++ errorf!("done ({} bytes)\n", self.bytes);
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
+index f868fbcc20..0efef5976f 100644
+--- mozilla-release/third_party/rust/syn/tests/test_asyncness.rs
++++ mozilla-release/third_party/rust/syn/tests/test_asyncness.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,16 +8,16 @@ fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+- ⋮Item::Fn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ asyncness: Some,
+- ⋮ ident: "process",
+- ⋮ generics: Generics,
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ asyncness: Some,
++ ident: "process",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+ }
+
+@@ -30,12 +26,12 @@ fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+- ⋮Expr::Closure {
+- ⋮ asyncness: Some,
+- ⋮ output: Default,
+- ⋮ body: Expr::Block {
+- ⋮ block: Block,
+- ⋮ },
+- ⋮}
++ Expr::Closure {
++ asyncness: Some,
++ output: Default,
++ body: Expr::Block {
++ block: Block,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
+index aff6294fc3..c26bd090ec 100644
+--- mozilla-release/third_party/rust/syn/tests/test_attribute.rs
++++ mozilla-release/third_party/rust/syn/tests/test_attribute.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -13,14 +9,14 @@ fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,17 +25,17 @@ fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+ }
+
+@@ -48,37 +44,37 @@ fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: false,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: false,
++ },
++ }
+ "###);
+ }
+
+@@ -87,19 +83,19 @@ fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+ }
+
+@@ -108,26 +104,26 @@ fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -136,29 +132,29 @@ fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -167,31 +163,31 @@ fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -200,68 +196,68 @@ fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -270,21 +266,63 @@ fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(Lit::Bool {
+- ⋮ value: true,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(Lit::Bool {
++ value: true,
++ }),
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_negative_lit() {
++ let meta = test("#[form(min = -1, max = 200)]");
++
++ snapshot!(meta, @r###"
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "form",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "min",
++ arguments: None,
++ },
++ ],
++ },
++ lit: -1,
++ }),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "max",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 200,
++ }),
++ ],
++ }
+ "###);
+ }
+
+diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
+index de68240166..bf1ebdb67d 100644
+--- mozilla-release/third_party/rust/syn/tests/test_derive_input.rs
++++ mozilla-release/third_party/rust/syn/tests/test_derive_input.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,15 +11,15 @@ fn test_unit() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "Unit",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "Unit",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -39,105 +34,105 @@ fn test_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `( Debug , Clone )`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Item",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("ident"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Ident",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("attrs"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Vec",
+- ⋮ arguments: PathArguments::AngleBracketed {
+- ⋮ args: [
+- ⋮ Type(Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Attribute",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(Debug , Clone)`),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Item",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Visibility::Public,
++ ident: Some("ident"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Ident",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("attrs"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Vec",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Attribute",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Clone",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Clone",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -151,46 +146,46 @@ fn test_union() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "MaybeUninit",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Union {
+- ⋮ fields: FieldsNamed {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("uninit"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Tuple,
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("value"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "MaybeUninit",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Union {
++ fields: FieldsNamed {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("uninit"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ Field {
++ vis: Inherited,
++ ident: Some("value"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
+
+@@ -212,118 +207,118 @@ fn test_enum() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `= r" See the std::result module documentation for details."`,
+- ⋮ },
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Result",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ ident: "E",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Enum {
+- ⋮ variants: [
+- ⋮ Variant {
+- ⋮ ident: "Ok",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Err",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "E",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Surprise",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Lit {
+- ⋮ lit: 0isize,
+- ⋮ }),
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "ProcMacroHack",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Field {
+- ⋮ base: Expr::Tuple {
+- ⋮ elems: [
+- ⋮ Expr::Lit {
+- ⋮ lit: 0,
+- ⋮ },
+- ⋮ Expr::Lit {
+- ⋮ lit: "data",
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ member: Unnamed(Index {
+- ⋮ index: 0,
+- ⋮ }),
+- ⋮ }),
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
++ },
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Result",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ Type(TypeParam {
++ ident: "E",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Enum {
++ variants: [
++ Variant {
++ ident: "Ok",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Err",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "E",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Surprise",
++ fields: Unit,
++ discriminant: Some(Expr::Lit {
++ lit: 0isize,
++ }),
++ },
++ Variant {
++ ident: "ProcMacroHack",
++ fields: Unit,
++ discriminant: Some(Expr::Field {
++ base: Expr::Tuple {
++ elems: [
++ Expr::Lit {
++ lit: 0,
++ },
++ Expr::Lit {
++ lit: "data",
++ },
++ ],
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }),
++ },
++ ],
++ },
++ }
+ "###);
+
+ let meta_items: Vec<_> = input
+@@ -333,27 +328,27 @@ fn test_enum() {
+ .collect();
+
+ snapshot!(meta_items, @r###"
+- ⋮[
+- ⋮ Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: " See the std::result module documentation for details.",
+- ⋮ },
+- ⋮ Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮]
++ [
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ lit: " See the std::result module documentation for details.",
++ },
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ }),
++ ]
+ "###);
+ }
+
+@@ -366,34 +361,34 @@ fn test_attr_with_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ leading_colon: Some,
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "attr_args",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "identity",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "Dummy",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ leading_colon: Some,
++ segments: [
++ PathSegment {
++ ident: "attr_args",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "identity",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
++ },
++ ],
++ vis: Inherited,
++ ident: "Dummy",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -407,29 +402,29 @@ fn test_attr_with_non_mod_style_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "inert",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `< T >`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inert",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`< T >`),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -443,48 +438,48 @@ fn test_attr_with_mod_style_path_with_self() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -496,55 +491,55 @@ fn test_pub_restricted() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "Z",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "n",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "u8",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "Z",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "n",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "u8",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -555,15 +550,15 @@ fn test_vis_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Crate,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Crate,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -574,24 +569,24 @@ fn test_pub_restricted_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -602,24 +597,24 @@ fn test_pub_restricted_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -630,25 +625,25 @@ fn test_pub_restricted_in_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -659,15 +654,15 @@ fn test_fields_on_unit_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -688,47 +683,47 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -737,38 +732,38 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -779,44 +774,44 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -825,34 +820,34 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -864,34 +859,34 @@ fn test_ambiguous_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "X",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "X",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
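The snapshots above come from syn's test-only snapshot! macro, which compares a Debug-style rendering of the parsed DeriveInput against the inline string; the patch reindents that rendering and drops the old ⋮ margin markers. As a minimal standalone sketch (assuming syn 1.x with parse_quote; hypothetical example, not part of this patch), the same DeriveInput pieces can be inspected directly:

    use syn::{parse_quote, Data, DeriveInput};

    fn main() {
        // Mirrors test_pub_restricted above: a tuple struct with restricted visibility.
        let input: DeriveInput = parse_quote! {
            pub(in m) struct Z(pub(in m::n) u8);
        };
        assert_eq!(input.ident, "Z");
        if let Data::Struct(data) = input.data {
            assert_eq!(data.fields.iter().count(), 1);
        }
    }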
+diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
+index c8a11cec2c..b2b65a254f 100644
+--- mozilla-release/third_party/rust/syn/tests/test_expr.rs
++++ mozilla-release/third_party/rust/syn/tests/test_expr.rs
+@@ -1,40 +1,302 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+ #[macro_use]
+ mod macros;
+
+-use std::str::FromStr;
+-
+-use proc_macro2::TokenStream;
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
+ use syn::{Expr, ExprRange};
+
+ #[test]
+ fn test_expr_parse() {
+- let code = "..100u32";
+- let tt = TokenStream::from_str(code).unwrap();
+- let expr: Expr = syn::parse2(tt.clone()).unwrap();
+- let expr_range: ExprRange = syn::parse2(tt).unwrap();
+- assert_eq!(expr, Expr::Range(expr_range));
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as Expr, @r###"
++ Expr::Range {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
++
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as ExprRange, @r###"
++ ExprRange {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
+ }
+
+ #[test]
+ fn test_await() {
+ // Must not parse as Expr::Field.
+- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
+-
+- snapshot!(expr, @r###"
+- ⋮Expr::Await {
+- ⋮ base: Expr::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "fut",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ let tokens = quote!(fut.await);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Await {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "fut",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
++
++#[rustfmt::skip]
++#[test]
++fn test_tuple_multi_index() {
++ for &input in &[
++ "tuple.0.0",
++ "tuple .0.0",
++ "tuple. 0.0",
++ "tuple.0 .0",
++ "tuple.0. 0",
++ "tuple . 0 . 0",
++ ] {
++ snapshot!(input as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++
++ for tokens in vec![
++ quote!(tuple.0.0),
++ quote!(tuple .0.0),
++ quote!(tuple. 0.0),
++ quote!(tuple.0 .0),
++ quote!(tuple.0. 0),
++ quote!(tuple . 0 . 0),
++ ] {
++ snapshot!(tokens as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++}
++
++#[test]
++fn test_macro_variable_func() {
++ // mimics the token stream corresponding to `$fn()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ func: Expr::Group {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "outside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ func: Expr::Group {
++ expr: Expr::Path {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_macro() {
++ // mimics the token stream corresponding to `$macro!()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
++ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Macro {
++ mac: Macro {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ delimiter: Paren,
++ tokens: TokenStream(``),
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_struct() {
++ // mimics the token stream corresponding to `$struct {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Struct {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "S",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_match_arm() {
++ // mimics the token stream corresponding to `match v { _ => $expr }`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("match", Span::call_site())),
++ TokenTree::Ident(Ident::new("v", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
++ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Match {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "v",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ arms: [
++ Arm {
++ pat: Pat::Wild,
++ body: Expr::Group {
++ expr: Expr::Tuple {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "a",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ },
++ },
++ },
++ ],
++ }
+ "###);
+ }
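The new test_macro_variable_* cases above hand-build None-delimited groups because that is how interpolated macro metavariables ($fn, $macro, $struct, $expr) arrive in a procedural macro's token stream, and syn has to parse through them transparently. A minimal standalone sketch of the same idea, assuming syn 1.x with the full feature and proc-macro2 1.x as these tests use (not part of the patch):

    use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
    use quote::quote;
    use std::iter::FromIterator;

    fn main() {
        // Mimics `$fn()` where the metavariable expanded to the path `f`.
        let tokens = TokenStream::from_iter(vec![
            TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
            TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
        ]);
        let expr: syn::Expr = syn::parse2(tokens).unwrap();
        // syn sees a call expression whose callee sits inside an invisible group.
        assert!(matches!(expr, syn::Expr::Call(_)));
    }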
+diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
+index 55c79e066b..b29434a147 100644
+--- mozilla-release/third_party/rust/syn/tests/test_generics.rs
++++ mozilla-release/third_party/rust/syn/tests/test_generics.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,90 +11,90 @@ fn test_split_for_impl() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ }),
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "b",
+- ⋮ },
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "may_dangle",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ ident: "T",
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮ }),
+- ⋮ ],
+- ⋮ eq_token: Some,
+- ⋮ default: Some(Type::Tuple),
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "a",
++ },
++ }),
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "b",
++ },
++ colon_token: Some,
++ bounds: [
++ Lifetime {
++ ident: "a",
++ },
++ ],
++ }),
++ Type(TypeParam {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "may_dangle",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ ident: "T",
++ colon_token: Some,
++ bounds: [
++ Lifetime(Lifetime {
++ ident: "a",
++ }),
++ ],
++ eq_token: Some,
++ default: Some(Type::Tuple),
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let generics = input.generics;
+@@ -131,46 +126,46 @@ fn test_split_for_impl() {
+ fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "a",
++ })
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "_",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "_",
++ })
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: Maybe,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Sized",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: Maybe,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Sized",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+ }
+
+@@ -187,76 +182,76 @@ fn test_fn_precedence_in_where_clause() {
+ };
+
+ snapshot!(input as ItemFn, @r###"
+- ⋮ItemFn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ ident: "f",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "G",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "G",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "FnOnce",
+- ⋮ arguments: PathArguments::Parenthesized {
+- ⋮ output: Type(
+- ⋮ Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ),
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Send",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ ItemFn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "G",
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "G",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "FnOnce",
++ arguments: PathArguments::Parenthesized {
++ output: Type(
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ),
++ },
++ },
++ ],
++ },
++ }),
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Send",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+@@ -270,7 +265,7 @@ fn test_fn_precedence_in_where_clause() {
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
++ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
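test_split_for_impl above parses Generics carrying lifetimes, bounds, a default, and a where-clause. A hedged sketch of how such Generics are typically consumed through split_for_impl, assuming syn 1.x and quote 1.x; MyTrait is a placeholder trait name used only for illustration:

    use quote::quote;
    use syn::{parse_quote, DeriveInput};

    fn main() {
        let input: DeriveInput = parse_quote! {
            struct S<'a, 'b: 'a, T: 'a = ()> where T: core::fmt::Debug;
        };
        let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
        // `MyTrait` is a hypothetical trait name for the sake of the example.
        let tokens = quote! {
            impl #impl_generics MyTrait for S #ty_generics #where_clause {}
        };
        assert!(tokens.to_string().starts_with("impl"));
    }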
+diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
+index 1558a47b4b..a0fe716390 100644
+--- mozilla-release/third_party/rust/syn/tests/test_grouping.rs
++++ mozilla-release/third_party/rust/syn/tests/test_grouping.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -28,31 +23,31 @@ fn test_grouping() {
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
++ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+- ⋮Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 1i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Binary {
+- ⋮ left: Expr::Group {
+- ⋮ expr: Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 2i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 3i32,
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ op: Mul,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 4i32,
+- ⋮ },
+- ⋮ },
+- ⋮}
++ Expr::Binary {
++ left: Expr::Lit {
++ lit: 1i32,
++ },
++ op: Add,
++ right: Expr::Binary {
++ left: Expr::Group {
++ expr: Expr::Binary {
++ left: Expr::Lit {
++ lit: 2i32,
++ },
++ op: Add,
++ right: Expr::Lit {
++ lit: 3i32,
++ },
++ },
++ },
++ op: Mul,
++ right: Expr::Lit {
++ lit: 4i32,
++ },
++ },
++ }
+ "###);
+ }
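test_grouping above builds the token stream for 1i32 + 2i32 + 3i32 * 4i32 with an invisible (None-delimited) group around 2i32 + 3i32, and the snapshot shows that the group overrides normal operator precedence. A minimal standalone sketch, assuming syn 1.x with the full feature plus proc-macro2 and quote 1.x (not part of the patch):

    use proc_macro2::{Delimiter, Group, TokenStream};
    use quote::quote;

    fn main() {
        // The invisible group keeps `2i32 + 3i32` bound together even though
        // `*` normally binds tighter than `+`.
        let grouped = Group::new(Delimiter::None, quote!(2i32 + 3i32));
        let tokens: TokenStream = quote!(1i32 + #grouped * 4i32);
        let expr: syn::Expr = syn::parse2(tokens).unwrap();
        // Parsed shape: 1i32 + ((2i32 + 3i32) * 4i32), matching the snapshot above.
        let _ = expr;
    }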
+diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
+index bec00a70c9..ee01bfcc9f 100644
+--- mozilla-release/third_party/rust/syn/tests/test_ident.rs
++++ mozilla-release/third_party/rust/syn/tests/test_ident.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ use proc_macro2::{Ident, Span, TokenStream};
+ use std::str::FromStr;
+ use syn::Result;
+diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
+new file mode 100644
+index 0000000000..74ac4baec6
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_item.rs
+@@ -0,0 +1,45 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Item;
++
++#[test]
++fn test_macro_variable_attr() {
++ // mimics the token stream corresponding to `$attr fn f() {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
++ TokenTree::Ident(Ident::new("fn", Span::call_site())),
++ TokenTree::Ident(Ident::new("f", Span::call_site())),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Item, @r###"
++ Item::Fn {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "test",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
+index 1cf7157e6f..2c8359c157 100644
+--- mozilla-release/third_party/rust/syn/tests/test_iterators.rs
++++ mozilla-release/third_party/rust/syn/tests/test_iterators.rs
+@@ -1,10 +1,5 @@
+ use syn::punctuated::{Pair, Punctuated};
+-
+-extern crate quote;
+-#[macro_use]
+-extern crate syn;
+-
+-mod features;
++use syn::Token;
+
+ #[macro_use]
+ mod macros;
+diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
+index 1e8f49d19b..e995f2287f 100644
+--- mozilla-release/third_party/rust/syn/tests/test_lit.rs
++++ mozilla-release/third_party/rust/syn/tests/test_lit.rs
+@@ -1,13 +1,11 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
++#[macro_use]
++mod macros;
+
+-mod features;
+-
+-use proc_macro2::{TokenStream, TokenTree};
++use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+ use quote::ToTokens;
++use std::iter::FromIterator;
+ use std::str::FromStr;
+-use syn::Lit;
++use syn::{Lit, LitFloat, LitInt};
+
+ fn lit(s: &str) -> Lit {
+ match TokenStream::from_str(s)
+@@ -50,6 +48,9 @@ fn strings() {
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
++ test_string("\"...\"q", "...");
++ test_string("r\"...\"q", "...");
++ test_string("r##\"...\"##q", "...");
+ }
+
+ #[test]
+@@ -79,6 +80,9 @@ fn byte_strings() {
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
++ test_byte_string("b\"...\"q", b"...");
++ test_byte_string("br\"...\"q", b"...");
++ test_byte_string("br##\"...\"##q", b"...");
+ }
+
+ #[test]
+@@ -100,6 +104,7 @@ fn bytes() {
+ test_byte("b'\\t'", b'\t');
+ test_byte("b'\\''", b'\'');
+ test_byte("b'\"'", b'"');
++ test_byte("b'a'q", b'a');
+ }
+
+ #[test]
+@@ -125,6 +130,7 @@ fn chars() {
+ test_char("'\\''", '\'');
+ test_char("'\"'", '"');
+ test_char("'\\u{1F415}'", '\u{1F415}');
++ test_char("'a'q", 'a');
+ }
+
+ #[test]
+@@ -185,4 +191,59 @@ fn floats() {
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
++ test_float("9e99e99", 9e99, "e99");
++}
++
++#[test]
++fn negative() {
++ let span = Span::call_site();
++ assert_eq!("-1", LitInt::new("-1", span).to_string());
++ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
++ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
++ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
++ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
++ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
++ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
++ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
++}
++
++#[test]
++fn suffix() {
++ fn get_suffix(token: &str) -> String {
++ let lit = syn::parse_str::<Lit>(token).unwrap();
++ match lit {
++ Lit::Str(lit) => lit.suffix().to_owned(),
++ Lit::ByteStr(lit) => lit.suffix().to_owned(),
++ Lit::Byte(lit) => lit.suffix().to_owned(),
++ Lit::Char(lit) => lit.suffix().to_owned(),
++ Lit::Int(lit) => lit.suffix().to_owned(),
++ Lit::Float(lit) => lit.suffix().to_owned(),
++ _ => unimplemented!(),
++ }
++ }
++
++ assert_eq!(get_suffix("\"\"s"), "s");
++ assert_eq!(get_suffix("r\"\"r"), "r");
++ assert_eq!(get_suffix("b\"\"b"), "b");
++ assert_eq!(get_suffix("br\"\"br"), "br");
++ assert_eq!(get_suffix("r#\"\"#r"), "r");
++ assert_eq!(get_suffix("'c'c"), "c");
++ assert_eq!(get_suffix("b'b'b"), "b");
++ assert_eq!(get_suffix("1i32"), "i32");
++ assert_eq!(get_suffix("1_i32"), "i32");
++ assert_eq!(get_suffix("1.0f32"), "f32");
++ assert_eq!(get_suffix("1.0_f32"), "f32");
++}
++
++#[test]
++fn test_deep_group_empty() {
++ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
++ ))]),
++ ))]);
++
++ snapshot!(tokens as Lit, @r#""hi""# );
+ }
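The new literal tests above exercise suffix handling: arbitrary suffixes such as the q in "..."q are preserved, negative literals round-trip through LitInt::new and LitFloat::new, and suffix() exposes the raw suffix text. A minimal sketch of the suffix API, assuming syn 1.x (not part of the patch):

    use syn::Lit;

    fn main() {
        let lit: Lit = syn::parse_str("1_i32").unwrap();
        if let Lit::Int(int) = lit {
            // The suffix is kept verbatim; the digits parse independently of it.
            assert_eq!(int.suffix(), "i32");
            assert_eq!(int.base10_parse::<i32>().unwrap(), 1);
        }
    }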
+diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
+index 547472d6f4..d37dda948a 100644
+--- mozilla-release/third_party/rust/syn/tests/test_meta.rs
++++ mozilla-release/third_party/rust/syn/tests/test_meta.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,14 +8,14 @@ fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "hello",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "hello",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,31 +25,31 @@ fn test_parse_meta_name_value() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -65,31 +61,31 @@ fn test_parse_meta_name_value_with_keyword() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -101,31 +97,31 @@ fn test_parse_meta_name_value_with_bool() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -137,35 +133,35 @@ fn test_parse_meta_item_list_lit() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -177,133 +173,133 @@ fn test_parse_meta_item_multiple() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -316,28 +312,28 @@ fn test_parse_nested_meta() {
+
+ let input = "list(name2 = 6)";
+ snapshot!(input as NestedMeta, @r###"
+- ⋮Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮})
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ })
+ "###);
+ }
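The Meta, MetaList, and MetaNameValue snapshots above are what Attribute::parse_meta produces for conventional attribute syntax. A minimal standalone sketch, assuming syn 1.x, using an attribute that mirrors the strings in these tests (not part of the patch):

    use syn::{parse_quote, Attribute, Meta, NestedMeta};

    fn main() {
        let attr: Attribute = parse_quote!(#[foo(word, name = 5)]);
        if let Meta::List(list) = attr.parse_meta().unwrap() {
            // Two nested items: a bare path `word` and a name-value `name = 5`.
            assert_eq!(list.nested.len(), 2);
            assert!(matches!(list.nested[0], NestedMeta::Meta(Meta::Path(_))));
        }
    }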
+diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
+index f09495187f..57a3c7c38c 100644
+--- mozilla-release/third_party/rust/syn/tests/test_parse_buffer.rs
++++ mozilla-release/third_party/rust/syn/tests/test_parse_buffer.rs
+@@ -1,7 +1,7 @@
+-#[macro_use]
+-extern crate syn;
+-
++use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
++use std::iter::FromIterator;
+ use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
++use syn::{parenthesized, Token};
+
+ #[test]
+ #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
+@@ -53,3 +53,38 @@ fn smuggled_speculative_cursor_into_brackets() {
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+ }
++
++#[test]
++fn trailing_empty_none_group() {
++ fn parse(input: ParseStream) -> Result<()> {
++ input.parse::<Token![+]>()?;
++
++ let content;
++ parenthesized!(content in input);
++ content.parse::<Token![+]>()?;
++
++ Ok(())
++ }
++
++ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(
++ Delimiter::Parenthesis,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ ]),
++ )),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::new(),
++ ))]),
++ )),
++ ]);
++
++ parse.parse2(tokens).unwrap();
++}
+diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
+new file mode 100644
+index 0000000000..76bd065777
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_parse_stream.rs
+@@ -0,0 +1,12 @@
++use syn::ext::IdentExt;
++use syn::parse::ParseStream;
++use syn::{Ident, Token};
++
++#[test]
++fn test_peek() {
++ let _ = |input: ParseStream| {
++ let _ = input.peek(Ident);
++ let _ = input.peek(Ident::peek_any);
++ let _ = input.peek(Token![::]);
++ };
++}
+diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
+index 1343aa646f..73388dd79d 100644
+--- mozilla-release/third_party/rust/syn/tests/test_pat.rs
++++ mozilla-release/third_party/rust/syn/tests/test_pat.rs
+@@ -1,10 +1,5 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ use quote::quote;
+-use syn::Pat;
++use syn::{Item, Pat, Stmt};
+
+ #[test]
+ fn test_pat_ident() {
+@@ -21,3 +16,23 @@ fn test_pat_path() {
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+ }
++
++#[test]
++fn test_leading_vert() {
++ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
++
++ syn::parse_str::<Item>("fn f() {}").unwrap();
++ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
++ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
++
++ syn::parse_str::<Stmt>("let | () = ();").unwrap();
++ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
++ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
++ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
++ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
++}
+diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
+new file mode 100644
+index 0000000000..2ce12066f5
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_path.rs
+@@ -0,0 +1,52 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::{Expr, Type};
++
++#[test]
++fn parse_interpolated_leading_component() {
++ // mimics the token stream corresponding to `$mod::rest`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
++ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("rest", Span::call_site())),
++ ]);
++
++ snapshot!(tokens.clone() as Expr, @r###"
++ Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
+index 53ee66e372..a586b3fe48 100644
+--- mozilla-release/third_party/rust/syn/tests/test_precedence.rs
++++ mozilla-release/third_party/rust/syn/tests/test_precedence.rs
+@@ -4,35 +4,26 @@
+
+ //! The tests in this module do the following:
+ //!
+-//! 1. Parse a given expression in both `syn` and `libsyntax`.
++//! 1. Parse a given expression in both `syn` and `librustc`.
+ //! 2. Fold over the expression adding brackets around each subexpression (with
+-//! some complications - see the `syn_brackets` and `libsyntax_brackets`
++//! some complications - see the `syn_brackets` and `librustc_brackets`
+ //! methods).
+ //! 3. Serialize the `syn` expression back into a string, and re-parse it with
+-//! `libsyntax`.
++//! `librustc`.
+ //! 4. Respan all of the expressions, replacing the spans with the default
+ //! spans.
+ //! 5. Compare the expressions with one another, if they are not equal fail.
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate regex;
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
+-extern crate smallvec;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+ use regex::Regex;
+-use smallvec::smallvec;
+-use syntax::ast;
+-use syntax::ptr::P;
+-use syntax_pos::edition::Edition;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_span::edition::Edition;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -73,7 +64,7 @@ fn test_simple_precedence() {
+ continue;
+ };
+
+- let pf = match test_expressions(vec![expr]) {
++ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
+ (1, 0) => "passed",
+ (0, 1) => {
+ failed += 1;
+@@ -91,8 +82,8 @@ fn test_simple_precedence() {
+
+ /// Test expressions from rustc, like in `test_round_trip`.
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_rustc_precedence() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -118,15 +109,6 @@ fn test_rustc_precedence() {
+ return;
+ }
+
+- // Our version of `libsyntax` can't parse this tests
+- if path
+- .to_str()
+- .unwrap()
+- .ends_with("optional_comma_in_match_arm.rs")
+- {
+- return;
+- }
+-
+ let mut file = File::open(path).unwrap();
+ let mut content = String::new();
+ file.read_to_string(&mut content).unwrap();
+@@ -134,8 +116,9 @@ fn test_rustc_precedence() {
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
++ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+- test_expressions(exprs)
++ test_expressions(edition, exprs)
+ }
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+@@ -169,36 +152,36 @@ fn test_rustc_precedence() {
+ }
+ }
+
+-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
++fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ for expr in exprs {
+ let raw = quote!(#expr).to_string();
+
+- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
++ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse raw\n");
++ errorf!("\nFAIL - librustc failed to parse raw\n");
+ continue;
+ };
+
+ let syn_expr = syn_brackets(expr);
+- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
++ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
++ errorf!("\nFAIL - librustc failed to parse bracketed\n");
+ continue;
+ };
+
+- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
++ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ passed += 1;
+ } else {
+ failed += 1;
+- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
++ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
+ }
+ }
+ });
+@@ -206,54 +189,106 @@ fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
+ (passed, failed)
+ }
+
+-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
++fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
++ parse::librustc_expr(input).and_then(librustc_brackets)
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precidence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ ///
+-/// This method operates on libsyntax objects.
+-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++/// This method operates on librustc objects.
++fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++ use rustc_ast::ast::{
++ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
++ };
++ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
++ use rustc_data_structures::map_in_place::MapInPlace;
+ use rustc_data_structures::thin_vec::ThinVec;
+- use smallvec::SmallVec;
++ use rustc_span::DUMMY_SP;
+ use std::mem;
+- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
+- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
+- use syntax_pos::DUMMY_SP;
+
+ struct BracketsVisitor {
+ failed: bool,
+ };
+
++ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
++ if f.is_shorthand {
++ noop_visit_expr(&mut f.expr, vis);
++ } else {
++ vis.visit_expr(&mut f.expr);
++ }
++ vec![f]
++ }
++
++ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
++ let kind = match stmt.kind {
++ // Don't wrap toplevel expressions in statements.
++ StmtKind::Expr(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Expr(e)
++ }
++ StmtKind::Semi(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Semi(e)
++ }
++ s => s,
++ };
++
++ vec![Stmt { kind, ..stmt }]
++ }
++
++ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
++ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
++ match &mut e.kind {
++ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
++ ExprKind::Struct(path, fields, expr) => {
++ vis.visit_path(path);
++ fields.flat_map_in_place(|field| flat_map_field(field, vis));
++ visit_opt(expr, |expr| vis.visit_expr(expr));
++ vis.visit_id(&mut e.id);
++ vis.visit_span(&mut e.span);
++ visit_thin_attrs(&mut e.attrs, vis);
++ }
++ _ => noop_visit_expr(e, vis),
++ }
++ }
++
+ impl MutVisitor for BracketsVisitor {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+- match e.node {
++ match e.kind {
+ ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+- node: ExprKind::Err,
++ kind: ExprKind::Err,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
++ tokens: None,
+ }),
+ );
+- e.node = ExprKind::Paren(inner);
++ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
+- if f.is_shorthand {
+- noop_visit_expr(&mut f.expr, self);
+- } else {
+- self.visit_expr(&mut f.expr);
++ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
++ _ => noop_visit_generic_arg(arg, self),
+ }
+- SmallVec::from([f])
++ }
++
++ fn visit_block(&mut self, block: &mut P<Block>) {
++ self.visit_id(&mut block.id);
++ block
++ .stmts
++ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
++ self.visit_span(&mut block.span);
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+@@ -267,25 +302,8 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ let _ = ty;
+ }
+
+- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
+- let node = match stmt.node {
+- // Don't wrap toplevel expressions in statements.
+- StmtKind::Expr(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Expr(e)
+- }
+- StmtKind::Semi(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Semi(e)
+- }
+- s => s,
+- };
+-
+- smallvec![Stmt { node, ..stmt }]
+- }
+-
+- fn visit_mac(&mut self, mac: &mut Mac) {
+- // By default when folding over macros, libsyntax panics. This is
++ fn visit_mac(&mut self, mac: &mut MacCall) {
++ // By default when folding over macros, librustc panics. This is
+ // because it's usually not what you want, you want to run after
+ // macro expansion. We do want to do that (syn doesn't do macro
+ // expansion), so we implement visit_mac to just return the macro
+@@ -295,11 +313,11 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ }
+
+ let mut folder = BracketsVisitor { failed: false };
+- folder.visit_expr(&mut libsyntax_expr);
++ folder.visit_expr(&mut librustc_expr);
+ if folder.failed {
+ None
+ } else {
+- Some(libsyntax_expr)
++ Some(librustc_expr)
+ }
+ }
+
+@@ -318,14 +336,33 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
+ Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
+ fold_expr(self, expr)
+ }
+- node => Expr::Paren(ExprParen {
++ _ => Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+- expr: Box::new(fold_expr(self, node)),
++ expr: Box::new(fold_expr(self, expr)),
+ paren_token: token::Paren::default(),
+ }),
+ }
+ }
+
++ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_argument(self, arg),
++ }
++ }
++
++ fn fold_generic_method_argument(
++ &mut self,
++ arg: GenericMethodArgument,
++ ) -> GenericMethodArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_method_argument(self, arg),
++ }
++ }
++
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+@@ -360,7 +397,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+- self.0.push(expr);
++ match expr {
++ Expr::Verbatim(tokens) if tokens.is_empty() => {}
++ _ => self.0.push(expr),
++ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
+new file mode 100644
+index 0000000000..923df96ba9
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_receiver.rs
+@@ -0,0 +1,127 @@
++use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
++
++#[test]
++fn test_by_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_value(self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_mut_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_mut(mut self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_ref() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_ref(self: &Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_box() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_box(self: Box<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_pin() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_pin(self: Pin<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_explicit_type() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn explicit_type(self: Pin<MyType>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn value_shorthand(self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_mut_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn mut_value_shorthand(mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_shorthand(&self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_mut_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_mut_shorthand(&mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
+index 2fc9cecd86..260dd0c3d9 100644
+--- mozilla-release/third_party/rust/syn/tests/test_round_trip.rs
++++ mozilla-release/third_party/rust/syn/tests/test_round_trip.rs
+@@ -2,22 +2,20 @@
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_ast;
++extern crate rustc_errors;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+-use syntax::ast;
+-use syntax::parse::{self, PResult, ParseSess};
+-use syntax::source_map::FilePathMapping;
+-use syntax_pos::edition::Edition;
+-use syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_errors::PResult;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -38,8 +36,8 @@ mod repo;
+ use common::eq::SpanlessEq;
+
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_round_trip() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -78,11 +76,12 @@ fn test_round_trip() {
+ }
+ };
+ let back = quote!(#krate).to_string();
++ let edition = repo::edition(path).parse().unwrap();
+
+ let equal = panic::catch_unwind(|| {
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- let before = match libsyntax_parse(content, &sess) {
++ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(mut diagnostic) => {
+ diagnostic.cancel();
+@@ -93,7 +92,7 @@ fn test_round_trip() {
+ errorf!("=== {}: ignore\n", path.display());
+ } else {
+ errorf!(
+- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
++ "=== {}: ignore - librustc failed to parse original content: {}\n",
+ path.display(),
+ diagnostic.message()
+ );
+@@ -101,10 +100,10 @@ fn test_round_trip() {
+ return true;
+ }
+ };
+- let after = match libsyntax_parse(back, &sess) {
++ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(mut diagnostic) => {
+- errorf!("=== {}: libsyntax failed to parse", path.display());
++ errorf!("=== {}: librustc failed to parse", path.display());
+ diagnostic.emit();
+ return false;
+ }
+@@ -130,7 +129,7 @@ fn test_round_trip() {
+ })
+ });
+ match equal {
+- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
++ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
+ Ok(true) => {}
+ Ok(false) => {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+@@ -147,7 +146,7 @@ fn test_round_trip() {
+ }
+ }
+
+-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
++fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+ let name = FileName::Custom("test_round_trip".to_string());
+ parse::parse_crate_from_source_str(name, content, sess)
+ }
+diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
+new file mode 100644
+index 0000000000..dc26b9aab3
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_shebang.rs
+@@ -0,0 +1,59 @@
++#[macro_use]
++mod macros;
++
++#[test]
++fn test_basic() {
++ let content = "#!/usr/bin/env rustx\nfn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ shebang: Some("#!/usr/bin/env rustx"),
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_comment() {
++ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ attrs: [
++ Attribute {
++ style: Inner,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "allow",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(dead_code)`),
++ },
++ ],
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
+index aadf42e3af..180d859916 100644
+--- mozilla-release/third_party/rust/syn/tests/test_should_parse.rs
++++ mozilla-release/third_party/rust/syn/tests/test_should_parse.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ macro_rules! should_parse {
+ ($name:ident, { $($in:tt)* }) => {
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
+index 386d4df889..01e8401158 100644
+--- mozilla-release/third_party/rust/syn/tests/test_size.rs
++++ mozilla-release/third_party/rust/syn/tests/test_size.rs
+@@ -1,7 +1,5 @@
+ #![cfg(target_pointer_width = "64")]
+
+-mod features;
+-
+ use std::mem;
+ use syn::*;
+
+diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
+new file mode 100644
+index 0000000000..d68b47fd2f
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_stmt.rs
+@@ -0,0 +1,44 @@
++#[macro_use]
++mod macros;
++
++use syn::Stmt;
++
++#[test]
++fn test_raw_operator() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Verbatim(`& raw const x`)),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_variable() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Expr::Reference {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "raw",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_invalid() {
++ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
++}
+diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
+index 70a9a72aab..5b00448af8 100644
+--- mozilla-release/third_party/rust/syn/tests/test_token_trees.rs
++++ mozilla-release/third_party/rust/syn/tests/test_token_trees.rs
+@@ -1,9 +1,3 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -21,7 +15,11 @@ fn test_struct() {
+ }
+ ";
+
+- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
++ snapshot!(input as TokenStream, @r###"
++ TokenStream(
++ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
++ )
++ "###);
+ }
+
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
+new file mode 100644
+index 0000000000..9cbdcd6b99
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_ty.rs
+@@ -0,0 +1,53 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Type;
++
++#[test]
++fn test_mut_self() {
++ syn::parse_str::<Type>("fn(mut self)").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
++}
++
++#[test]
++fn test_macro_variable_type() {
++ // mimics the token stream corresponding to `$ty<T>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
++ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("T", Span::call_site())),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ ]);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "ty",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
+new file mode 100644
+index 0000000000..c3d0ac7a5b
+--- /dev/null
++++ mozilla-release/third_party/rust/syn/tests/test_visibility.rs
+@@ -0,0 +1,145 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use std::iter::FromIterator;
++use syn::parse::{Parse, ParseStream};
++use syn::{DeriveInput, Result, Visibility};
++
++#[derive(Debug)]
++struct VisRest {
++ vis: Visibility,
++ rest: TokenStream,
++}
++
++impl Parse for VisRest {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Ok(VisRest {
++ vis: input.parse()?,
++ rest: input.parse()?,
++ })
++ }
++}
++
++macro_rules! assert_vis_parse {
++ ($input:expr, Ok($p:pat)) => {
++ assert_vis_parse!($input, Ok($p) + "");
++ };
++
++ ($input:expr, Ok($p:pat) + $rest:expr) => {
++ let expected = $rest.parse::<TokenStream>().unwrap();
++ let parse: VisRest = syn::parse_str($input).unwrap();
++
++ match parse.vis {
++ $p => {}
++ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
++ }
++
++ // NOTE: Round-trips through `to_string` to avoid potential whitespace
++ // diffs.
++ assert_eq!(parse.rest.to_string(), expected.to_string());
++ };
++
++ ($input:expr, Err) => {
++ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
++ };
++}
++
++#[test]
++fn test_pub() {
++ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
++}
++
++#[test]
++fn test_crate() {
++ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
++}
++
++#[test]
++fn test_inherited() {
++ assert_vis_parse!("", Ok(Visibility::Inherited));
++}
++
++#[test]
++fn test_in() {
++ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_crate() {
++ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_self() {
++ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_super() {
++ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_missing_in() {
++ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
++}
++
++#[test]
++fn test_missing_in_path() {
++ assert_vis_parse!("pub(in)", Err);
++}
++
++#[test]
++fn test_crate_path() {
++ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
++}
++
++#[test]
++fn test_junk_after_in() {
++ assert_vis_parse!("pub(in some::path @@garbage)", Err);
++}
++
++#[test]
++fn test_empty_group_vis() {
++ // mimics `struct S { $vis $field: () }` where $vis is empty
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("struct", Span::call_site())),
++ TokenTree::Ident(Ident::new("S", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
++ "f",
++ Span::call_site(),
++ ))]),
++ )),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as DeriveInput, @r###"
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("f"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
+index a81b3df4d0..a1a670d9ed 100644
+--- mozilla-release/third_party/rust/syn/tests/zzz_stable.rs
++++ mozilla-release/third_party/rust/syn/tests/zzz_stable.rs
+@@ -1,7 +1,5 @@
+ #![cfg(syn_disable_nightly_tests)]
+
+-extern crate termcolor;
+-
+ use std::io::{self, Write};
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+@@ -10,7 +8,7 @@ const MSG: &str = "\
+ ‖ WARNING:
+ ‖ This is not a nightly compiler so not all tests were able to
+ ‖ run. Syn includes tests that compare Syn's parser against the
+-‖ compiler's parser, which requires access to unstable libsyntax
++‖ compiler's parser, which requires access to unstable librustc
+ ‖ data structures and a nightly compiler.
+ ‖
+ ";
+--
+2.28.0
+
Property changes on: head/www/cliqz/files/patch-bug1663715
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
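Note on the test_receiver.rs cases added in the patch above: they pin down how syn 1.0 classifies method receivers — shorthand forms (`self`, `&self`, `&mut self`) come back from `Signature::receiver()` as `FnArg::Receiver`, while explicitly typed forms (`self: Self`, `self: Box<Self>`, `self: Pin<Self>`) come back as `FnArg::Typed`. A minimal standalone sketch of the same behaviour, assuming syn 1.x with the "full" feature enabled (this snippet is not part of the commit):

use syn::{parse_quote, FnArg, TraitItemMethod};

fn main() {
    // Shorthand receivers are classified as FnArg::Receiver,
    // as the test_*_shorthand cases above expect.
    let shorthand: TraitItemMethod = parse_quote! {
        fn by_ref(&self);
    };
    assert!(matches!(shorthand.sig.receiver(), Some(FnArg::Receiver(_))));

    // Explicitly typed receivers are classified as FnArg::Typed,
    // matching test_by_box and friends.
    let typed: TraitItemMethod = parse_quote! {
        fn by_box(self: Box<Self>);
    };
    assert!(matches!(typed.sig.receiver(), Some(FnArg::Typed(_))));
}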
Index: head/www/deno/Makefile
===================================================================
--- head/www/deno/Makefile (revision 552220)
+++ head/www/deno/Makefile (revision 552221)
@@ -1,359 +1,360 @@
# $FreeBSD$
PORTNAME= deno
DISTVERSIONPREFIX= v
DISTVERSION= 1.4.6
+PORTREVISION= 1
CATEGORIES= www
MAINTAINER= mikael@FreeBSD.org
COMMENT= Secure JavaScript and TypeScript runtime
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
BROKEN_i386= cargo:warning=c/freebsd.c:31:10: error: conflicting types for 'get_cpu_speed'
BUILD_DEPENDS= binutils>0:devel/binutils \
gn:devel/gn \
libunwind>0:devel/libunwind
.include <bsd.port.options.mk>
# clang10+ is required, this conditional can be dropped when 12.1 is EOL
.if (${OSVERSION} >= 1200000 && ${OSVERSION} < 1201515)
BUILD_DEPENDS+= llvm10>0:devel/llvm10
.endif
USES= cargo ninja pkgconfig:build python:3.5+,build
USE_GITHUB= yes
GH_ACCOUNT= denoland
USE_GNOME= glib20
WRKSRC= ${WRKDIR}/${PORTNAME}-${PORTVERSION}
RUSTY_VERS= 0.11.0
CARGO_ENV+= CLANG_BASE_PATH=/usr \
GN=${PREFIX}/bin/gn \
NINJA=${PREFIX}/bin/ninja \
V8_FROM_SOURCE=1
RUSTFLAGS+= -C link-arg=-fuse-ld=lld
PLIST_FILES= bin/${PORTNAME}
BINARY_ALIAS= python=${PYTHON_CMD}
CARGO_CRATES= Inflector-0.11.4 \
adler-0.2.3 \
ahash-0.3.8 \
aho-corasick-0.7.13 \
alloc-no-stdlib-2.0.1 \
alloc-stdlib-0.2.1 \
ansi_term-0.11.0 \
anyhow-1.0.32 \
anymap-0.12.1 \
arc-swap-0.4.7 \
arrayvec-0.5.1 \
ast_node-0.7.0 \
async-compression-0.3.5 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.1 \
base64-0.11.0 \
base64-0.12.3 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-buffer-0.9.0 \
block-padding-0.1.5 \
brotli-3.3.0 \
brotli-decompressor-2.3.1 \
buf_redux-0.8.4 \
bumpalo-3.4.0 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytes-0.5.6 \
cargo_gn-0.0.15 \
cc-1.0.59 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clap-2.33.3 \
cloudabi-0.0.3 \
const-random-0.1.8 \
const-random-macro-0.1.8 \
cpuid-bool-0.1.2 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.4 \
crossbeam-utils-0.7.2 \
darling-0.10.2 \
darling_core-0.10.2 \
darling_macro-0.10.2 \
dashmap-3.11.10 \
deno_doc-0.1.11 \
deno_lint-0.2.3 \
digest-0.8.1 \
digest-0.9.0 \
dissimilar-1.0.2 \
dlopen-0.1.8 \
dlopen_derive-0.1.4 \
dprint-core-0.31.0 \
dprint-plugin-typescript-0.32.4 \
dtoa-0.4.6 \
either-1.6.0 \
encoding_rs-0.8.24 \
enum_kind-0.2.0 \
env_logger-0.7.1 \
errno-0.1.8 \
fake-simd-0.1.2 \
filetime-0.2.12 \
flate2-1.0.17 \
fnv-1.0.7 \
from_variant-0.1.2 \
fsevent-2.0.2 \
fsevent-sys-3.0.2 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
fwdansi-1.1.0 \
fxhash-0.2.1 \
generic-array-0.12.3 \
generic-array-0.14.4 \
getrandom-0.1.15 \
h2-0.2.6 \
hashbrown-0.9.0 \
headers-0.3.2 \
headers-core-0.2.0 \
hermit-abi-0.1.15 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
humantime-1.3.0 \
hyper-0.13.7 \
hyper-rustls-0.21.0 \
ident_case-1.0.1 \
idna-0.2.0 \
if_chain-1.0.0 \
indexmap-1.6.0 \
inotify-0.8.3 \
inotify-sys-0.1.3 \
input_buffer-0.3.1 \
iovec-0.1.4 \
ipnet-2.3.0 \
is-macro-0.1.8 \
itoa-0.4.6 \
js-sys-0.3.45 \
jsdoc-0.6.0 \
jsonc-parser-0.14.0 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
lexical-core-0.7.4 \
libc-0.2.77 \
log-0.4.11 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.2 \
mio-0.6.22 \
mio-extras-2.0.6 \
mio-named-pipes-0.1.7 \
mio-uds-0.6.8 \
miow-0.2.1 \
miow-0.3.5 \
multipart-0.17.0 \
net2-0.2.35 \
new_debug_unreachable-1.0.4 \
nix-0.18.0 \
nom-5.1.2 \
notify-5.0.0-pre.3 \
num-bigint-0.2.6 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.1 \
opaque-debug-0.2.3 \
opaque-debug-0.3.0 \
ordered-float-1.1.0 \
os_pipe-0.9.2 \
owning_ref-0.4.1 \
percent-encoding-2.1.0 \
phf-0.8.0 \
phf_generator-0.8.0 \
phf_macros-0.8.0 \
phf_shared-0.8.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pmutil-0.5.3 \
ppv-lite86-0.2.9 \
precomputed-hash-0.1.1 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-0.4.30 \
proc-macro2-1.0.21 \
pty-0.2.2 \
quick-error-1.2.3 \
quote-0.6.13 \
quote-1.0.7 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_pcg-0.2.1 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
reqwest-0.10.8 \
retain_mut-0.1.1 \
ring-0.16.15 \
rustc_version-0.2.3 \
rustls-0.18.1 \
rusty_v8-0.11.0 \
rustyline-6.3.0 \
rustyline-derive-0.3.1 \
ryu-1.0.5 \
safemem-0.3.3 \
same-file-1.0.6 \
scoped-tls-1.0.0 \
scopeguard-1.1.0 \
sct-0.6.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
semver-parser-0.9.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
sha-1-0.8.2 \
sha-1-0.9.1 \
signal-hook-registry-1.2.1 \
siphasher-0.3.3 \
slab-0.4.2 \
smallvec-1.4.2 \
socket2-0.3.15 \
sourcemap-6.0.1 \
spin-0.5.2 \
stable_deref_trait-1.2.0 \
static_assertions-1.1.0 \
string_cache-0.8.0 \
string_cache_codegen-0.5.1 \
string_enum-0.3.0 \
strsim-0.8.0 \
strsim-0.9.3 \
swc_atoms-0.2.4 \
swc_common-0.10.3 \
swc_ecma_ast-0.32.0 \
swc_ecma_codegen-0.36.0 \
swc_ecma_codegen_macros-0.5.0 \
swc_ecma_dep_graph-0.4.0 \
swc_ecma_parser-0.38.0 \
swc_ecma_parser_macros-0.4.1 \
swc_ecma_transforms-0.25.2 \
swc_ecma_transforms_macros-0.1.1 \
swc_ecma_utils-0.22.0 \
swc_ecma_visit-0.18.0 \
swc_ecmascript-0.9.1 \
swc_macros_common-0.3.1 \
swc_visit-0.2.0 \
swc_visit_macros-0.2.0 \
syn-0.15.44 \
syn-1.0.41 \
sys-info-0.7.0 \
tempfile-3.1.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
time-0.1.44 \
tinyvec-0.3.4 \
tokio-0.2.22 \
tokio-macros-0.2.5 \
tokio-rustls-0.14.1 \
tokio-tungstenite-0.11.0 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.19 \
tracing-core-0.1.16 \
tracing-futures-0.2.4 \
try-lock-0.2.3 \
tungstenite-0.11.1 \
twoway-0.1.8 \
typenum-1.12.0 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
url-2.1.1 \
urlencoding-1.1.1 \
utf-8-0.7.5 \
utf8parse-0.2.0 \
uuid-0.8.1 \
vec_map-0.8.2 \
version_check-0.9.2 \
walkdir-2.3.1 \
want-0.3.0 \
warp-0.2.5 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.68 \
wasm-bindgen-backend-0.2.68 \
wasm-bindgen-futures-0.4.18 \
wasm-bindgen-macro-0.2.68 \
wasm-bindgen-macro-support-0.2.68 \
wasm-bindgen-shared-0.2.68 \
web-sys-0.3.45 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
which-4.0.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
winres-0.1.11 \
ws2_32-sys-0.2.1
post-patch:
${REINPLACE_CMD} "s|%%LOCALBASE%%|${LOCALBASE}|" \
${WRKSRC}/cargo-crates/rusty_v8-${RUSTY_VERS}/build/toolchain/gcc_toolchain.gni \
${WRKSRC}/cargo-crates/rusty_v8-${RUSTY_VERS}/buildtools/third_party/libc++/BUILD.gn
# clang10+ is required, this conditional can be dropped when 12.1 is EOL
.if (${OSVERSION} >= 1200000 && ${OSVERSION} < 1201515)
@${PATCH} -d ${PATCH_WRKSRC} ${PATCH_ARGS} < ${FILESDIR}/extrapatch-clang10
.endif
do-install:
${INSTALL_PROGRAM} ${WRKDIR}/target/release/deno ${STAGEDIR}${LOCALBASE}/bin
.include <bsd.port.mk>
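The CARGO_ENV block above hands the gn and ninja locations plus the V8_FROM_SOURCE toggle to the crate build scripts that drive the bundled V8 build. As a rough illustration of that general pattern only — this is not rusty_v8's actual build script, and how the real crate consumes these variables is an assumption here — a build.rs might pick the values up like this:

use std::env;
use std::process::Command;

fn main() {
    // Tool locations injected by the port's CARGO_ENV; fall back to PATH.
    let gn = env::var("GN").unwrap_or_else(|_| "gn".to_string());
    let ninja = env::var("NINJA").unwrap_or_else(|_| "ninja".to_string());

    println!("cargo:rerun-if-env-changed=V8_FROM_SOURCE");
    if env::var("V8_FROM_SOURCE").is_ok() {
        // Generate and run the GN/Ninja build; a real script would also
        // pass an output directory and the desired GN args.
        let ok = Command::new(&gn).args(&["gen", "out"]).status()
            .map(|s| s.success()).unwrap_or(false)
            && Command::new(&ninja).args(&["-C", "out"]).status()
                .map(|s| s.success()).unwrap_or(false);
        assert!(ok, "gn/ninja build failed");
    }
}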
Index: head/www/ffsend/Makefile
===================================================================
--- head/www/ffsend/Makefile (revision 552220)
+++ head/www/ffsend/Makefile (revision 552221)
@@ -1,339 +1,340 @@
# $FreeBSD$
PORTNAME= ffsend
DISTVERSIONPREFIX= v
DISTVERSION= 0.2.67
+PORTREVISION= 1
CATEGORIES= www
MAINTAINER= 0mp@FreeBSD.org
COMMENT= Easily and securely share files from the command line via Firefox Send
LICENSE= APACHE20 BSD2CLAUSE BSD3CLAUSE CC0-1.0 FUCHSIACPRNG GPLv3 ISCL \
MIT MPL20 UNLICENSE ZLIB
LICENSE_COMB= multi
LICENSE_NAME_FUCHSIACPRNG= BSD-like license + patent clause
LICENSE_FILE_FUCHSIACPRNG= ${WRKSRC}/cargo-crates/fuchsia-cprng-0.1.1/LICENSE
# Main license.
LICENSE_FILE_GPLv3= ${WRKSRC}/LICENSE
LICENSE_PERMS_FUCHSIACPRNG= dist-mirror dist-sell pkg-mirror pkg-sell \
auto-accept
RUN_DEPENDS= ca_root_nss>0:security/ca_root_nss \
xclip:x11/xclip \
xsel:x11/xsel-conrad
USES= cargo localbase:ldflags python:build,3.5+ ssl xorg
USE_GITLAB= yes
GL_ACCOUNT= timvisee
GL_COMMIT= ed34588184d8ac493edceb7b9f500344ae21148b
USE_XORG= xcb
CARGO_CRATES= addr2line-0.13.0 \
adler-0.2.3 \
aho-corasick-0.7.13 \
ansi_term-0.11.0 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.1 \
backtrace-0.3.50 \
base-x-0.2.6 \
base64-0.9.3 \
base64-0.10.1 \
base64-0.12.3 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-0.1.6 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
bstr-0.2.13 \
bumpalo-3.4.0 \
byte-tools-0.3.1 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.6 \
cc-1.0.60 \
cfg-if-0.1.10 \
chbs-0.0.10 \
checked_int_cast-1.0.0 \
chrono-0.4.18 \
clap-2.33.3 \
clipboard-0.5.0 \
clipboard-win-2.2.0 \
cloudabi-0.0.3 \
colored-2.0.0 \
const_fn-0.4.2 \
constant_time_eq-0.1.5 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
crossbeam-channel-0.4.4 \
crossbeam-utils-0.7.2 \
crossterm-0.17.7 \
crossterm_winapi-0.6.1 \
crypto-mac-0.7.0 \
csv-1.1.3 \
csv-core-0.1.10 \
darling-0.10.2 \
darling_core-0.10.2 \
darling_macro-0.10.2 \
derive_builder-0.9.0 \
derive_builder_core-0.9.0 \
digest-0.8.1 \
directories-3.0.1 \
dirs-1.0.5 \
dirs-sys-0.3.5 \
discard-1.0.4 \
dtoa-0.4.6 \
encode_unicode-0.3.6 \
encoding_rs-0.8.24 \
failure-0.1.8 \
failure_derive-0.1.8 \
fake-simd-0.1.2 \
ffsend-api-0.6.2 \
filetime-0.2.12 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fs2-0.4.3 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
generic-array-0.12.3 \
getrandom-0.1.15 \
gimli-0.22.0 \
h2-0.2.6 \
hashbrown-0.9.0 \
hermit-abi-0.1.16 \
hkdf-0.8.0 \
hmac-0.7.1 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
httpdate-0.3.2 \
hyper-0.10.16 \
hyper-0.13.8 \
hyper-rustls-0.21.0 \
hyper-tls-0.4.3 \
ident_case-1.0.1 \
idna-0.1.5 \
idna-0.2.0 \
indexmap-1.6.0 \
iovec-0.1.4 \
ipnet-2.3.0 \
itoa-0.4.6 \
js-sys-0.3.45 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.77 \
lock_api-0.3.4 \
log-0.3.9 \
log-0.4.11 \
malloc_buf-0.0.6 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
mime-0.2.6 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.2 \
mio-0.6.22 \
mio-0.7.0 \
miow-0.2.1 \
miow-0.3.5 \
native-tls-0.2.4 \
net2-0.2.35 \
ntapi-0.3.4 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
object-0.20.0 \
once_cell-1.4.1 \
opaque-debug-0.2.3 \
open-1.4.0 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
parking_lot-0.9.0 \
parking_lot-0.10.2 \
parking_lot_core-0.6.2 \
parking_lot_core-0.7.2 \
pathdiff-0.2.0 \
pbr-1.0.3 \
percent-encoding-1.0.1 \
percent-encoding-2.1.0 \
pin-project-0.4.24 \
pin-project-internal-0.4.24 \
pin-project-lite-0.1.8 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
ppv-lite86-0.2.9 \
prettytable-rs-0.8.0 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-1.0.23 \
qr2term-0.2.1 \
qrcode-0.12.0 \
quote-1.0.7 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
redox_users-0.3.5 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
reqwest-0.10.8 \
ring-0.16.15 \
rpassword-5.0.0 \
rust-argon2-0.8.2 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
rustls-0.18.1 \
ryu-1.0.5 \
safemem-0.3.3 \
schannel-0.1.19 \
scopeguard-1.1.0 \
sct-0.6.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
sha1-0.6.0 \
sha2-0.8.2 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.1 \
slab-0.4.2 \
smallvec-0.6.13 \
smallvec-1.4.2 \
socket2-0.3.15 \
spin-0.5.2 \
standback-0.2.10 \
stdweb-0.4.20 \
stdweb-derive-0.5.3 \
stdweb-internal-macros-0.2.9 \
stdweb-internal-runtime-0.1.5 \
strsim-0.8.0 \
strsim-0.9.3 \
subtle-1.0.0 \
syn-1.0.42 \
synstructure-0.12.4 \
tar-0.4.30 \
tempfile-3.1.0 \
term-0.5.2 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
time-0.1.44 \
time-0.2.22 \
time-macros-0.1.0 \
time-macros-impl-0.1.1 \
tinyvec-0.3.4 \
tokio-0.2.22 \
tokio-codec-0.1.2 \
tokio-executor-0.1.10 \
tokio-io-0.1.13 \
tokio-reactor-0.1.12 \
tokio-rustls-0.14.1 \
tokio-sync-0.1.8 \
tokio-tcp-0.1.4 \
tokio-tls-0.2.1 \
tokio-tls-0.3.1 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.19 \
tracing-core-0.1.16 \
traitobject-0.1.0 \
try-lock-0.2.3 \
typeable-0.1.2 \
typenum-1.12.0 \
unicase-1.4.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
url-1.7.2 \
url-2.1.1 \
urlshortener-3.0.0 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version-compare-0.0.10 \
version-compare-0.0.11 \
version_check-0.1.5 \
version_check-0.9.2 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.68 \
wasm-bindgen-backend-0.2.68 \
wasm-bindgen-futures-0.4.18 \
wasm-bindgen-macro-0.2.68 \
wasm-bindgen-macro-support-0.2.68 \
wasm-bindgen-shared-0.2.68 \
web-sys-0.3.45 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
websocket-0.24.0 \
websocket-base-0.24.0 \
which-4.0.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1 \
x11-clipboard-0.3.3 \
xattr-0.2.2 \
xcb-0.8.2
MAKE_ENV= XCLIP_PATH=${LOCALBASE}/bin/xclip \
XSEL_PATH=${LOCALBASE}/bin/xsel
BINARY_ALIAS= python3=${PYTHON_CMD}
PLIST_FILES= bin/${PORTNAME} \
bin/ffdel \
bin/ffget \
bin/ffput
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.for f in ffdel ffget ffput
cd ${STAGEDIR}${PREFIX}/bin && ${RLN} ${PORTNAME} ${f}
.endfor
.include <bsd.port.mk>
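MAKE_ENV above exports XCLIP_PATH and XSEL_PATH while cargo builds ffsend, presumably so the resulting binary invokes the exact ${LOCALBASE} copies of xclip/xsel for clipboard support. A small sketch of that compile-time-path technique — illustrative only, not ffsend's actual clipboard code; the runtime fallback to a bare "xclip" is an assumption:

use std::io::Write;
use std::process::{Command, Stdio};

// Captured at compile time; during the port build MAKE_ENV makes it
// point at ${LOCALBASE}/bin/xclip.
fn xclip_path() -> &'static str {
    option_env!("XCLIP_PATH").unwrap_or("xclip")
}

fn copy_to_clipboard(text: &str) -> std::io::Result<()> {
    let mut child = Command::new(xclip_path())
        .args(&["-selection", "clipboard"])
        .stdin(Stdio::piped())
        .spawn()?;
    child.stdin.as_mut().expect("piped stdin").write_all(text.as_bytes())?;
    // wait() closes the child's stdin before blocking, so this cannot deadlock.
    child.wait()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    copy_to_clipboard("https://example.com/share-link")
}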
Index: head/www/firefox/Makefile
===================================================================
--- head/www/firefox/Makefile (revision 552220)
+++ head/www/firefox/Makefile (revision 552221)
@@ -1,60 +1,61 @@
# Created by: Alan Eldridge <alane@FreeBSD.org>
# $FreeBSD$
PORTNAME= firefox
DISTVERSION= 81.0.2
+PORTREVISION= 1
PORTEPOCH= 2
CATEGORIES= www
MASTER_SITES= MOZILLA/${PORTNAME}/releases/${DISTVERSION}/source \
MOZILLA/${PORTNAME}/candidates/${DISTVERSION}-candidates/build2/source
DISTFILES= ${DISTNAME}.source${EXTRACT_SUFX}
MAINTAINER= gecko@FreeBSD.org
COMMENT= Web browser based on the browser portion of Mozilla
BUILD_DEPENDS= nspr>=4.26:devel/nspr \
nss>=3.56:security/nss \
icu>=67.1,1:devel/icu \
libevent>=2.1.8:devel/libevent \
harfbuzz>=2.6.8:print/harfbuzz \
graphite2>=1.3.14:graphics/graphite2 \
png>=1.6.35:graphics/png \
libvpx>=1.8.2:multimedia/libvpx \
${PYTHON_PKGNAMEPREFIX}sqlite3>0:databases/py-sqlite3@${PY_FLAVOR} \
v4l_compat>0:multimedia/v4l_compat \
autoconf-2.13:devel/autoconf213 \
nasm:devel/nasm \
yasm:devel/yasm \
zip:archivers/zip
USE_GECKO= gecko
CONFLICTS_INSTALL= firefox-esr
USE_MOZILLA= -sqlite
USES= tar:xz
FIREFOX_ICON= ${MOZILLA}.png
FIREFOX_ICON_SRC= ${PREFIX}/lib/${MOZILLA}/browser/chrome/icons/default/default48.png
FIREFOX_DESKTOP= ${MOZSRC}/taskcluster/docker/${MOZILLA}-snap/${MOZILLA}.desktop
MOZ_OPTIONS= --enable-application=browser \
--enable-official-branding
.include "${.CURDIR}/../../www/firefox/Makefile.options"
post-patch:
@${REINPLACE_CMD} -e 's/%u/%U/' -e '/X-MultipleArgs/d' \
-e '/^Icon/s/=.*/=${FIREFOX_ICON:R}/' \
${FIREFOX_DESKTOP}
@${REINPLACE_CMD} -e 's|%%LOCALBASE%%|${LOCALBASE}|g' \
${WRKSRC}/browser/app/nsBrowserApp.cpp
pre-configure:
(cd ${WRKSRC} && ${LOCALBASE}/bin/autoconf-2.13)
(cd ${WRKSRC}/js/src/ && ${LOCALBASE}/bin/autoconf-2.13)
post-install:
${INSTALL_DATA} ${FIREFOX_DESKTOP} ${STAGEDIR}${PREFIX}/share/applications/
${MKDIR} ${STAGEDIR}${PREFIX}/share/pixmaps
${LN} -sf ${FIREFOX_ICON_SRC} ${STAGEDIR}${PREFIX}/share/pixmaps/${FIREFOX_ICON}
.include <bsd.port.mk>
Index: head/www/firefox/files/patch-bug1663715
===================================================================
--- head/www/firefox/files/patch-bug1663715 (nonexistent)
+++ head/www/firefox/files/patch-bug1663715 (revision 552221)
@@ -0,0 +1,35278 @@
+
+# HG changeset patch
+# User Emilio Cobos Álvarez <emilio@crisal.io>
+# Date 1599584448 0
+# Node ID e2cede25c027940ca4b36917a31163c278a4411f
+# Parent a816580ea8a998b84f6dbddfe5683e2748455e5b
+Bug 1663715 - Update syn and proc-macro2 so that Firefox can build on Rust nightly again. r=froydnj
+
+Generated with:
+
+ cargo update -p syn --precise 1.0.40
+ ./mach vendor rust
+
+Rust issue: https://github.com/rust-lang/rust/issues/76482
+
+Differential Revision: https://phabricator.services.mozilla.com/D89473
+
+diff --git a/Cargo.lock b/Cargo.lock
+--- Cargo.lock
++++ Cargo.lock
+@@ -3816,19 +3816,19 @@ checksum = "ecd45702f76d6d3c75a80564378a
+ dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ ]
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
+-source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++version = "1.0.20"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
+ dependencies = [
+ "unicode-xid",
+ ]
+
+ [[package]]
+ name = "procedural-masquerade"
+ version = "0.1.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+@@ -4812,19 +4812,19 @@ dependencies = [
+ "cc",
+ "gleam",
+ "glsl-to-cxx",
+ "webrender_build",
+ ]
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
+-source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++version = "1.0.40"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+ ]
+
+ [[package]]
+ name = "sync-guid"
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+--- third_party/rust/proc-macro2/.cargo-checksum.json
++++ third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1,1 +1,1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+--- third_party/rust/proc-macro2/Cargo.toml
++++ third_party/rust/proc-macro2/Cargo.toml
+@@ -8,36 +8,35 @@
+ # If you believe there's an error in this file please file an
+ # issue against the rust-lang/cargo repository. If you're
+ # editing this file be aware that the upstream Cargo.toml
+ # will likely look very different (and much more reasonable)
+
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.20"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+ version = "1.0"
+ default_features = false
+
+ [features]
+ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+--- third_party/rust/proc-macro2/README.md
++++ third_party/rust/proc-macro2/README.md
+@@ -1,11 +1,11 @@
+ # proc-macro2
+
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+
+ A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
+ This library serves two purposes:
+
+ - **Bring proc-macro-like functionality to other contexts like build.rs and
+ main.rs.** Types from `proc_macro` are entirely specific to procedural macros
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+--- third_party/rust/proc-macro2/build.rs
++++ third_party/rust/proc-macro2/build.rs
+@@ -9,16 +9,20 @@
+ // "wrap_proc_macro"
+ // Wrap types from libproc_macro rather than polyfilling the whole API.
+ // Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
+ // because we can't emulate the unstable API without emulating everything
+ // else. Also enabled unconditionally on nightly, in which case the
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+ // nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
+ // features.
+ //
+ // "super_unstable"
+ // Implement the semver exempt API in terms of the nightly-only proc_macro
+@@ -52,16 +56,24 @@ fn main() {
+ // https://github.com/alexcrichton/proc-macro2/issues/147
+ println!("cargo:rustc-cfg=procmacro2_semver_exempt");
+ }
+
+ if semver_exempt || cfg!(feature = "span-locations") {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
+ }
+
+ println!("cargo:rustc-cfg=use_proc_macro");
+
+ if version.nightly || !semver_exempt {
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+--- third_party/rust/proc-macro2/src/fallback.rs
++++ third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,41 +1,121 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+ TokenStream { inner: Vec::new() }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
+ fn get_cursor(src: &str) -> Cursor {
+ // Create a dummy file & add it to the source map
+ SOURCE_MAP.with(|cm| {
+ let mut cm = cm.borrow_mut();
+ let name = format!("<parsed string {}>", cm.files.len());
+@@ -54,68 +134,49 @@ fn get_cursor(src: &str) -> Cursor {
+
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+ if i != 0 && !joint {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
+- }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+ }
+
+ #[cfg(use_proc_macro)]
+ impl From<proc_macro::TokenStream> for TokenStream {
+@@ -134,122 +195,107 @@ impl From<TokenStream> for proc_macro::T
+ .to_string()
+ .parse()
+ .expect("failed to parse to compiler tokens")
+ }
+ }
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+ }
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+ impl SourceFile {
+ /// Get the path to this source file as a string.
+ pub fn path(&self) -> PathBuf {
+ self.path.clone()
+ }
+
+ pub fn is_real(&self) -> bool {
+ // XXX(nika): Support real files in the future?
+ false
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[cfg(span_locations)]
+ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
+
+ #[cfg(span_locations)]
+ struct FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+ name: String,
+@@ -277,26 +323,31 @@ impl FileInfo {
+ }
+ }
+
+ fn span_within(&self, span: Span) -> bool {
+ span.lo >= self.span.lo && span.hi <= self.span.hi
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
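The rewritten lines_offsets now walks characters instead of bytes, so the recorded line offsets stay in the same character-based units that the new Cursor uses for span positions. A standalone sketch of that logic (illustrative only, not part of the patch):

fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
    // Offsets are counted in characters, not bytes.
    let mut lines = vec![0];
    let mut total = 0;
    for ch in s.chars() {
        total += 1;
        if ch == '\n' {
            lines.push(total);
        }
    }
    (total, lines)
}

fn main() {
    // "é" is one char but two bytes; the offsets below are char counts.
    assert_eq!(lines_offsets("é\nab\n"), (5, vec![0, 2, 5]));
}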
+ #[cfg(span_locations)]
+ struct SourceMap {
+ files: Vec<FileInfo>,
+ }
+
+ #[cfg(span_locations)]
+@@ -305,81 +356,83 @@ impl SourceMap {
+ // Add 1 so there's always space between files.
+ //
+ // We'll always have at least 1 file, as we initialize our files list
+ // with a dummy file.
+ self.files.last().unwrap().span.hi + 1
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Should we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+ }
+
+ fn fileinfo(&self, span: Span) -> &FileInfo {
+ for file in &self.files {
+ if file.span_within(span) {
+ return file;
+ }
+ }
+ panic!("Invalid span with no related FileInfo!");
+ }
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+ #[cfg(not(span_locations))]
+ pub fn call_site() -> Span {
+ Span {}
+ }
+
+ #[cfg(span_locations)]
+ pub fn call_site() -> Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+ // caller wants line/column information from.
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn source_file(&self) -> SourceFile {
+ SOURCE_MAP.with(|cm| {
+ let cm = cm.borrow();
+@@ -422,36 +475,69 @@ impl Span {
+ return None;
+ }
+ Some(Span {
+ lo: cmp::min(self.lo, other.lo),
+ hi: cmp::max(self.hi, other.hi),
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+ }
+
+ impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group {
+@@ -469,58 +555,67 @@ impl Group {
+ self.stream.clone()
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
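A hedged illustration of the delimiter spacing described in the comment above, using the crate's public API; the expected output is assumed from the fallback match arms shown in this hunk:

// Sketch only: expected strings are taken from the formatting comment above.
use proc_macro2::{Delimiter, Group, TokenStream};

fn main() {
    let empty = Group::new(Delimiter::Brace, TokenStream::new());
    println!("{}", empty); // prints "{ }" (empty braces keep one inner space)

    let body: TokenStream = "a + b".parse().unwrap();
    let braced = Group::new(Delimiter::Brace, body);
    println!("{}", braced); // prints "{ a + b }" (nonempty braces gain a closing space)
}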
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
+ }
+
+ impl Ident {
+ fn _new(string: &str, raw: bool, span: Span) -> Ident {
+ validate_ident(string);
+@@ -544,26 +639,24 @@ impl Ident {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || ('0' <= c && c <= '9')
+ || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
+ }
+
+ fn validate_ident(string: &str) {
+@@ -610,49 +703,49 @@ where
+ if self.raw {
+ other.starts_with("r#") && self.sym == other[2..]
+ } else {
+ self.sym == other
+ }
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+ debug.finish()
+ }
+
+ // Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(format!(concat!("{}", stringify!($kind)), n))
+@@ -664,17 +757,17 @@ macro_rules! unsuffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(n.to_string())
+ }
+ )*)
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+ }
+ }
+
+ suffixed_numbers! {
+ u8_suffixed => u8,
+@@ -706,61 +799,62 @@ impl Literal {
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn string(t: &str) -> Literal {
+ let mut text = String::with_capacity(t.len() + 2);
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+ Literal::_new(text)
+ }
+
+ pub fn character(t: char) -> Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
+ }
+
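The switch from escape_default to escape_debug in the string and character constructors above keeps printable non-ASCII characters literal instead of rewriting them as \u escapes. A small sketch of that difference, assuming the standard library's escaping rules (not part of the patch):

// Illustrates escape_debug vs escape_default for the constructors above.
fn main() {
    let s = "héllo";
    let debug: String = s.chars().flat_map(char::escape_debug).collect();
    let default: String = s.chars().flat_map(char::escape_default).collect();
    assert_eq!(debug, "héllo");         // printable Unicode kept as-is
    assert_eq!(default, "h\\u{e9}llo"); // escape_default rewrites it
}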
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+ b'\n' => escaped.push_str(r"\n"),
+ b'\r' => escaped.push_str(r"\r"),
+ b'"' => escaped.push_str("\\\""),
+ b'\\' => escaped.push_str("\\\\"),
+ b'\x20'..=b'\x7E' => escaped.push(*b as char),
+@@ -779,656 +873,22 @@ impl Literal {
+ self.span = span;
+ }
+
+ pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
+ None
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
+- }
+-}
+-
+-impl fmt::Debug for Literal {
+- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+- let mut debug = fmt.debug_struct("Literal");
+- debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
+- debug.finish()
+- }
+-}
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
++impl Debug for Literal {
++ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
++ let mut debug = fmt.debug_struct("Literal");
++ debug.field("lit", &format_args!("{}", self.text));
++ debug_span_field_if_nontrivial(&mut debug, self.span);
++ debug.finish()
+ }
+ }
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+--- third_party/rust/proc-macro2/src/lib.rs
++++ third_party/rust/proc-macro2/src/lib.rs
+@@ -73,37 +73,44 @@
+ //!
+ //! # Thread-Safety
+ //!
+ //! Most types in this crate are `!Sync` because the underlying compiler
+ //! types make use of thread-local memory, meaning they cannot be accessed from
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+ use std::cmp::Ordering;
+-use std::fmt;
++use std::fmt::{self, Debug, Display};
+ use std::hash::{Hash, Hasher};
+ use std::iter::FromIterator;
+ use std::marker;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::PathBuf;
+ use std::rc::Rc;
+ use std::str::FromStr;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
++
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+ #[path = "wrapper.rs"]
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+@@ -223,32 +230,32 @@ impl FromIterator<TokenStream> for Token
+ TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
+ }
+ }
+
+ /// Prints the token stream as a string that is supposed to be losslessly
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
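A quick round-trip sketch of the lossless-printing guarantee documented above (spans are not preserved, as the comment notes):

// Round-trip check for the Display guarantee documented above.
use proc_macro2::TokenStream;

fn main() {
    let ts: TokenStream = "fn add(a: i32, b: i32) -> i32 { a + b }".parse().unwrap();
    let reparsed: TokenStream = ts.to_string().parse().unwrap();
    // Token-for-token the streams match (modulo spans); compare their text forms.
    assert_eq!(ts.to_string(), reparsed.to_string());
}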
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ #[derive(Clone, PartialEq, Eq)]
+@@ -286,19 +293,19 @@ impl SourceFile {
+ /// Returns `true` if this source file is a real source file, and not
+ /// generated by an external macro's expansion.
+ pub fn is_real(&self) -> bool {
+ self.inner.is_real()
+ }
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// A line-column pair representing the start or end of a `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+ #[cfg(span_locations)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+@@ -306,16 +313,32 @@ pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends
+ /// (inclusive).
+ pub line: usize,
+ /// The 0-indexed column (in UTF-8 characters) in the source file on which
+ /// the span starts or ends (inclusive).
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
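The new Ord/PartialOrd impls order positions by line first, then column. A sketch; note that LineColumn is only exported when span locations are enabled (for example via the crate's span-locations feature, which turns on the span_locations cfg used above):

// Ordering sketch; assumes the span-locations feature is enabled.
use proc_macro2::LineColumn;

fn main() {
    let earlier = LineColumn { line: 2, column: 10 };
    let later = LineColumn { line: 3, column: 0 };
    assert!(earlier < later); // lines are compared before columns
}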
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+ _marker: marker::PhantomData<Rc<()>>,
+ }
+
+ impl Span {
+@@ -337,38 +360,42 @@ impl Span {
+ ///
+ /// Identifiers created with this span will be resolved as if they were
+ /// written directly at the macro call location (call-site hygiene) and
+ /// other code at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
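A hedged sketch of what mixed_site hygiene is for: a local introduced by a macro with this span will not collide with identifiers written at the call site (requires Rust 1.45+, as the doc above says; the helper name below is invented for illustration):

// Hypothetical proc-macro helper: "__generated_tmp" is an invented name
// used only for illustration.
use proc_macro2::{Ident, Span};

fn hidden_local() -> Ident {
    // Locals resolve at the macro definition site, per the docs above.
    Ident::new("__generated_tmp", Span::mixed_site())
}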
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::_new(imp::Span::def_site())
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+
+ /// Convert `proc_macro2::Span` to `proc_macro::Span`.
+ ///
+ /// This method is available when building with a nightly compiler, or when
+ /// building with rustc 1.29+ *without* semver exempt features.
+@@ -434,19 +461,19 @@ impl Span {
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn eq(&self, other: &Span) -> bool {
+ self.inner.eq(&other.inner)
+ }
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+ #[derive(Clone)]
+ pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+@@ -457,35 +484,35 @@ pub enum TokenTree {
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+ }
+
+ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens; it will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+
+ impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+@@ -508,42 +535,42 @@ impl From<Literal> for TokenTree {
+ TokenTree::Literal(g)
+ }
+ }
+
+ /// Prints the token tree as a string that is supposed to be losslessly
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ /// A delimited token stream.
+ ///
+ /// A `Group` internally contains a `TokenStream` which is surrounded by
+ /// `Delimiter`s.
+@@ -646,25 +673,25 @@ impl Group {
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner)
+ }
+ }
+
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+ /// A `Punct` is a single punctuation character like `+`, `-` or `#`.
+ ///
+ /// Multicharacter operators like `+=` are represented as two instances of
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+@@ -725,23 +752,23 @@ impl Punct {
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+ }
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.op, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+ debug.field("op", &self.op);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+ }
+ }
+@@ -915,25 +942,25 @@ impl Ord for Ident {
+ impl Hash for Ident {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.to_string().hash(hasher)
+ }
+ }
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
+ /// byte character (`b'a'`), an integer or floating point number with or without
+ /// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+ ///
+ /// Boolean literals like `true` and `false` do not belong here, they are
+@@ -1135,36 +1162,36 @@ impl Literal {
+ /// nightly compiler, this method will always return `None`.
+ ///
+ /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.inner.subspan(range).map(Span::_new)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+ use std::marker;
+ use std::rc::Rc;
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+ /// The iteration is "shallow", e.g. the iterator doesn't recurse into
+ /// delimited groups, and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+@@ -1174,19 +1201,19 @@ pub mod token_stream {
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.inner.next()
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,791 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::str::{Bytes, CharIndices, Chars};
++use unicode_xid::UnicodeXID;
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
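The tokenizer above now tracks open delimiters on an explicit stack, so unbalanced or crossed brackets surface as a LexError through the crate's public FromStr entry point. A small check (sketch):

// Sketch: the stack-based tokenizer rejects mismatched delimiters.
use proc_macro2::TokenStream;

fn main() {
    assert!("(a + b)".parse::<TokenStream>().is_ok());
    assert!("(a + b".parse::<TokenStream>().is_err());  // unclosed "("
    assert!("[a + b)".parse::<TokenStream>().is_err()); // "[" closed by ")"
}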
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = op(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => {
++ if let Some((_, '\n')) = chars.next() {
++ // ...
++ } else {
++ break;
++ }
++ }
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, '\n')) | Some((_, '\r')) => {
++ while let Some(&(_, ch)) = chars.peek() {
++ if ch.is_whitespace() {
++ chars.next();
++ } else {
++ break;
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ 'outer: while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => {
++ if let Some((_, b'\n')) = bytes.next() {
++ // ...
++ } else {
++ break;
++ }
++ }
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b'\n')) | Some((newline, b'\r')) => {
++ let rest = input.advance(newline + 1);
++ for (offset, ch) in rest.char_indices() {
++ if !ch.is_whitespace() {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ continue 'outer;
++ }
++ }
++ break;
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ for (i, ch) in chars {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => {}
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ loop {
++ let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
++ if c == '}' {
++ return true;
++ }
++ }
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ let rest = input.advance(len);
++ if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ chars.next();
++ len += 1;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return Err(LexError);
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ let digit = match b {
++ b'0'..=b'9' => (b - b'0') as u64,
++ b'a'..=b'f' => 10 + (b - b'a') as u64,
++ b'A'..=b'F' => 10 + (b - b'A') as u64,
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ if digit >= base {
++ return Err(LexError);
++ }
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn op(input: Cursor) -> PResult<Punct> {
++ match op_char(input) {
++ Ok((rest, '\'')) => {
++ ident(rest)?;
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ Ok((rest, ch)) => {
++ let kind = match op_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn op_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as an op.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
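
Note: the hunk above lowers doc comments into attribute tokens: the `///`, `//!`, `/**`, and `/*!` forms all become the token stream of `# [doc = "..."]` (with a leading `!` for inner comments), and a bare `\r` not followed by `\n` is rejected. A minimal, self-contained sketch of how this surfaces through the public proc-macro2 API, mirroring the assertions in tests/comments.rs added later in this diff:

    use proc_macro2::{Delimiter, TokenStream, TokenTree};

    fn main() {
        // "/// doc" lexes to the tokens of `#[doc = " doc"]`.
        let mut tokens = "/// doc".parse::<TokenStream>().unwrap().into_iter();
        match tokens.next().unwrap() {
            TokenTree::Punct(p) => assert_eq!(p.as_char(), '#'),
            other => panic!("unexpected token {:?}", other),
        }
        match tokens.next().unwrap() {
            TokenTree::Group(g) => assert_eq!(g.delimiter(), Delimiter::Bracket),
            other => panic!("unexpected token {:?}", other),
        }
        assert!(tokens.next().is_none());
    }
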
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+--- third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
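
Note: the deleted strnom.rs above supplied the nom-style combinator macros (alt!, tag!, do_parse!, tuple!, ...) that the old lexer was written in; the rewritten parse.rs earlier in this diff expresses the same logic as plain functions over Cursor. Roughly, `alt!(input, a | b)` tries each parser in order and keeps the first success. A minimal illustrative sketch of that correspondence (PResult, alt2, inner_doc, and outer_doc here are stand-ins, not the crate's actual items):

    type PResult<'a, O> = Result<(&'a str, O), ()>;

    // Equivalent of the deleted `alt!(i, a | b)`: try `a`, fall back to `b` on failure.
    fn alt2<'a, O>(
        input: &'a str,
        a: impl Fn(&'a str) -> PResult<'a, O>,
        b: impl Fn(&'a str) -> PResult<'a, O>,
    ) -> PResult<'a, O> {
        a(input).or_else(|_| b(input))
    }

    // Toy parsers in the old (remaining input, output) shape.
    fn inner_doc(i: &str) -> PResult<'_, bool> {
        if i.starts_with("//!") { Ok((&i[3..], true)) } else { Err(()) }
    }

    fn outer_doc(i: &str) -> PResult<'_, bool> {
        if i.starts_with("///") { Ok((&i[3..], false)) } else { Err(()) }
    }

    fn main() {
        assert_eq!(alt2("/// doc", inner_doc, outer_doc), Ok((" doc", false)));
    }
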
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+--- third_party/rust/proc-macro2/src/wrapper.rs
++++ third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,96 +1,39 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+
+ // Work around https://github.com/rust-lang/rust/issues/65080.
+ // In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+
+ impl DeferredTokenStream {
+ fn new(stream: proc_macro::TokenStream) -> Self {
+ DeferredTokenStream {
+ stream,
+@@ -98,28 +41,33 @@ impl DeferredTokenStream {
+ }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.stream.is_empty() && self.extra.is_empty()
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+ self.evaluate_now();
+ self.stream
+ }
+ }
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ match self {
+@@ -142,31 +90,37 @@ impl TokenStream {
+ }
+ }
+ }
+
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+
+ impl From<proc_macro::TokenStream> for TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> TokenStream {
+ TokenStream::Compiler(DeferredTokenStream::new(inner))
+ }
+@@ -182,17 +136,17 @@ impl From<TokenStream> for proc_macro::T
+ }
+
+ impl From<fallback::TokenStream> for TokenStream {
+ fn from(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream::Fallback(inner)
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Punct(tt) => {
+ let spacing = match tt.spacing() {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+@@ -202,37 +156,37 @@ fn into_compiler_token(token: TokenTree)
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+ }
+ }
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+ }
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+ } else {
+ TokenStream::Fallback(trees.into_iter().collect())
+ }
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+ Some(TokenStream::Compiler(mut first)) => {
+ first.evaluate_now();
+ first.stream.extend(streams.map(|s| match s {
+ TokenStream::Compiler(s) => s.into_token_stream(),
+ TokenStream::Fallback(_) => mismatch(),
+@@ -247,75 +201,76 @@ impl iter::FromIterator<TokenStream> for
+ TokenStream::Fallback(first)
+ }
+ None => TokenStream::new(),
+ }
+ }
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+
+ impl From<proc_macro::LexError> for LexError {
+ fn from(e: proc_macro::LexError) -> LexError {
+ LexError::Compiler(e)
+ }
+ }
+
+ impl From<fallback::LexError> for LexError {
+ fn from(e: fallback::LexError) -> LexError {
+ LexError::Fallback(e)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+@@ -356,25 +311,25 @@ impl Iterator for TokenTreeIter {
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ match self {
+ TokenTreeIter::Compiler(tts) => tts.size_hint(),
+ TokenTreeIter::Fallback(tts) => tts.size_hint(),
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+
+ #[cfg(super_unstable)]
+ impl SourceFile {
+ fn nightly(sf: proc_macro::SourceFile) -> Self {
+ SourceFile::Compiler(sf)
+@@ -392,68 +347,87 @@ impl SourceFile {
+ match self {
+ SourceFile::Compiler(a) => a.is_real(),
+ SourceFile::Fallback(a) => a.is_real(),
+ }
+ }
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+ pub fn unwrap(self) -> proc_macro::Span {
+ match self {
+ Span::Compiler(s) => s,
+@@ -537,36 +511,36 @@ impl From<proc_macro::Span> for crate::S
+ }
+
+ impl From<fallback::Span> for Span {
+ fn from(inner: fallback::Span) -> Span {
+ Span::Fallback(inner)
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+ }
+ Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+
+ impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ match stream {
+ TokenStream::Compiler(tts) => {
+@@ -647,36 +621,36 @@ impl Group {
+ }
+
+ impl From<fallback::Group> for Group {
+ fn from(g: fallback::Group) -> Self {
+ Group::Fallback(g)
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+
+ impl Ident {
+ pub fn new(string: &str, span: Span) -> Ident {
+ match span {
+ Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
+@@ -742,56 +716,56 @@ where
+ let other = other.as_ref();
+ match self {
+ Ident::Compiler(t) => t.to_string() == other,
+ Ident::Fallback(t) => t == other,
+ }
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+ }
+ }
+ )*)
+ }
+
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+ }
+ }
+ )*)
+ }
+
+@@ -825,49 +799,49 @@ impl Literal {
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+ }
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+ }
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+ }
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+ }
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+@@ -903,25 +877,25 @@ impl Literal {
+ }
+
+ impl From<fallback::Literal> for Literal {
+ fn from(s: fallback::Literal) -> Literal {
+ Literal::Fallback(s)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
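
Note: throughout wrapper.rs above, each public type is a two-variant enum (Compiler backed by proc_macro, Fallback backed by the pure-Rust fallback), constructors pick the Compiler variant only when inside_proc_macro() reports a live proc-macro bridge, and the trait impls delegate to whichever variant is held. A minimal sketch of that delegation pattern in isolation (CompilerLit, FallbackLit, and the inside_proc_macro stub are illustrative stand-ins, not the crate's internals):

    use std::fmt::{self, Display};

    // Stand-ins for proc_macro::Literal and fallback::Literal.
    struct CompilerLit(String);
    struct FallbackLit(String);

    impl Display for CompilerLit {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.0, f) }
    }

    impl Display for FallbackLit {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&self.0, f) }
    }

    enum Literal {
        Compiler(CompilerLit),
        Fallback(FallbackLit),
    }

    // The real crate probes the proc_macro bridge; assume "not inside a macro" here.
    fn inside_proc_macro() -> bool {
        false
    }

    impl Literal {
        fn string(t: &str) -> Literal {
            if inside_proc_macro() {
                Literal::Compiler(CompilerLit(format!("{:?}", t)))
            } else {
                Literal::Fallback(FallbackLit(format!("{:?}", t)))
            }
        }
    }

    impl Display for Literal {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            match self {
                Literal::Compiler(t) => Display::fmt(t, f),
                Literal::Fallback(t) => Display::fmt(t, f),
            }
        }
    }

    fn main() {
        assert_eq!(Literal::string("doc").to_string(), "\"doc\"");
    }
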
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+--- third_party/rust/proc-macro2/tests/test.rs
++++ third_party/rust/proc-macro2/tests/test.rs
+@@ -1,12 +1,11 @@
++use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+ Ident::new("String", Span::call_site()).to_string(),
+ "String"
+ );
+ assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
+ assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
+@@ -105,16 +104,43 @@ fn literal_suffix() {
+ assert_eq!(token_count("999u256"), 1);
+ assert_eq!(token_count("999r#u256"), 3);
+ assert_eq!(token_count("1."), 1);
+ assert_eq!(token_count("1.f32"), 3);
+ assert_eq!(token_count("1.0_0"), 1);
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+ fn roundtrip() {
+ fn roundtrip(p: &str) {
+ println!("parse: {}", p);
+ let s = p.parse::<TokenStream>().unwrap().to_string();
+ println!("first: {}", s);
+@@ -161,46 +187,16 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+ testing 123
+ {
+ testing 234
+ }",
+ &[
+@@ -269,59 +265,17 @@ fn span_join() {
+ joined1.unwrap().source_file(),
+ source1[0].span().source_file()
+ );
+ }
+
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
+-}
+-
+-#[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+ fn op_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+ assert_eq!(tt.as_char(), '~');
+@@ -340,30 +294,30 @@ fn raw_identifier() {
+ }
+ assert!(tts.next().is_none());
+ }
+
+ #[test]
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+
+ #[test]
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ },
+@@ -374,17 +328,17 @@ TokenStream [
+ Literal {
+ lit: 1,
+ },
+ ],
+ },
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a
+ },
+@@ -395,17 +349,17 @@ TokenStream [
+ Literal {
+ lit: 1
+ }
+ ]
+ }
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ span: bytes(2..3),
+@@ -420,17 +374,17 @@ TokenStream [
+ span: bytes(6..7),
+ },
+ ],
+ span: bytes(1..8),
+ },
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ span: bytes(2..3)
+@@ -459,8 +413,85 @@ TokenStream [
+ }
+
+ #[test]
+ fn default_tokenstream_is_empty() {
+ let default_token_stream: TokenStream = Default::default();
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
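
Note: the span assertions above (and the check_spans helpers) are gated on the span_locations cfg, i.e. they only compile when proc-macro2 is built with its "span-locations" feature, which enables Span::start()/Span::end() reporting 1-based lines and 0-based columns. A minimal sketch of the same check against one of the hunk's own cases (assumes proc-macro2 = { version = "1", features = ["span-locations"] } in the consumer's Cargo.toml):

    use proc_macro2::TokenStream;

    fn main() {
        // Matches check_spans("abc// foo", &[(1, 0, 1, 3)]) above: the trailing
        // comment produces no token, and `abc` spans line 1, columns 0..3.
        let tt = "abc// foo".parse::<TokenStream>().unwrap().into_iter().next().unwrap();
        let (start, end) = (tt.span().start(), tt.span().end());
        assert_eq!((start.line, start.column, end.line, end.column), (1, 0, 1, 3));
    }
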
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+--- third_party/rust/syn/.cargo-checksum.json
++++ third_party/rust/syn/.cargo-checksum.json
+@@ -1,1 +1,1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835
f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5
fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/.gitignore":"22e782449a3c216db3f7215d5fb8882e316768e40beeec3833aae419ad8941db","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2",
"tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+--- third_party/rust/syn/Cargo.toml
++++ third_party/rust/syn/Cargo.toml
+@@ -8,79 +8,90 @@
+ # If you believe there's an error in this file please file an
+ # issue against the rust-lang/cargo repository. If you're
+ # editing this file be aware that the upstream Cargo.toml
+ # will likely look very different (and much more reasonable)
+
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+ documentation = "https://docs.rs/syn"
+ readme = "README.md"
+ categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+ version = "1.0"
+ optional = true
+ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+ [dev-dependencies.walkdir]
+ version = "2.1"
+
+ [features]
+ clone-impls = []
+ default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
+ derive = []
+ extra-traits = []
+ fold = []
+ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+--- third_party/rust/syn/README.md
++++ third_party/rust/syn/README.md
+@@ -1,15 +1,15 @@
+ Parser for Rust source code
+ ===========================
+
+-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
+-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
+-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
+-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+
+ Currently this library is geared toward use in Rust procedural macros, but
+ contains some APIs that may be useful more generally.
+
+ - **Data structures** — Syn provides a complete syntax tree that can represent
+@@ -41,20 +41,16 @@ contains some APIs that may be useful mo
+
+ [`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
+ [`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
+ [`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
+ [`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+
+ <br>
+
+ ## Resources
+
+@@ -83,18 +79,16 @@ tokens back to the compiler to compile i
+ syn = "1.0"
+ quote = "1.0"
+
+ [lib]
+ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+
+ #[proc_macro_derive(MyMacro)]
+ pub fn my_macro(input: TokenStream) -> TokenStream {
+ // Parse the input tokens into a syntax tree
+ let input = parse_macro_input!(input as DeriveInput);
+@@ -266,17 +260,17 @@ incompatible ecosystems for proc macros
+
+ In general all of your code should be written against proc-macro2 rather than
+ proc-macro. The one exception is in the signatures of procedural macro entry
+ points, which are required by the language to use `proc_macro::TokenStream`.
+
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
+ #### License
+
+ <sup>
+ Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+ 2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+--- third_party/rust/syn/benches/file.rs
++++ third_party/rust/syn/benches/file.rs
+@@ -1,14 +1,21 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+ use proc_macro2::TokenStream;
+ use std::fs;
+ use std::str::FromStr;
+ use test::Bencher;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+--- third_party/rust/syn/benches/rust.rs
++++ third_party/rust/syn/benches/rust.rs
+@@ -1,15 +1,22 @@
+ // $ cargo bench --features full --bench rust
+ //
+ // Syn only, useful for profiling:
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+ use std::fs;
+ use std::time::{Duration, Instant};
+
+ #[cfg(not(syn_only))]
+ mod tokenstream_parse {
+@@ -23,41 +30,45 @@ mod tokenstream_parse {
+
+ mod syn_parse {
+ pub fn bench(content: &str) -> Result<(), ()> {
+ syn::parse_file(content).map(drop).map_err(drop)
+ }
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+ ) {
+ diagnostic.cancel();
+ return Err(());
+ };
+ Ok(())
+@@ -99,21 +110,21 @@ fn exec(mut codepath: impl FnMut(&str) -
+ assert_eq!(success, total);
+ begin.elapsed()
+ }
+
+ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+ }
+
+ #[cfg(not(syn_only))]
+ {
+ let mut lines = 0;
+@@ -123,22 +134,22 @@ fn main() {
+ files += 1;
+ Ok(())
+ });
+ eprintln!("\n{} lines in {} files", lines, files);
+ }
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+ eprintln!(
+ "elapsed={}.{:03}s",
+ elapsed.as_secs(),
+ elapsed.subsec_millis(),
+ );
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+--- third_party/rust/syn/build.rs
++++ third_party/rust/syn/build.rs
+@@ -1,11 +1,11 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+ // these cfgs other than by executing our build script.
+ fn main() {
+ let compiler = match rustc_version() {
+ Some(compiler) => compiler,
+ None => return,
+@@ -21,43 +21,19 @@ fn main() {
+ }
+
+ struct Compiler {
+ minor: u32,
+ nightly: bool,
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+--- third_party/rust/syn/src/attr.rs
++++ third_party/rust/syn/src/attr.rs
+@@ -4,25 +4,21 @@ use crate::punctuated::Punctuated;
+ use std::iter;
+
+ use proc_macro2::TokenStream;
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+ ///
+ /// # Syntax
+ ///
+ /// Rust has six types of attributes.
+ ///
+@@ -106,58 +102,69 @@ ast_struct! {
+ /// If the attribute you are parsing is expected to conform to the
+ /// conventional structured form of attribute, use [`parse_meta()`] to
+ /// obtain that structured representation. If the attribute follows some
+ /// other grammar of its own, use [`parse_args()`] to parse that into the
+ /// expected data structure.
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+ pub path: Path,
+ pub tokens: TokenStream,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+ fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
+ PathSegment {
+ ident: segment.ident.clone(),
+ arguments: PathArguments::None,
+ }
+@@ -194,91 +201,95 @@ impl Attribute {
+ /// parser; and
+ /// - the error message has a more useful span when `tokens` is empty.
+ ///
+ /// ```text
+ /// #[my_attr(value < 5)]
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+ self.parse_args_with(T::parse)
+ }
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+ let parser = |input: ParseStream| {
+ let args = enter_args(self, input)?;
+ parse::parse_stream(parser, &args)
+ };
+ parser.parse2(self.tokens.clone())
+ }
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+ let mut attrs = Vec::new();
+ while input.peek(Token![#]) {
+ attrs.push(input.call(parsing::single_parse_outer)?);
+ }
+ Ok(attrs)
+ }
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+ let mut attrs = Vec::new();
+ while input.peek(Token![#]) && input.peek2(Token![!]) {
+ attrs.push(input.call(parsing::single_parse_inner)?);
+ }
+ Ok(attrs)
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+ };
+
+ let mut path = String::new();
+ for segment in &attr.path.segments {
+ if !path.is_empty() || attr.path.leading_colon.is_some() {
+ path += "::";
+ }
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
+ if input.peek(token::Paren) {
+ parenthesized!(content in input);
+ } else if input.peek(token::Bracket) {
+ bracketed!(content in input);
+ } else if input.peek(token::Brace) {
+@@ -293,41 +304,40 @@ fn enter_args<'a>(attr: &Attribute, inpu
+ Err(input.error("unexpected token in attribute arguments"))
+ }
+ }
+
+ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+ ///
+ /// - `#[repr(transparent)]`
+ /// - `/// # Example`
+ /// - `/** Please file an issue */`
+ ///
+ /// # Inner attributes
+ ///
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+ ///
+ /// A meta path is like the `test` in `#[test]`.
+ ///
+ /// ## List
+ ///
+@@ -355,29 +365,29 @@ ast_enum_of_structs! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ NameValue(MetaNameValue),
+ }
+ }
+
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+ pub paren_token: token::Paren,
+ pub nested: Punctuated<NestedMeta, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+ pub eq_token: Token![=],
+ pub lit: Lit,
+ }
+ }
+
+@@ -393,17 +403,17 @@ impl Meta {
+ Meta::NameValue(meta) => &meta.path,
+ }
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+ /// would be a nested `Meta::Path`.
+ Meta(Meta),
+
+ /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
+ Lit(Lit),
+@@ -424,18 +434,18 @@ ast_enum_of_structs! {
+ /// /* ... */
+ /// }
+ /// ```
+ ///
+ /// The implementation of this macro would want to parse its attribute arguments
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+ /// # const IGNORE: &str = stringify! {
+ /// #[proc_macro_attribute]
+ /// # };
+ /// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
+ /// let args = parse_macro_input!(args as AttributeArgs);
+@@ -459,27 +469,27 @@ where
+ T: IntoIterator<Item = &'a Attribute>,
+ {
+ type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
+
+ fn outer(self) -> Self::Ret {
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+ }
+
+ fn inner(self) -> Self::Ret {
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+--- third_party/rust/syn/src/buffer.rs
++++ third_party/rust/syn/src/buffer.rs
+@@ -1,12 +1,12 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+ // is 100% safe but the implementation is fragile internally.
+
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+ feature = "proc-macro"
+@@ -31,17 +31,17 @@ enum Entry {
+ // token tree, or null if this is the outermost level.
+ End(*const Entry),
+ }
+
+ /// A buffer that can be efficiently traversed multiple times, unlike
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+ // backing slices won't be moved.
+ data: Box<[Entry]>,
+ }
+
+ impl TokenBuffer {
+@@ -93,17 +93,17 @@ impl TokenBuffer {
+ }
+
+ TokenBuffer { data: entries }
+ }
+
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+ feature = "proc-macro"
+ ))]
+ pub fn new(stream: pm::TokenStream) -> TokenBuffer {
+ Self::new2(stream.into())
+ }
+@@ -128,18 +128,17 @@ impl TokenBuffer {
+ /// and copied around.
+ ///
+ /// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
+ /// object and get a cursor to its first token with `begin()`.
+ ///
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+ // This is the only `Entry::End(..)` object which this cursor is allowed to
+ // point at. All other `End` objects are skipped over in `Cursor::create`.
+ scope: *const Entry,
+ // Cursor is covariant in 'a. This field ensures that our pointers are still
+ // valid.
+@@ -196,37 +195,38 @@ impl<'a> Cursor<'a> {
+
+ /// Bump the cursor to point at the next token after the current one. This
+ /// is undefined behavior if the cursor is currently looking at an
+ /// `Entry::End`.
+ unsafe fn bump(self) -> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+ // entering it are handled correctly.
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+ }
+
+ /// If the cursor is pointing at a `Group` with the given delimiter, returns
+ /// a cursor into that group and one pointing to the next `TokenTree`.
+ pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
+@@ -337,16 +337,54 @@ impl<'a> Cursor<'a> {
+ match self.entry() {
+ Entry::Group(group, _) => group.span(),
+ Entry::Literal(l) => l.span(),
+ Entry::Ident(t) => t.span(),
+ Entry::Punct(o) => o.span(),
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
+ a.scope == b.scope
+ }
+
+ pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
+ match cursor.entry() {
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+--- third_party/rust/syn/src/custom_keyword.rs
++++ third_party/rust/syn/src/custom_keyword.rs
+@@ -81,46 +81,46 @@
+ /// value: input.parse()?,
+ /// })
+ /// } else {
+ /// Err(lookahead.error())
+ /// }
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+ pub struct $ident {
+ pub span: $crate::export::Span,
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+ $ident {
+ span: $crate::export::IntoSpans::into_spans(span)[0],
+ }
+ }
+
+ impl $crate::export::Default for $ident {
+ fn default() -> Self {
+ $ident {
+ span: $crate::export::Span::call_site(),
+ }
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! impl_parse_for_custom_keyword {
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+--- third_party/rust/syn/src/custom_punctuation.rs
++++ third_party/rust/syn/src/custom_punctuation.rs
+@@ -69,67 +69,67 @@
+ /// Ok(tokens)
+ /// }
+ ///
+ /// fn main() {
+ /// let input = r#" a::b </> c::d::e "#;
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+ }
+
+ impl $crate::export::Default for $ident {
+ fn default() -> Self {
+ $ident($crate::export::Span::call_site())
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+ };
+ }
+
+ // Not public API.
+ #[cfg(not(feature = "parsing"))]
+@@ -137,22 +137,22 @@ macro_rules! impl_parse_for_custom_punct
+ #[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+ }
+
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+ }
+
+ // Not public API.
+ #[cfg(not(feature = "printing"))]
+ #[doc(hidden)]
+@@ -216,26 +216,26 @@ macro_rules! impl_extra_traits_for_custo
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! impl_extra_traits_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+ ($mode:ident, +=) => { 2 };
+ ($mode:ident, &) => { 1 };
+ ($mode:ident, &&) => { 2 };
+ ($mode:ident, &=) => { 2 };
+ ($mode:ident, @) => { 1 };
+@@ -274,17 +274,17 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, <<=) => { 3 };
+ ($mode:ident, >>) => { 2 };
+ ($mode:ident, >>=) => { 3 };
+ ($mode:ident, *) => { 1 };
+ ($mode:ident, -) => { 1 };
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! custom_punctuation_unexpected {
+ () => {};
+ }
+@@ -292,18 +292,8 @@ macro_rules! custom_punctuation_unexpect
+ // Not public API.
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! stringify_punct {
+ ($($tt:tt)+) => {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+--- third_party/rust/syn/src/data.rs
++++ third_party/rust/syn/src/data.rs
+@@ -1,15 +1,15 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+ pub attrs: Vec<Attribute>,
+
+ /// Name of the variant.
+ pub ident: Ident,
+
+@@ -19,17 +19,17 @@ ast_struct! {
+ /// Explicit discriminant: `Variant = 1`
+ pub discriminant: Option<(Token![=], Expr)>,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+@@ -47,28 +47,28 @@ ast_enum_of_structs! {
+ Unit,
+ }
+ }
+
+ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+ pub named: Punctuated<Field, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+ pub unnamed: Punctuated<Field, Token![,]>,
+ }
+ }
+
+ impl Fields {
+@@ -88,16 +88,34 @@ impl Fields {
+ /// struct or variant's fields uniformly.
+ pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
+ match self {
+ Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
+ Fields::Named(f) => f.named.iter_mut(),
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+ type Item = Field;
+ type IntoIter = punctuated::IntoIter<Field>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ match self {
+@@ -124,17 +142,17 @@ impl<'a> IntoIterator for &'a mut Fields
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter_mut()
+ }
+ }
+
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+
+ /// Visibility of the field.
+ pub vis: Visibility,
+
+@@ -149,17 +167,17 @@ ast_struct! {
+ pub ty: Type,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+@@ -179,58 +197,61 @@ ast_enum_of_structs! {
+ /// An inherited visibility, which usually means private.
+ Inherited,
+ }
+ }
+
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+ }
+ }
+
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+ }
+ }
+
+ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+ pub paren_token: token::Paren,
+ pub in_token: Option<Token![in]>,
+ pub path: Box<Path>,
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+ Fields::Named(input.parse()?)
+ } else if input.peek(token::Paren) {
+ Fields::Unnamed(input.parse()?)
+ } else {
+ Fields::Unit
+@@ -290,68 +311,99 @@ pub mod parsing {
+ colon_token: None,
+ ty: input.parse()?,
+ })
+ }
+ }
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+ Self::parse_crate(input)
+ } else {
+ Ok(Visibility::Inherited)
+ }
+ }
+ }
+
+ impl Visibility {
+ fn parse_pub(input: ParseStream) -> Result<Self> {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
++ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
+- } else if content.peek(Token![in]) {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+
+ Ok(Visibility::Public(VisPublic { pub_token }))
+ }
+
+ fn parse_crate(input: ParseStream) -> Result<Self> {
+ if input.peek2(Token![::]) {
+ Ok(Visibility::Inherited)
+ } else {
+ Ok(Visibility::Crate(VisCrate {
+ crate_token: input.parse()?,
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
+ #[cfg(feature = "printing")]
+ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+--- third_party/rust/syn/src/derive.rs
++++ third_party/rust/syn/src/derive.rs
+@@ -1,15 +1,15 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+
+ /// Visibility of the struct or enum.
+ pub vis: Visibility,
+
+ /// Name of the struct or enum.
+@@ -21,17 +21,17 @@ ast_struct! {
+ /// Data within the struct or enum.
+ pub data: Data,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+@@ -48,41 +48,41 @@ ast_enum_of_structs! {
+ }
+
+ do_not_generate_to_tokens
+ }
+
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+ pub fields: Fields,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+ pub brace_token: token::Brace,
+ pub variants: Punctuated<Variant, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
+ pub fields: FieldsNamed,
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+--- third_party/rust/syn/src/discouraged.rs
++++ third_party/rust/syn/src/discouraged.rs
+@@ -11,17 +11,17 @@ pub trait Speculative {
+ /// stream to the fork to "commit" the parsing from the fork to the main
+ /// stream.
+ ///
+ /// If you can avoid doing this, you should, as it limits the ability to
+ /// generate useful errors. That said, it is often the only way to parse
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+ /// parsing `B*` and removing the leading members of `A` from the
+ /// repetition, bypassing the need to involve the downsides associated with
+ /// speculative parsing.
+ ///
+ /// [`ParseStream::fork`]: ParseBuffer::fork
+@@ -67,17 +67,16 @@ pub trait Speculative {
+ /// # }
+ ///
+ /// impl Parse for PathSegment {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// if input.peek(Token![super])
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+ /// }
+ ///
+ /// let ident = input.parse()?;
+ /// if input.peek(Token![::]) && input.peek3(Token![<]) {
+ /// return Ok(PathSegment {
+@@ -159,13 +158,37 @@ pub trait Speculative {
+ }
+
+ impl<'a> Speculative for ParseBuffer<'a> {
+ fn advance_to(&self, fork: &Self) {
+ if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
+ }
+ }
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+--- third_party/rust/syn/src/error.rs
++++ third_party/rust/syn/src/error.rs
+@@ -1,9 +1,8 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+ use std::vec;
+
+ use proc_macro2::{
+ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
+ };
+@@ -27,18 +26,18 @@ pub type Result<T> = std::result::Result
+ /// message than simply panicking the macro.
+ ///
+ /// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
+ ///
+ /// When parsing macro input, the [`parse_macro_input!`] macro handles the
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+ /// # const IGNORE: &str = stringify! {
+ /// #[proc_macro_attribute]
+ /// # };
+ /// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
+ /// let args = parse_macro_input!(args as AttributeArgs);
+@@ -77,17 +76,16 @@ pub type Result<T> = std::result::Result
+ /// # use proc_macro2::TokenStream;
+ /// # use syn::{DeriveInput, Result};
+ /// #
+ /// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
+ /// # unimplemented!()
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+
+ struct ErrorMessage {
+ // Span is implemented as an index into a thread-local interner to keep the
+ // size small. It is not safe to access from a different thread. We want
+ // errors to be Send and Sync to play nicely with the Failure crate, so pin
+@@ -245,16 +243,27 @@ pub fn new_at<T: Display>(scope: Span, c
+ if cursor.eof() {
+ Error::new(scope, format!("unexpected end of input, {}", message))
+ } else {
+ let span = crate::buffer::open_span_of_group(cursor);
+ Error::new(span, message)
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+ formatter
+ .debug_tuple("Error")
+ .field(&self.messages[0])
+ .finish()
+ } else {
+@@ -273,16 +282,24 @@ impl Debug for ErrorMessage {
+ }
+
+ impl Display for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str(&self.messages[0].message)
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+ .start_span
+ .get()
+ .cloned()
+ .unwrap_or_else(Span::call_site);
+ let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+@@ -350,8 +367,16 @@ impl<'a> Iterator for Iter<'a> {
+ type Item = Error;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(Error {
+ messages: vec![self.messages.next()?.clone()],
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+--- third_party/rust/syn/src/expr.rs
++++ third_party/rust/syn/src/expr.rs
+@@ -1,23 +1,26 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+ /// This type is a syntax tree enum. In Syn this and other syntax tree enums
+ /// are designed to be traversed using the following rebinding idiom.
+ ///
+ /// ```
+ /// # use syn::Expr;
+@@ -78,17 +81,17 @@ ast_enum_of_structs! {
+ /// if let Expr::Tuple(base) = *discriminant.base {
+ /// # }
+ /// # }
+ /// ```
+ ///
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+ /// An assignment expression: `a = compute()`.
+ Assign(ExprAssign),
+
+ /// A compound assignment expression: `counter += 1`.
+ AssignOp(ExprAssignOp),
+@@ -223,191 +226,191 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub elems: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub eq_token: Token![=],
+ pub right: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub op: BinOp,
+ pub right: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+ pub capture: Option<Token![move]>,
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+ pub dot_token: Token![.],
+ pub await_token: token::Await,
+ }
+ }
+
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub op: BinOp,
+ pub right: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+ pub label: Option<Lifetime>,
+ pub expr: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+ pub func: Box<Expr>,
+ pub paren_token: token::Paren,
+ pub args: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub as_token: Token![as],
+ pub ty: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+ pub movability: Option<Token![static]>,
+ pub capture: Option<Token![move]>,
+ pub or1_token: Token![|],
+ pub inputs: Punctuated<Pat, Token![,]>,
+ pub or2_token: Token![|],
+ pub output: ReturnType,
+ pub body: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+ pub label: Option<Lifetime>,
+ }
+ }
+
+ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+ pub dot_token: Token![.],
+ pub member: Member,
+ }
+ }
+
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub for_token: Token![for],
+ pub pat: Pat,
+ pub in_token: Token![in],
+ pub expr: Box<Expr>,
+ pub body: Block,
+@@ -416,538 +419,312 @@ ast_struct! {
+
+ ast_struct! {
+ /// An expression contained within invisible delimiters.
+ ///
+ /// This variant is important for faithfully representing the precedence
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An `if` expression with an optional `else` block: `if expr { ... }
+ /// else { ... }`.
+ ///
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+ pub cond: Box<Expr>,
+ pub then_branch: Block,
+ pub else_branch: Option<(Token![else], Box<Expr>)>,
+ }
+ }
+
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub bracket_token: token::Bracket,
+ pub index: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+ pub pat: Pat,
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+ pub lit: Lit,
+ }
+ }
+
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub loop_token: Token![loop],
+ pub body: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ }
+ }
+
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+ pub expr: Box<Expr>,
+ pub brace_token: token::Brace,
+ pub arms: Vec<Arm>,
+ }
+ }
+
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+ pub dot_token: Token![.],
+ pub method: Ident,
+ pub turbofish: Option<MethodTurbofish>,
+ pub paren_token: token::Paren,
+ pub args: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A path like `std::mem::replace` possibly containing generic
+ /// parameters and a qualified self-type.
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ }
+ }
+
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+ pub limits: RangeLimits,
+ pub to: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+ pub raw: Reserved,
+ pub mutability: Option<Token![mut]>,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ pub len: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+ pub expr: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A struct literal expression: `Point { x: 1, y: 1 }`.
+ ///
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+ pub brace_token: token::Brace,
+ pub fields: Punctuated<FieldValue, Token![,]>,
+ pub dot2_token: Option<Token![..]>,
+ pub rest: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub question_token: Token![?],
+ }
+ }
+
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+ pub op: UnOp,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub while_token: Token![while],
+ pub cond: Box<Expr>,
+ pub body: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+ pub expr: Option<Box<Expr>>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+ Expr::Box(ExprBox { attrs, .. })
+ | Expr::Array(ExprArray { attrs, .. })
+ | Expr::Call(ExprCall { attrs, .. })
+ | Expr::MethodCall(ExprMethodCall { attrs, .. })
+@@ -991,107 +768,145 @@ impl Expr {
+ }
+ }
+ }
+
+ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+ Named(Ident),
+ /// An unnamed field like `self.0`.
+ Unnamed(Index),
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+ }
+
+ impl From<usize> for Index {
+ fn from(index: usize) -> Index {
+ assert!(index < u32::max_value() as usize);
+ Index {
+ index: index as u32,
+ span: Span::call_site(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+ pub args: Punctuated<GenericMethodArgument, Token![,]>,
+ pub gt_token: Token![>],
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+ /// A const expression. Must be inside of a block.
+ ///
+ /// NOTE: Identity expressions are represented as Type arguments, as
+ /// they are indistinguishable syntactically.
+ Const(Expr),
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+
+ /// Name or index of the field.
+ pub member: Member,
+
+ /// The colon in `Struct { x: x }`. If written in shorthand like
+@@ -1102,17 +917,17 @@ ast_struct! {
+ pub expr: Expr,
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+@@ -1129,45 +944,44 @@ ast_struct! {
+ /// }
+ /// // ...
+ /// # _ => {}
+ /// }
+ /// # false
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+ pub guard: Option<(Token![if], Box<Expr>)>,
+ pub fat_arrow_token: Token![=>],
+ pub body: Box<Expr>,
+ pub comma: Option<Token![,]>,
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+ /// Inclusive at the beginning and end.
+ Closed(Token![..=]),
+ }
+ }
+
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+ | Expr::If(..)
+ | Expr::Match(..)
+ | Expr::While(..)
+ | Expr::Loop(..)
+ | Expr::ForLoop(..)
+@@ -1178,26 +992,27 @@ pub(crate) fn requires_terminator(expr:
+ }
+
+ #[cfg(feature = "parsing")]
+ pub(crate) mod parsing {
+ use super::*;
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+ Range,
+ Or,
+ And,
+ Compare,
+ BitOr,
+@@ -1241,19 +1056,131 @@ pub(crate) mod parsing {
+ }
+
+ impl Parse for Expr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ ambiguous_expr(input, AllowStruct(true))
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
+ #[cfg(feature = "full")]
+ fn parse_expr(
+ input: ParseStream,
+ mut lhs: Expr,
+ allow_struct: AllowStruct,
+ base: Precedence,
+@@ -1425,88 +1352,114 @@ pub(crate) mod parsing {
+ }
+
+ // Parse an arbitrary expression.
+ fn ambiguous_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+ let lhs = unary_expr(input, allow_struct)?;
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+ } else {
+ trailer_expr(input, allow_struct)
+ }
+ }
+
+ // <atom> (..<args>) ...
+ // <atom> . <ident> (..<args>) ...
+ // <atom> . <ident> ...
+ // <atom> . <lit> ...
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+ let inner_attrs = e.replace_attrs(Vec::new());
+ let attrs = private::attrs(outer_attrs, inner_attrs);
+ e.replace_attrs(attrs);
+ Ok(e)
+ }
+@@ -1518,28 +1471,36 @@ pub(crate) mod parsing {
+ let content;
+ e = Expr::Call(ExprCall {
+ attrs: Vec::new(),
+ func: Box::new(e),
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+ colon2_token: input.parse()?,
+ lt_token: input.parse()?,
+ args: {
+ let mut args = Punctuated::new();
+ loop {
+@@ -1615,20 +1576,27 @@ pub(crate) mod parsing {
+ e = Expr::Call(ExprCall {
+ attrs: Vec::new(),
+ func: Box::new(e),
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+ let content;
+ e = Expr::Index(ExprIndex {
+ attrs: Vec::new(),
+ expr: Box::new(e),
+ bracket_token: bracketed!(content in input),
+@@ -1641,17 +1609,21 @@ pub(crate) mod parsing {
+
+ Ok(e)
+ }
+
+ // Parse all atomic expressions which don't have to worry about precedence
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+ } else if input.peek(Token![async])
+ && (input.peek2(token::Brace) || input.peek2(Token![move]) && input.peek3(token::Brace))
+ {
+ input.call(expr_async).map(Expr::Async)
+ } else if input.peek(Token![try]) && input.peek2(token::Brace) {
+@@ -1663,17 +1635,16 @@ pub(crate) mod parsing {
+ {
+ expr_closure(input, allow_struct).map(Expr::Closure)
+ } else if input.peek(Ident)
+ || input.peek(Token![::])
+ || input.peek(Token![<])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+ } else if input.peek(token::Paren) {
+ paren_or_tuple(input)
+ } else if input.peek(Token![break]) {
+ expr_break(input, allow_struct).map(Expr::Break)
+ } else if input.peek(Token![continue]) {
+@@ -1735,17 +1706,16 @@ pub(crate) mod parsing {
+ } else if input.peek(token::Paren) {
+ input.call(expr_paren).map(Expr::Paren)
+ } else if input.peek(Ident)
+ || input.peek(Token![::])
+ || input.peek(Token![<])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+ } else {
+ Err(input.error("unsupported expression; enable syn's features=[\"full\"]"))
+ }
+ }
+
+@@ -1873,17 +1843,17 @@ pub(crate) mod parsing {
+ }))
+ } else {
+ Err(content.error("expected `,` or `;`"))
+ }
+ }
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+ Expr::While(input.parse()?)
+ } else if input.peek(Token![for]) {
+ Expr::ForLoop(input.parse()?)
+ } else if input.peek(Token![loop]) {
+ Expr::Loop(input.parse()?)
+@@ -1900,17 +1870,17 @@ pub(crate) mod parsing {
+ let mut expr = unary_expr(input, allow_struct)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+
+ let allow_struct = AllowStruct(true);
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ }
+@@ -1946,63 +1916,48 @@ pub(crate) mod parsing {
+ attrs: Vec::new(),
+ paren_token: parenthesized!(content in input),
+ expr: content.parse()?,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_let(input: ParseStream) -> Result<ExprLet> {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+ Some(input.call(else_block)?)
+ } else {
+ None
+ }
+ },
+@@ -2028,94 +1983,81 @@ pub(crate) mod parsing {
+ };
+
+ Ok((else_token, Box::new(else_branch)))
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+ in_token,
+ expr: Box::new(expr),
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+
+ let mut arms = Vec::new();
+ while !content.is_empty() {
+ arms.push(content.call(Arm::parse)?);
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+ arms,
+ })
+ }
+ }
+
+@@ -2300,27 +2242,28 @@ pub(crate) mod parsing {
+ }
+ Ok(pat)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+@@ -2394,16 +2337,17 @@ pub(crate) mod parsing {
+ }
+ },
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+ let value: Expr = input.parse()?;
+ (Some(colon_token), value)
+ } else if let Member::Named(ident) = &member {
+ let value = Expr::Path(ExprPath {
+ attrs: Vec::new(),
+@@ -2411,73 +2355,63 @@ pub(crate) mod parsing {
+ path: Path::from(ident.clone()),
+ });
+ (None, value)
+ } else {
+ unreachable!()
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_struct_helper(
+ input: ParseStream,
+ outer_attrs: Vec<Attribute>,
+ path: Path,
+ ) -> Result<ExprStruct> {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_unsafe(input: ParseStream) -> Result<ExprUnsafe> {
+ let unsafe_token: Token![unsafe] = input.parse()?;
+
+ let content;
+@@ -2572,37 +2506,17 @@ pub(crate) mod parsing {
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for Arm {
+ fn parse(input: ParseStream) -> Result<Arm> {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+ let guard: Expr = input.parse()?;
+ Some((if_token, Box::new(guard)))
+ } else {
+ None
+ }
+@@ -2636,16 +2550,36 @@ pub(crate) mod parsing {
+ span: lit.span(),
+ })
+ } else {
+ Err(Error::new(lit.span(), "expected unsuffixed integer"))
+ }
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
+ match *self {
+ Member::Named(_) => true,
+ Member::Unnamed(_) => false,
+ }
+ }
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+--- third_party/rust/syn/src/ext.rs
++++ third_party/rust/syn/src/ext.rs
+@@ -1,27 +1,27 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+ use crate::parse::{ParseStream, Result};
+
+ use crate::buffer::Cursor;
+ use crate::parse::Peek;
+ use crate::sealed::lookahead;
+ use crate::token::CustomToken;
+
+ /// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
+ ///
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+ /// This is useful when parsing macro input which allows Rust keywords as
+ /// identifiers.
+ ///
+ /// # Example
+ ///
+@@ -124,12 +124,18 @@ impl lookahead::Sealed for private::Peek
+
+ mod private {
+ use proc_macro2::Ident;
+
+ pub trait Sealed {}
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
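The ext.rs hunk above is mostly mechanical (doc wording plus explicit Copy/Clone for the private PeekFn), but the IdentExt trait it touches is easiest to see in use. A small sketch, assuming syn 1.0 with the "parsing" feature; the key/value mini-grammar is hypothetical:

use proc_macro2::Ident;
use syn::ext::IdentExt;
use syn::parse::{ParseStream, Result};
use syn::Token;

// Accept any identifier as a key, including Rust keywords such as `type`
// or `async`, which the plain `Ident` parser would reject.
fn parse_key(input: ParseStream) -> Result<Ident> {
    let key = input.call(Ident::parse_any)?;
    input.parse::<Token![=]>()?;
    Ok(key)
}

fn main() -> Result<()> {
    use syn::parse::Parser;
    // Parser is implemented for bare parser functions like parse_key.
    let key = parse_key.parse_str("type =")?;
    println!("parsed key `{}`", key);
    Ok(())
}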
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+--- third_party/rust/syn/src/file.rs
++++ third_party/rust/syn/src/file.rs
+@@ -1,14 +1,14 @@
+ use super::*;
+
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+ /// Parse a Rust source file into a `syn::File` and print out a debug
+ /// representation of the syntax tree.
+ ///
+ /// ```
+ /// use std::env;
+@@ -32,16 +32,18 @@ ast_struct! {
+ /// };
+ ///
+ /// let mut file = File::open(&filename).expect("Unable to open file");
+ ///
+ /// let mut src = String::new();
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+ ///
+ /// Running with its own source code as input, this program prints output
+ /// that begins with:
+ ///
+ /// ```text
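The file.rs change is documentation only: the availability wording plus a note that the Debug dump in its example needs the "extra-traits" feature. A variant that avoids Debug entirely might look like this (the src/lib.rs path is a placeholder):

use std::fs;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // syn::parse_file needs the "full" and "parsing" features but not
    // "extra-traits"; the items of the parsed File can be inspected directly.
    let src = fs::read_to_string("src/lib.rs")?;
    let file = syn::parse_file(&src)?;
    println!("{} top-level items", file.items.len());
    Ok(())
}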
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {
++ ItemForeignMod {
++ attrs: self.attrs.clone(),
++ abi: self.abi.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemImpl {
++ fn clone(&self) -> Self {
++ ItemImpl {
++ attrs: self.attrs.clone(),
++ defaultness: self.defaultness.clone(),
++ unsafety: self.unsafety.clone(),
++ impl_token: self.impl_token.clone(),
++ generics: self.generics.clone(),
++ trait_: self.trait_.clone(),
++ self_ty: self.self_ty.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro {
++ fn clone(&self) -> Self {
++ ItemMacro {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro2 {
++ fn clone(&self) -> Self {
++ ItemMacro2 {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ macro_token: self.macro_token.clone(),
++ ident: self.ident.clone(),
++ rules: self.rules.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMod {
++ fn clone(&self) -> Self {
++ ItemMod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ mod_token: self.mod_token.clone(),
++ ident: self.ident.clone(),
++ content: self.content.clone(),
++ semi: self.semi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStatic {
++ fn clone(&self) -> Self {
++ ItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStruct {
++ fn clone(&self) -> Self {
++ ItemStruct {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ struct_token: self.struct_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTrait {
++ fn clone(&self) -> Self {
++ ItemTrait {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ unsafety: self.unsafety.clone(),
++ auto_token: self.auto_token.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ supertraits: self.supertraits.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTraitAlias {
++ fn clone(&self) -> Self {
++ ItemTraitAlias {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ bounds: self.bounds.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemType {
++ fn clone(&self) -> Self {
++ ItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUnion {
++ fn clone(&self) -> Self {
++ ItemUnion {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ union_token: self.union_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUse {
++ fn clone(&self) -> Self {
++ ItemUse {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ use_token: self.use_token.clone(),
++ leading_colon: self.leading_colon.clone(),
++ tree: self.tree.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Label {
++ fn clone(&self) -> Self {
++ Label {
++ name: self.name.clone(),
++ colon_token: self.colon_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for LifetimeDef {
++ fn clone(&self) -> Self {
++ LifetimeDef {
++ attrs: self.attrs.clone(),
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++impl Clone for Lit {
++ fn clone(&self) -> Self {
++ match self {
++ Lit::Str(v0) => Lit::Str(v0.clone()),
++ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
++ Lit::Byte(v0) => Lit::Byte(v0.clone()),
++ Lit::Char(v0) => Lit::Char(v0.clone()),
++ Lit::Int(v0) => Lit::Int(v0.clone()),
++ Lit::Float(v0) => Lit::Float(v0.clone()),
++ Lit::Bool(v0) => Lit::Bool(v0.clone()),
++ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
++ }
++ }
++}
++impl Clone for LitBool {
++ fn clone(&self) -> Self {
++ LitBool {
++ value: self.value.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Local {
++ fn clone(&self) -> Self {
++ Local {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ init: self.init.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Macro {
++ fn clone(&self) -> Self {
++ Macro {
++ path: self.path.clone(),
++ bang_token: self.bang_token.clone(),
++ delimiter: self.delimiter.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MacroDelimiter {
++ fn clone(&self) -> Self {
++ match self {
++ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
++ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
++ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Member {
++ fn clone(&self) -> Self {
++ match self {
++ Member::Named(v0) => Member::Named(v0.clone()),
++ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Meta {
++ fn clone(&self) -> Self {
++ match self {
++ Meta::Path(v0) => Meta::Path(v0.clone()),
++ Meta::List(v0) => Meta::List(v0.clone()),
++ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaList {
++ fn clone(&self) -> Self {
++ MetaList {
++ path: self.path.clone(),
++ paren_token: self.paren_token.clone(),
++ nested: self.nested.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaNameValue {
++ fn clone(&self) -> Self {
++ MetaNameValue {
++ path: self.path.clone(),
++ eq_token: self.eq_token.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for MethodTurbofish {
++ fn clone(&self) -> Self {
++ MethodTurbofish {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for NestedMeta {
++ fn clone(&self) -> Self {
++ match self {
++ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
++ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ParenthesizedGenericArguments {
++ fn clone(&self) -> Self {
++ ParenthesizedGenericArguments {
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Pat {
++ fn clone(&self) -> Self {
++ match self {
++ Pat::Box(v0) => Pat::Box(v0.clone()),
++ Pat::Ident(v0) => Pat::Ident(v0.clone()),
++ Pat::Lit(v0) => Pat::Lit(v0.clone()),
++ Pat::Macro(v0) => Pat::Macro(v0.clone()),
++ Pat::Or(v0) => Pat::Or(v0.clone()),
++ Pat::Path(v0) => Pat::Path(v0.clone()),
++ Pat::Range(v0) => Pat::Range(v0.clone()),
++ Pat::Reference(v0) => Pat::Reference(v0.clone()),
++ Pat::Rest(v0) => Pat::Rest(v0.clone()),
++ Pat::Slice(v0) => Pat::Slice(v0.clone()),
++ Pat::Struct(v0) => Pat::Struct(v0.clone()),
++ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
++ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
++ Pat::Type(v0) => Pat::Type(v0.clone()),
++ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
++ Pat::Wild(v0) => Pat::Wild(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatBox {
++ fn clone(&self) -> Self {
++ PatBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatIdent {
++ fn clone(&self) -> Self {
++ PatIdent {
++ attrs: self.attrs.clone(),
++ by_ref: self.by_ref.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ subpat: self.subpat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatLit {
++ fn clone(&self) -> Self {
++ PatLit {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatMacro {
++ fn clone(&self) -> Self {
++ PatMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatOr {
++ fn clone(&self) -> Self {
++ PatOr {
++ attrs: self.attrs.clone(),
++ leading_vert: self.leading_vert.clone(),
++ cases: self.cases.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatPath {
++ fn clone(&self) -> Self {
++ PatPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRange {
++ fn clone(&self) -> Self {
++ PatRange {
++ attrs: self.attrs.clone(),
++ lo: self.lo.clone(),
++ limits: self.limits.clone(),
++ hi: self.hi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatReference {
++ fn clone(&self) -> Self {
++ PatReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ mutability: self.mutability.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRest {
++ fn clone(&self) -> Self {
++ PatRest {
++ attrs: self.attrs.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatSlice {
++ fn clone(&self) -> Self {
++ PatSlice {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatStruct {
++ fn clone(&self) -> Self {
++ PatStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTuple {
++ fn clone(&self) -> Self {
++ PatTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTupleStruct {
++ fn clone(&self) -> Self {
++ PatTupleStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatType {
++ fn clone(&self) -> Self {
++ PatType {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatWild {
++ fn clone(&self) -> Self {
++ PatWild {
++ attrs: self.attrs.clone(),
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Path {
++ fn clone(&self) -> Self {
++ Path {
++ leading_colon: self.leading_colon.clone(),
++ segments: self.segments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathArguments {
++ fn clone(&self) -> Self {
++ match self {
++ PathArguments::None => PathArguments::None,
++ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
++ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathSegment {
++ fn clone(&self) -> Self {
++ PathSegment {
++ ident: self.ident.clone(),
++ arguments: self.arguments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateEq {
++ fn clone(&self) -> Self {
++ PredicateEq {
++ lhs_ty: self.lhs_ty.clone(),
++ eq_token: self.eq_token.clone(),
++ rhs_ty: self.rhs_ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateLifetime {
++ fn clone(&self) -> Self {
++ PredicateLifetime {
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateType {
++ fn clone(&self) -> Self {
++ PredicateType {
++ lifetimes: self.lifetimes.clone(),
++ bounded_ty: self.bounded_ty.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for QSelf {
++ fn clone(&self) -> Self {
++ QSelf {
++ lt_token: self.lt_token.clone(),
++ ty: self.ty.clone(),
++ position: self.position.clone(),
++ as_token: self.as_token.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Copy for RangeLimits {}
++#[cfg(feature = "full")]
++impl Clone for RangeLimits {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Receiver {
++ fn clone(&self) -> Self {
++ Receiver {
++ attrs: self.attrs.clone(),
++ reference: self.reference.clone(),
++ mutability: self.mutability.clone(),
++ self_token: self.self_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ReturnType {
++ fn clone(&self) -> Self {
++ match self {
++ ReturnType::Default => ReturnType::Default,
++ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Signature {
++ fn clone(&self) -> Self {
++ Signature {
++ constness: self.constness.clone(),
++ asyncness: self.asyncness.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Stmt {
++ fn clone(&self) -> Self {
++ match self {
++ Stmt::Local(v0) => Stmt::Local(v0.clone()),
++ Stmt::Item(v0) => Stmt::Item(v0.clone()),
++ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
++ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBound {
++ fn clone(&self) -> Self {
++ TraitBound {
++ paren_token: self.paren_token.clone(),
++ modifier: self.modifier.clone(),
++ lifetimes: self.lifetimes.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBoundModifier {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItem {
++ fn clone(&self) -> Self {
++ match self {
++ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
++ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
++ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
++ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
++ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemConst {
++ fn clone(&self) -> Self {
++ TraitItemConst {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMacro {
++ fn clone(&self) -> Self {
++ TraitItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMethod {
++ fn clone(&self) -> Self {
++ TraitItemMethod {
++ attrs: self.attrs.clone(),
++ sig: self.sig.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemType {
++ fn clone(&self) -> Self {
++ TraitItemType {
++ attrs: self.attrs.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Type {
++ fn clone(&self) -> Self {
++ match self {
++ Type::Array(v0) => Type::Array(v0.clone()),
++ Type::BareFn(v0) => Type::BareFn(v0.clone()),
++ Type::Group(v0) => Type::Group(v0.clone()),
++ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
++ Type::Infer(v0) => Type::Infer(v0.clone()),
++ Type::Macro(v0) => Type::Macro(v0.clone()),
++ Type::Never(v0) => Type::Never(v0.clone()),
++ Type::Paren(v0) => Type::Paren(v0.clone()),
++ Type::Path(v0) => Type::Path(v0.clone()),
++ Type::Ptr(v0) => Type::Ptr(v0.clone()),
++ Type::Reference(v0) => Type::Reference(v0.clone()),
++ Type::Slice(v0) => Type::Slice(v0.clone()),
++ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
++ Type::Tuple(v0) => Type::Tuple(v0.clone()),
++ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeArray {
++ fn clone(&self) -> Self {
++ TypeArray {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeBareFn {
++ fn clone(&self) -> Self {
++ TypeBareFn {
++ lifetimes: self.lifetimes.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeGroup {
++ fn clone(&self) -> Self {
++ TypeGroup {
++ group_token: self.group_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeImplTrait {
++ fn clone(&self) -> Self {
++ TypeImplTrait {
++ impl_token: self.impl_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeInfer {
++ fn clone(&self) -> Self {
++ TypeInfer {
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeMacro {
++ fn clone(&self) -> Self {
++ TypeMacro {
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeNever {
++ fn clone(&self) -> Self {
++ TypeNever {
++ bang_token: self.bang_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParam {
++ fn clone(&self) -> Self {
++ TypeParam {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParamBound {
++ fn clone(&self) -> Self {
++ match self {
++ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
++ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParen {
++ fn clone(&self) -> Self {
++ TypeParen {
++ paren_token: self.paren_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePath {
++ fn clone(&self) -> Self {
++ TypePath {
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePtr {
++ fn clone(&self) -> Self {
++ TypePtr {
++ star_token: self.star_token.clone(),
++ const_token: self.const_token.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeReference {
++ fn clone(&self) -> Self {
++ TypeReference {
++ and_token: self.and_token.clone(),
++ lifetime: self.lifetime.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeSlice {
++ fn clone(&self) -> Self {
++ TypeSlice {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTraitObject {
++ fn clone(&self) -> Self {
++ TypeTraitObject {
++ dyn_token: self.dyn_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTuple {
++ fn clone(&self) -> Self {
++ TypeTuple {
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for UnOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGlob {
++ fn clone(&self) -> Self {
++ UseGlob {
++ star_token: self.star_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGroup {
++ fn clone(&self) -> Self {
++ UseGroup {
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseName {
++ fn clone(&self) -> Self {
++ UseName {
++ ident: self.ident.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UsePath {
++ fn clone(&self) -> Self {
++ UsePath {
++ ident: self.ident.clone(),
++ colon2_token: self.colon2_token.clone(),
++ tree: self.tree.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseRename {
++ fn clone(&self) -> Self {
++ UseRename {
++ ident: self.ident.clone(),
++ as_token: self.as_token.clone(),
++ rename: self.rename.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseTree {
++ fn clone(&self) -> Self {
++ match self {
++ UseTree::Path(v0) => UseTree::Path(v0.clone()),
++ UseTree::Name(v0) => UseTree::Name(v0.clone()),
++ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
++ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
++ UseTree::Group(v0) => UseTree::Group(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variadic {
++ fn clone(&self) -> Self {
++ Variadic {
++ attrs: self.attrs.clone(),
++ dots: self.dots.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variant {
++ fn clone(&self) -> Self {
++ Variant {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ fields: self.fields.clone(),
++ discriminant: self.discriminant.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisCrate {
++ fn clone(&self) -> Self {
++ VisCrate {
++ crate_token: self.crate_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisPublic {
++ fn clone(&self) -> Self {
++ VisPublic {
++ pub_token: self.pub_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisRestricted {
++ fn clone(&self) -> Self {
++ VisRestricted {
++ pub_token: self.pub_token.clone(),
++ paren_token: self.paren_token.clone(),
++ in_token: self.in_token.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Visibility {
++ fn clone(&self) -> Self {
++ match self {
++ Visibility::Public(v0) => Visibility::Public(v0.clone()),
++ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
++ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
++ Visibility::Inherited => Visibility::Inherited,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WhereClause {
++ fn clone(&self) -> Self {
++ WhereClause {
++ where_token: self.where_token.clone(),
++ predicates: self.predicates.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WherePredicate {
++ fn clone(&self) -> Self {
++ match self {
++ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
++ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
++ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/gen/debug.rs
+@@ -0,0 +1,2857 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++use crate::*;
++use std::fmt::{self, Debug};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Abi {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Abi");
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("name", &self.name);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AngleBracketedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Arm {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Arm");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("guard", &self.guard);
++ formatter.field("fat_arrow_token", &self.fat_arrow_token);
++ formatter.field("body", &self.body);
++ formatter.field("comma", &self.comma);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AttrStyle {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ AttrStyle::Outer => formatter.write_str("Outer"),
++ AttrStyle::Inner(v0) => {
++ let mut formatter = formatter.debug_tuple("Inner");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Attribute {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Attribute");
++ formatter.field("pound_token", &self.pound_token);
++ formatter.field("style", &self.style);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("path", &self.path);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BareFnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BareFnArg");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("name", &self.name);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BinOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ BinOp::Add(v0) => {
++ let mut formatter = formatter.debug_tuple("Add");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Sub(v0) => {
++ let mut formatter = formatter.debug_tuple("Sub");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Mul(v0) => {
++ let mut formatter = formatter.debug_tuple("Mul");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Div(v0) => {
++ let mut formatter = formatter.debug_tuple("Div");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Rem(v0) => {
++ let mut formatter = formatter.debug_tuple("Rem");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::And(v0) => {
++ let mut formatter = formatter.debug_tuple("And");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXor(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXor");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAnd(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAnd");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOr(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shl(v0) => {
++ let mut formatter = formatter.debug_tuple("Shl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shr(v0) => {
++ let mut formatter = formatter.debug_tuple("Shr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Lt(v0) => {
++ let mut formatter = formatter.debug_tuple("Lt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Le(v0) => {
++ let mut formatter = formatter.debug_tuple("Le");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ne(v0) => {
++ let mut formatter = formatter.debug_tuple("Ne");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ge(v0) => {
++ let mut formatter = formatter.debug_tuple("Ge");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Gt(v0) => {
++ let mut formatter = formatter.debug_tuple("Gt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::AddEq(v0) => {
++ let mut formatter = formatter.debug_tuple("AddEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::SubEq(v0) => {
++ let mut formatter = formatter.debug_tuple("SubEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::MulEq(v0) => {
++ let mut formatter = formatter.debug_tuple("MulEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::DivEq(v0) => {
++ let mut formatter = formatter.debug_tuple("DivEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::RemEq(v0) => {
++ let mut formatter = formatter.debug_tuple("RemEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXorEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXorEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAndEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAndEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShlEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShlEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Binding {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Binding");
++ formatter.field("ident", &self.ident);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Block {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Block");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("stmts", &self.stmts);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BoundLifetimes {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BoundLifetimes");
++ formatter.field("for_token", &self.for_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ConstParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ConstParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Constraint {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Constraint");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for Data {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Data::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataEnum");
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataStruct");
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataUnion");
++ formatter.field("union_token", &self.union_token);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DeriveInput {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DeriveInput");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("data", &self.data);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Expr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ let mut formatter = formatter.debug_tuple("Assign");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ let mut formatter = formatter.debug_tuple("AssignOp");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ let mut formatter = formatter.debug_tuple("Async");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ let mut formatter = formatter.debug_tuple("Await");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Binary(v0) => {
++ let mut formatter = formatter.debug_tuple("Binary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ let mut formatter = formatter.debug_tuple("Block");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ let mut formatter = formatter.debug_tuple("Break");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Call(v0) => {
++ let mut formatter = formatter.debug_tuple("Call");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Cast(v0) => {
++ let mut formatter = formatter.debug_tuple("Cast");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ let mut formatter = formatter.debug_tuple("Closure");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ let mut formatter = formatter.debug_tuple("Continue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Field(v0) => {
++ let mut formatter = formatter.debug_tuple("Field");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ let mut formatter = formatter.debug_tuple("ForLoop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ let mut formatter = formatter.debug_tuple("If");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Index(v0) => {
++ let mut formatter = formatter.debug_tuple("Index");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ let mut formatter = formatter.debug_tuple("Let");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ let mut formatter = formatter.debug_tuple("Loop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ let mut formatter = formatter.debug_tuple("Match");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ let mut formatter = formatter.debug_tuple("MethodCall");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ let mut formatter = formatter.debug_tuple("Repeat");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ let mut formatter = formatter.debug_tuple("Return");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ let mut formatter = formatter.debug_tuple("Try");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ let mut formatter = formatter.debug_tuple("TryBlock");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Unary(v0) => {
++ let mut formatter = formatter.debug_tuple("Unary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ let mut formatter = formatter.debug_tuple("Unsafe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ let mut formatter = formatter.debug_tuple("While");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ let mut formatter = formatter.debug_tuple("Yield");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprArray");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssign {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssign");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssignOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssignOp");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAsync {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAsync");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("async_token", &self.async_token);
++ formatter.field("capture", &self.capture);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAwait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAwait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("await_token", &self.await_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprBinary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBinary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBreak {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBreak");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("break_token", &self.break_token);
++ formatter.field("label", &self.label);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("func", &self.func);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCast {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCast");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprClosure {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprClosure");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("movability", &self.movability);
++ formatter.field("capture", &self.capture);
++ formatter.field("or1_token", &self.or1_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("or2_token", &self.or2_token);
++ formatter.field("output", &self.output);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprContinue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprContinue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("continue_token", &self.continue_token);
++ formatter.field("label", &self.label);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprField {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprField");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("member", &self.member);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprForLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprForLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("for_token", &self.for_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprGroup");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("group_token", &self.group_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprIf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIf");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("if_token", &self.if_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("then_branch", &self.then_branch);
++ formatter.field("else_branch", &self.else_branch);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprIndex {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIndex");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("index", &self.index);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLet {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLet");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("loop_token", &self.loop_token);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMatch {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMatch");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("match_token", &self.match_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("arms", &self.arms);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMethodCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMethodCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("receiver", &self.receiver);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("method", &self.method);
++ formatter.field("turbofish", &self.turbofish);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprParen");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("from", &self.from);
++ formatter.field("limits", &self.limits);
++ formatter.field("to", &self.to);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("raw", &self.raw);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRepeat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRepeat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReturn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReturn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("return_token", &self.return_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.field("rest", &self.rest);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTry {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTry");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("question_token", &self.question_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTryBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTryBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("try_token", &self.try_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprUnary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("op", &self.op);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprUnsafe {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnsafe");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("unsafe_token", &self.unsafe_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprWhile {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprWhile");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("while_token", &self.while_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprYield {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprYield");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("yield_token", &self.yield_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Field {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Field");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldPat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldPat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldValue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Fields {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Fields::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unit => formatter.write_str("Unit"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsNamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsNamed");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("named", &self.named);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsUnnamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsUnnamed");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("unnamed", &self.unnamed);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for File {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("File");
++ formatter.field("shebang", &self.shebang);
++ formatter.field("attrs", &self.attrs);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ FnArg::Receiver(v0) => {
++ let mut formatter = formatter.debug_tuple("Receiver");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ FnArg::Typed(v0) => {
++ let mut formatter = formatter.debug_tuple("Typed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ForeignItem::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Binding(v0) => {
++ let mut formatter = formatter.debug_tuple("Binding");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Constraint(v0) => {
++ let mut formatter = formatter.debug_tuple("Constraint");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for GenericMethodArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericMethodArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericParam::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Generics {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Generics");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("params", &self.params);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.field("where_clause", &self.where_clause);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ImplItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Index");
++ formatter.field("index", &self.index);
++ formatter.field("span", &self.span);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Item {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Item::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ExternCrate(v0) => {
++ let mut formatter = formatter.debug_tuple("ExternCrate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ForeignMod(v0) => {
++ let mut formatter = formatter.debug_tuple("ForeignMod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Impl(v0) => {
++ let mut formatter = formatter.debug_tuple("Impl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro2(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro2");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Mod(v0) => {
++ let mut formatter = formatter.debug_tuple("Mod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::TraitAlias(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitAlias");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Use(v0) => {
++ let mut formatter = formatter.debug_tuple("Use");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemEnum");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemExternCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemExternCrate");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("crate_token", &self.crate_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rename", &self.rename);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemForeignMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemForeignMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("abi", &self.abi);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemImpl {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemImpl");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("generics", &self.generics);
++ formatter.field("trait_", &self.trait_);
++ formatter.field("self_ty", &self.self_ty);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro2 {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro2");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("macro_token", &self.macro_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rules", &self.rules);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("mod_token", &self.mod_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("content", &self.content);
++ formatter.field("semi", &self.semi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTrait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("auto_token", &self.auto_token);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("supertraits", &self.supertraits);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTraitAlias {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTraitAlias");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUnion");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("union_token", &self.union_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUse {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUse");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("use_token", &self.use_token);
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("tree", &self.tree);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Label {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Label");
++ formatter.field("name", &self.name);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.finish()
++ }
++}
++impl Debug for Lifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Lifetime");
++ formatter.field("apostrophe", &self.apostrophe);
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for LifetimeDef {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("LifetimeDef");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++impl Debug for Lit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Lit::Str(v0) => {
++ let mut formatter = formatter.debug_tuple("Str");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::ByteStr(v0) => {
++ let mut formatter = formatter.debug_tuple("ByteStr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Byte(v0) => {
++ let mut formatter = formatter.debug_tuple("Byte");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Char(v0) => {
++ let mut formatter = formatter.debug_tuple("Char");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Int(v0) => {
++ let mut formatter = formatter.debug_tuple("Int");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Float(v0) => {
++ let mut formatter = formatter.debug_tuple("Float");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Bool(v0) => {
++ let mut formatter = formatter.debug_tuple("Bool");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Local {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Local");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("init", &self.init);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Macro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Macro");
++ formatter.field("path", &self.path);
++ formatter.field("bang_token", &self.bang_token);
++ formatter.field("delimiter", &self.delimiter);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MacroDelimiter {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ MacroDelimiter::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Brace(v0) => {
++ let mut formatter = formatter.debug_tuple("Brace");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Bracket(v0) => {
++ let mut formatter = formatter.debug_tuple("Bracket");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Member::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Meta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Meta::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::List(v0) => {
++ let mut formatter = formatter.debug_tuple("List");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::NameValue(v0) => {
++ let mut formatter = formatter.debug_tuple("NameValue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaList {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaList");
++ formatter.field("path", &self.path);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("nested", &self.nested);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaNameValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaNameValue");
++ formatter.field("path", &self.path);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for MethodTurbofish {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MethodTurbofish");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for NestedMeta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ NestedMeta::Meta(v0) => {
++ let mut formatter = formatter.debug_tuple("Meta");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ NestedMeta::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ParenthesizedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Pat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Pat::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Ident(v0) => {
++ let mut formatter = formatter.debug_tuple("Ident");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Rest(v0) => {
++ let mut formatter = formatter.debug_tuple("Rest");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::TupleStruct(v0) => {
++ let mut formatter = formatter.debug_tuple("TupleStruct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Wild(v0) => {
++ let mut formatter = formatter.debug_tuple("Wild");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatIdent {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatIdent");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("by_ref", &self.by_ref);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("subpat", &self.subpat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatOr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatOr");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("leading_vert", &self.leading_vert);
++ formatter.field("cases", &self.cases);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lo", &self.lo);
++ formatter.field("limits", &self.limits);
++ formatter.field("hi", &self.hi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRest {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRest");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatSlice");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTupleStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTupleStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatWild {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatWild");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Path {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Path");
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("segments", &self.segments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ PathArguments::None => formatter.write_str("None"),
++ PathArguments::AngleBracketed(v0) => {
++ let mut formatter = formatter.debug_tuple("AngleBracketed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ PathArguments::Parenthesized(v0) => {
++ let mut formatter = formatter.debug_tuple("Parenthesized");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathSegment {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PathSegment");
++ formatter.field("ident", &self.ident);
++ formatter.field("arguments", &self.arguments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateEq {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateEq");
++ formatter.field("lhs_ty", &self.lhs_ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("rhs_ty", &self.rhs_ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateLifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateLifetime");
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateType");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("bounded_ty", &self.bounded_ty);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for QSelf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("QSelf");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("position", &self.position);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for RangeLimits {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ RangeLimits::HalfOpen(v0) => {
++ let mut formatter = formatter.debug_tuple("HalfOpen");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ RangeLimits::Closed(v0) => {
++ let mut formatter = formatter.debug_tuple("Closed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Receiver {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Receiver");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("reference", &self.reference);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("self_token", &self.self_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ReturnType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ReturnType::Default => formatter.write_str("Default"),
++ ReturnType::Type(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Signature {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Signature");
++ formatter.field("constness", &self.constness);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Stmt {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Stmt::Local(v0) => {
++ let mut formatter = formatter.debug_tuple("Local");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Item(v0) => {
++ let mut formatter = formatter.debug_tuple("Item");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Expr(v0) => {
++ let mut formatter = formatter.debug_tuple("Expr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Semi(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Semi");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitBound");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("modifier", &self.modifier);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBoundModifier {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitBoundModifier::None => formatter.write_str("None"),
++ TraitBoundModifier::Maybe(v0) => {
++ let mut formatter = formatter.debug_tuple("Maybe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("sig", &self.sig);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Type {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Type::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::BareFn(v0) => {
++ let mut formatter = formatter.debug_tuple("BareFn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::ImplTrait(v0) => {
++ let mut formatter = formatter.debug_tuple("ImplTrait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Infer(v0) => {
++ let mut formatter = formatter.debug_tuple("Infer");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Never(v0) => {
++ let mut formatter = formatter.debug_tuple("Never");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Ptr(v0) => {
++ let mut formatter = formatter.debug_tuple("Ptr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::TraitObject(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitObject");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeArray");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeBareFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeBareFn");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeGroup");
++ formatter.field("group_token", &self.group_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeImplTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeImplTrait");
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeInfer {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeInfer");
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeMacro");
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeNever {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeNever");
++ formatter.field("bang_token", &self.bang_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParamBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TypeParamBound::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParen");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePath");
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePtr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePtr");
++ formatter.field("star_token", &self.star_token);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeReference");
++ formatter.field("and_token", &self.and_token);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeSlice");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTraitObject {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTraitObject");
++ formatter.field("dyn_token", &self.dyn_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTuple");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for UnOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UnOp::Deref(v0) => {
++ let mut formatter = formatter.debug_tuple("Deref");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Not(v0) => {
++ let mut formatter = formatter.debug_tuple("Not");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Neg(v0) => {
++ let mut formatter = formatter.debug_tuple("Neg");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGlob {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGlob");
++ formatter.field("star_token", &self.star_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGroup");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseName {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseName");
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UsePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UsePath");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("tree", &self.tree);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseRename {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseRename");
++ formatter.field("ident", &self.ident);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("rename", &self.rename);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseTree {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UseTree::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Name(v0) => {
++ let mut formatter = formatter.debug_tuple("Name");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Rename(v0) => {
++ let mut formatter = formatter.debug_tuple("Rename");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Glob(v0) => {
++ let mut formatter = formatter.debug_tuple("Glob");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variadic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variadic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dots", &self.dots);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variant {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variant");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("fields", &self.fields);
++ formatter.field("discriminant", &self.discriminant);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisCrate");
++ formatter.field("crate_token", &self.crate_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisPublic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisPublic");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisRestricted {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisRestricted");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Visibility {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Visibility::Public(v0) => {
++ let mut formatter = formatter.debug_tuple("Public");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Crate(v0) => {
++ let mut formatter = formatter.debug_tuple("Crate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Restricted(v0) => {
++ let mut formatter = formatter.debug_tuple("Restricted");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Inherited => formatter.write_str("Inherited"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WhereClause {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("WhereClause");
++ formatter.field("where_token", &self.where_token);
++ formatter.field("predicates", &self.predicates);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WherePredicate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ WherePredicate::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/gen/eq.rs
+@@ -0,0 +1,1930 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Abi {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Abi {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AngleBracketedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AngleBracketedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.colon2_token == other.colon2_token && self.args == other.args
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Arm {}
++#[cfg(feature = "full")]
++impl PartialEq for Arm {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.pat == other.pat
++ && self.guard == other.guard
++ && self.body == other.body
++ && self.comma == other.comma
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AttrStyle {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (AttrStyle::Outer, AttrStyle::Outer) => true,
++ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Attribute {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Attribute {
++ fn eq(&self, other: &Self) -> bool {
++ self.style == other.style
++ && self.path == other.path
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BareFnArg {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BareFnArg {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BinOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (BinOp::Add(_), BinOp::Add(_)) => true,
++ (BinOp::Sub(_), BinOp::Sub(_)) => true,
++ (BinOp::Mul(_), BinOp::Mul(_)) => true,
++ (BinOp::Div(_), BinOp::Div(_)) => true,
++ (BinOp::Rem(_), BinOp::Rem(_)) => true,
++ (BinOp::And(_), BinOp::And(_)) => true,
++ (BinOp::Or(_), BinOp::Or(_)) => true,
++ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
++ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
++ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
++ (BinOp::Shl(_), BinOp::Shl(_)) => true,
++ (BinOp::Shr(_), BinOp::Shr(_)) => true,
++ (BinOp::Eq(_), BinOp::Eq(_)) => true,
++ (BinOp::Lt(_), BinOp::Lt(_)) => true,
++ (BinOp::Le(_), BinOp::Le(_)) => true,
++ (BinOp::Ne(_), BinOp::Ne(_)) => true,
++ (BinOp::Ge(_), BinOp::Ge(_)) => true,
++ (BinOp::Gt(_), BinOp::Gt(_)) => true,
++ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
++ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
++ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
++ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
++ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
++ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
++ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
++ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
++ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
++ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Binding {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Binding {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Block {}
++#[cfg(feature = "full")]
++impl PartialEq for Block {
++ fn eq(&self, other: &Self) -> bool {
++ self.stmts == other.stmts
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BoundLifetimes {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BoundLifetimes {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ConstParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ConstParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Constraint {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Constraint {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for Data {}
++#[cfg(feature = "derive")]
++impl PartialEq for Data {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
++ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
++ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataEnum {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.variants == other.variants
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataStruct {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataUnion {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DeriveInput {}
++#[cfg(feature = "derive")]
++impl PartialEq for DeriveInput {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.data == other.data
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Expr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Expr {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ #[cfg(feature = "full")]
++ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
++ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
++ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
++ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
++ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
++ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
++ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
++ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
++ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
++ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
++ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ #[cfg(feature = "full")]
++ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprArray {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssign {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssign {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.left == other.left && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssignOp {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssignOp {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAsync {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAsync {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAwait {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAwait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprBinary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprBinary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBox {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBreak {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBreak {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCall {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.func == other.func && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCast {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCast {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprClosure {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprClosure {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.asyncness == other.asyncness
++ && self.movability == other.movability
++ && self.capture == other.capture
++ && self.inputs == other.inputs
++ && self.output == other.output
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprContinue {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprContinue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprField {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprField {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base && self.member == other.member
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprForLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprForLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.pat == other.pat
++ && self.expr == other.expr
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprIf {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprIf {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.cond == other.cond
++ && self.then_branch == other.then_branch
++ && self.else_branch == other.else_branch
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprIndex {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprIndex {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLet {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLet {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprLit {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMatch {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMatch {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMethodCall {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMethodCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.receiver == other.receiver
++ && self.method == other.method
++ && self.turbofish == other.turbofish
++ && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprPath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRange {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.from == other.from
++ && self.limits == other.limits
++ && self.to == other.to
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReference {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRepeat {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRepeat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReturn {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReturn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ && self.rest == other.rest
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTry {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTry {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTryBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTryBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprType {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprUnary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprUnary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprUnsafe {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprUnsafe {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprWhile {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprWhile {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.cond == other.cond
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprYield {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprYield {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Field {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Field {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldPat {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldPat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldValue {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Fields {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Fields {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
++ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
++ (Fields::Unit, Fields::Unit) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsNamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsNamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.named == other.named
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsUnnamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsUnnamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.unnamed == other.unnamed
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for File {}
++#[cfg(feature = "full")]
++impl PartialEq for File {
++ fn eq(&self, other: &Self) -> bool {
++ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FnArg {}
++#[cfg(feature = "full")]
++impl PartialEq for FnArg {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
++ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
++ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
++ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
++ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
++ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericArgument {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
++ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
++ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for GenericMethodArgument {}
++#[cfg(feature = "full")]
++impl PartialEq for GenericMethodArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
++ self0 == other0
++ }
++ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericParam {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
++ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
++ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Generics {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Generics {
++ fn eq(&self, other: &Self) -> bool {
++ self.lt_token == other.lt_token
++ && self.params == other.params
++ && self.gt_token == other.gt_token
++ && self.where_clause == other.where_clause
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
++ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
++ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
++ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
++ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Item {}
++#[cfg(feature = "full")]
++impl PartialEq for Item {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
++ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
++ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
++ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
++ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
++ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
++ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
++ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
++ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
++ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
++ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
++ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
++ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
++ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
++ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
++ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
++ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemEnum {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.variants == other.variants
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemExternCrate {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemExternCrate {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemForeignMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemForeignMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemImpl {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemImpl {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.defaultness == other.defaultness
++ && self.unsafety == other.unsafety
++ && self.generics == other.generics
++ && self.trait_ == other.trait_
++ && self.self_ty == other.self_ty
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.mac == other.mac
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro2 {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro2 {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.content == other.content
++ && self.semi == other.semi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTrait {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.unsafety == other.unsafety
++ && self.auto_token == other.auto_token
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.supertraits == other.supertraits
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTraitAlias {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTraitAlias {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUnion {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUse {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUse {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.leading_colon == other.leading_colon
++ && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Label {}
++#[cfg(feature = "full")]
++impl PartialEq for Label {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for LifetimeDef {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for LifetimeDef {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lifetime == other.lifetime
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ }
++}
++impl Eq for Lit {}
++impl PartialEq for Lit {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
++ (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
++ (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
++ (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
++ (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
++ (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
++ (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
++ (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
++ self0.to_string() == other0.to_string()
++ }
++ _ => false,
++ }
++ }
++}
++impl Eq for LitBool {}
++impl PartialEq for LitBool {
++ fn eq(&self, other: &Self) -> bool {
++ self.value == other.value
++ }
++}
++impl Eq for LitByte {}
++impl Eq for LitByteStr {}
++impl Eq for LitChar {}
++impl Eq for LitFloat {}
++impl Eq for LitInt {}
++impl Eq for LitStr {}
++#[cfg(feature = "full")]
++impl Eq for Local {}
++#[cfg(feature = "full")]
++impl PartialEq for Local {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Macro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Macro {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path
++ && self.delimiter == other.delimiter
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MacroDelimiter {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MacroDelimiter {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
++ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
++ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Meta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Meta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
++ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
++ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaList {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaList {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.nested == other.nested
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaNameValue {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaNameValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for MethodTurbofish {}
++#[cfg(feature = "full")]
++impl PartialEq for MethodTurbofish {
++ fn eq(&self, other: &Self) -> bool {
++ self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for NestedMeta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for NestedMeta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
++ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ParenthesizedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ParenthesizedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.inputs == other.inputs && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Pat {}
++#[cfg(feature = "full")]
++impl PartialEq for Pat {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
++ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
++ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
++ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
++ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
++ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
++ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
++ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
++ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
++ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
++ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
++ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
++ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
++ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
++ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatBox {}
++#[cfg(feature = "full")]
++impl PartialEq for PatBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatIdent {}
++#[cfg(feature = "full")]
++impl PartialEq for PatIdent {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.by_ref == other.by_ref
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.subpat == other.subpat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatLit {}
++#[cfg(feature = "full")]
++impl PartialEq for PatLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for PatMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatOr {}
++#[cfg(feature = "full")]
++impl PartialEq for PatOr {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.leading_vert == other.leading_vert
++ && self.cases == other.cases
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatPath {}
++#[cfg(feature = "full")]
++impl PartialEq for PatPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRange {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lo == other.lo
++ && self.limits == other.limits
++ && self.hi == other.hi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatReference {}
++#[cfg(feature = "full")]
++impl PartialEq for PatReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRest {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRest {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatSlice {}
++#[cfg(feature = "full")]
++impl PartialEq for PatSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTupleStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTupleStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatType {}
++#[cfg(feature = "full")]
++impl PartialEq for PatType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatWild {}
++#[cfg(feature = "full")]
++impl PartialEq for PatWild {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Path {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Path {
++ fn eq(&self, other: &Self) -> bool {
++ self.leading_colon == other.leading_colon && self.segments == other.segments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathArguments {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (PathArguments::None, PathArguments::None) => true,
++ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
++ self0 == other0
++ }
++ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathSegment {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathSegment {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.arguments == other.arguments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateEq {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateEq {
++ fn eq(&self, other: &Self) -> bool {
++ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateLifetime {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateLifetime {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateType {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.bounded_ty == other.bounded_ty
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for QSelf {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for QSelf {
++ fn eq(&self, other: &Self) -> bool {
++ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for RangeLimits {}
++#[cfg(feature = "full")]
++impl PartialEq for RangeLimits {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
++ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Receiver {}
++#[cfg(feature = "full")]
++impl PartialEq for Receiver {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.reference == other.reference
++ && self.mutability == other.mutability
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ReturnType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ReturnType {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ReturnType::Default, ReturnType::Default) => true,
++ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Signature {}
++#[cfg(feature = "full")]
++impl PartialEq for Signature {
++ fn eq(&self, other: &Self) -> bool {
++ self.constness == other.constness
++ && self.asyncness == other.asyncness
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Stmt {}
++#[cfg(feature = "full")]
++impl PartialEq for Stmt {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
++ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
++ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
++ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBound {
++ fn eq(&self, other: &Self) -> bool {
++ self.paren_token == other.paren_token
++ && self.modifier == other.modifier
++ && self.lifetimes == other.lifetimes
++ && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBoundModifier {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
++ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItem {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
++ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
++ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
++ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
++ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.default == other.default
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.sig == other.sig
++ && self.default == other.default
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Type {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Type {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
++ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
++ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
++ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
++ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
++ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
++ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
++ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
++ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
++ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
++ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
++ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
++ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
++ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
++ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeArray {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem && self.len == other.len
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeBareFn {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeBareFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeGroup {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeImplTrait {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeImplTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeInfer {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeInfer {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeMacro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.mac == other.mac
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeNever {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeNever {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParamBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParamBound {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
++ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePtr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePtr {
++ fn eq(&self, other: &Self) -> bool {
++ self.const_token == other.const_token
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeReference {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeSlice {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTraitObject {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTraitObject {
++ fn eq(&self, other: &Self) -> bool {
++ self.dyn_token == other.dyn_token && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTuple {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.elems == other.elems
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for UnOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UnOp::Deref(_), UnOp::Deref(_)) => true,
++ (UnOp::Not(_), UnOp::Not(_)) => true,
++ (UnOp::Neg(_), UnOp::Neg(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGlob {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGlob {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseName {}
++#[cfg(feature = "full")]
++impl PartialEq for UseName {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UsePath {}
++#[cfg(feature = "full")]
++impl PartialEq for UsePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseRename {}
++#[cfg(feature = "full")]
++impl PartialEq for UseRename {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseTree {}
++#[cfg(feature = "full")]
++impl PartialEq for UseTree {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
++ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
++ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
++ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
++ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variadic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variadic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variant {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variant {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.fields == other.fields
++ && self.discriminant == other.discriminant
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisCrate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisCrate {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisPublic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisPublic {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisRestricted {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisRestricted {
++ fn eq(&self, other: &Self) -> bool {
++ self.in_token == other.in_token && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Visibility {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Visibility {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
++ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
++ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
++ (Visibility::Inherited, Visibility::Inherited) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WhereClause {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WhereClause {
++ fn eq(&self, other: &Self) -> bool {
++ self.predicates == other.predicates
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WherePredicate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WherePredicate {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
++ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
++ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
+--- third_party/rust/syn/src/gen/fold.rs
++++ third_party/rust/syn/src/gen/fold.rs
+@@ -1,12 +1,13 @@
+ // This file is @generated by syn-internal-codegen.
+ // It is not intended for manual editing.
+
+ #![allow(unreachable_code, unused_variables)]
++#![allow(clippy::match_wildcard_for_single_variants)]
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::token::{Brace, Bracket, Group, Paren};
+ use crate::*;
+ use proc_macro2::Span;
+ #[cfg(feature = "full")]
+ macro_rules! full {
+@@ -21,17 +22,17 @@ macro_rules! full {
+ };
+ }
+ /// Syntax tree traversal to transform the nodes of an owned syntax tree.
+ ///
+ /// See the [module documentation] for details.
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"fold"` feature.*
++/// *This trait is available only if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_abi(&mut self, i: Abi) -> Abi {
+ fold_abi(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_angle_bracketed_generic_arguments(
+ &mut self,
+@@ -428,45 +429,37 @@ pub trait Fold {
+ }
+ fn fold_lifetime(&mut self, i: Lifetime) -> Lifetime {
+ fold_lifetime(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
+ fold_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit(&mut self, i: Lit) -> Lit {
+ fold_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
+ fold_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
+ fold_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
+ fold_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
+ fold_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
+ fold_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+ fold_lit_str(self, i)
+ }
+ #[cfg(feature = "full")]
+ fn fold_local(&mut self, i: Local) -> Local {
+ fold_local(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -794,39 +787,39 @@ where
+ pub fn fold_angle_bracketed_generic_arguments<F>(
+ f: &mut F,
+ node: AngleBracketedGenericArguments,
+ ) -> AngleBracketedGenericArguments
+ where
+ F: Fold + ?Sized,
+ {
+ AngleBracketedGenericArguments {
+- colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_arm<F>(f: &mut F, node: Arm) -> Arm
+ where
+ F: Fold + ?Sized,
+ {
+ Arm {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: f.fold_pat(node.pat),
+ guard: (node.guard).map(|it| {
+ (
+ Token![if](tokens_helper(f, &(it).0.span)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
++ fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
+ body: Box::new(f.fold_expr(*node.body)),
+- comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
++ comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_attr_style<F>(f: &mut F, node: AttrStyle) -> AttrStyle
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+@@ -837,109 +830,97 @@ where
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_attribute<F>(f: &mut F, node: Attribute) -> Attribute
+ where
+ F: Fold + ?Sized,
+ {
+ Attribute {
+- pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
++ pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
+ style: f.fold_attr_style(node.style),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ path: f.fold_path(node.path),
+ tokens: node.tokens,
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_bare_fn_arg<F>(f: &mut F, node: BareFnArg) -> BareFnArg
+ where
+ F: Fold + ?Sized,
+ {
+ BareFnArg {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ name: (node.name).map(|it| {
+ (
+ f.fold_ident((it).0),
+- Token ! [ : ](tokens_helper(f, &(it).1.spans)),
++ Token ! [:](tokens_helper(f, &(it).1.spans)),
+ )
+ }),
+ ty: f.fold_type(node.ty),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_bin_op<F>(f: &mut F, node: BinOp) -> BinOp
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
++ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXor(_binding_0) => {
+- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAnd(_binding_0) => {
+- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::BitOr(_binding_0) => {
+- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
+ }
+- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::AddEq(_binding_0) => {
+- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::SubEq(_binding_0) => {
+- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::MulEq(_binding_0) => {
+- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::DivEq(_binding_0) => {
+- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::RemEq(_binding_0) => {
+- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
+- }
++ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXorEq(_binding_0) => {
+- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAndEq(_binding_0) => {
+- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitOrEq(_binding_0) => {
+- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShlEq(_binding_0) => {
+- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShrEq(_binding_0) => {
+- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_binding<F>(f: &mut F, node: Binding) -> Binding
+ where
+ F: Fold + ?Sized,
+ {
+ Binding {
+ ident: f.fold_ident(node.ident),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_block<F>(f: &mut F, node: Block) -> Block
+ where
+ F: Fold + ?Sized,
+ {
+@@ -950,44 +931,44 @@ where
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_bound_lifetimes<F>(f: &mut F, node: BoundLifetimes) -> BoundLifetimes
+ where
+ F: Fold + ?Sized,
+ {
+ BoundLifetimes {
+ for_token: Token![for](tokens_helper(f, &node.for_token.span)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_const_param<F>(f: &mut F, node: ConstParam) -> ConstParam
+ where
+ F: Fold + ?Sized,
+ {
+ ConstParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_constraint<F>(f: &mut F, node: Constraint) -> Constraint
+ where
+ F: Fold + ?Sized,
+ {
+ Constraint {
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+ #[cfg(feature = "derive")]
+ pub fn fold_data<F>(f: &mut F, node: Data) -> Data
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1011,17 +992,17 @@ where
+ #[cfg(feature = "derive")]
+ pub fn fold_data_struct<F>(f: &mut F, node: DataStruct) -> DataStruct
+ where
+ F: Fold + ?Sized,
+ {
+ DataStruct {
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "derive")]
+ pub fn fold_data_union<F>(f: &mut F, node: DataUnion) -> DataUnion
+ where
+ F: Fold + ?Sized,
+ {
+ DataUnion {
+@@ -1107,17 +1088,17 @@ where
+ #[cfg(feature = "full")]
+ pub fn fold_expr_assign<F>(f: &mut F, node: ExprAssign) -> ExprAssign
+ where
+ F: Fold + ?Sized,
+ {
+ ExprAssign {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ left: Box::new(f.fold_expr(*node.left)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_assign_op<F>(f: &mut F, node: ExprAssignOp) -> ExprAssignOp
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1143,17 +1124,17 @@ where
+ #[cfg(feature = "full")]
+ pub fn fold_expr_await<F>(f: &mut F, node: ExprAwait) -> ExprAwait
+ where
+ F: Fold + ?Sized,
+ {
+ ExprAwait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_expr_binary<F>(f: &mut F, node: ExprBinary) -> ExprBinary
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1227,19 +1208,19 @@ pub fn fold_expr_closure<F>(f: &mut F, n
+ where
+ F: Fold + ?Sized,
+ {
+ ExprClosure {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
+ movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
+ capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
+- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
++ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
+ inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
+- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
++ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_continue<F>(f: &mut F, node: ExprContinue) -> ExprContinue
+ where
+ F: Fold + ?Sized,
+@@ -1253,17 +1234,17 @@ where
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_expr_field<F>(f: &mut F, node: ExprField) -> ExprField
+ where
+ F: Fold + ?Sized,
+ {
+ ExprField {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ member: f.fold_member(node.member),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_for_loop<F>(f: &mut F, node: ExprForLoop) -> ExprForLoop
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1322,17 +1303,17 @@ where
+ pub fn fold_expr_let<F>(f: &mut F, node: ExprLet) -> ExprLet
+ where
+ F: Fold + ?Sized,
+ {
+ ExprLet {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_expr_lit<F>(f: &mut F, node: ExprLit) -> ExprLit
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1379,17 +1360,17 @@ where
+ #[cfg(feature = "full")]
+ pub fn fold_expr_method_call<F>(f: &mut F, node: ExprMethodCall) -> ExprMethodCall
+ where
+ F: Fold + ?Sized,
+ {
+ ExprMethodCall {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
+ paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
+ args: FoldHelper::lift(node.args, |it| f.fold_expr(it)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_expr_paren<F>(f: &mut F, node: ExprParen) -> ExprParen
+@@ -1427,32 +1408,32 @@ where
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_reference<F>(f: &mut F, node: ExprReference) -> ExprReference
+ where
+ F: Fold + ?Sized,
+ {
+ ExprReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ raw: node.raw,
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_repeat<F>(f: &mut F, node: ExprRepeat) -> ExprRepeat
+ where
+ F: Fold + ?Sized,
+ {
+ ExprRepeat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_return<F>(f: &mut F, node: ExprReturn) -> ExprReturn
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1479,17 +1460,17 @@ where
+ #[cfg(feature = "full")]
+ pub fn fold_expr_try<F>(f: &mut F, node: ExprTry) -> ExprTry
+ where
+ F: Fold + ?Sized,
+ {
+ ExprTry {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
++ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_expr_try_block<F>(f: &mut F, node: ExprTryBlock) -> ExprTryBlock
+ where
+ F: Fold + ?Sized,
+ {
+ ExprTryBlock {
+@@ -1512,17 +1493,17 @@ where
+ #[cfg(feature = "full")]
+ pub fn fold_expr_type<F>(f: &mut F, node: ExprType) -> ExprType
+ where
+ F: Fold + ?Sized,
+ {
+ ExprType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_expr_unary<F>(f: &mut F, node: ExprUnary) -> ExprUnary
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1571,41 +1552,41 @@ where
+ pub fn fold_field<F>(f: &mut F, node: Field) -> Field
+ where
+ F: Fold + ?Sized,
+ {
+ Field {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ ty: f.fold_type(node.ty),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_field_pat<F>(f: &mut F, node: FieldPat) -> FieldPat
+ where
+ F: Fold + ?Sized,
+ {
+ FieldPat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_field_value<F>(f: &mut F, node: FieldValue) -> FieldValue
+ where
+ F: Fold + ?Sized,
+ {
+ FieldValue {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ expr: f.fold_expr(node.expr),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_fields<F>(f: &mut F, node: Fields) -> Fields
+ where
+ F: Fold + ?Sized,
+ {
+@@ -1676,57 +1657,57 @@ where
+ pub fn fold_foreign_item_fn<F>(f: &mut F, node: ForeignItemFn) -> ForeignItemFn
+ where
+ F: Fold + ?Sized,
+ {
+ ForeignItemFn {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_foreign_item_macro<F>(f: &mut F, node: ForeignItemMacro) -> ForeignItemMacro
+ where
+ F: Fold + ?Sized,
+ {
+ ForeignItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_foreign_item_static<F>(f: &mut F, node: ForeignItemStatic) -> ForeignItemStatic
+ where
+ F: Fold + ?Sized,
+ {
+ ForeignItemStatic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_foreign_item_type<F>(f: &mut F, node: ForeignItemType) -> ForeignItemType
+ where
+ F: Fold + ?Sized,
+ {
+ ForeignItemType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_generic_argument<F>(f: &mut F, node: GenericArgument) -> GenericArgument
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+@@ -1774,19 +1755,19 @@ where
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_generics<F>(f: &mut F, node: Generics) -> Generics
+ where
+ F: Fold + ?Sized,
+ {
+ Generics {
+- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
++ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
+ params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
+- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
++ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+ }
+ pub fn fold_ident<F>(f: &mut F, node: Ident) -> Ident
+ where
+ F: Fold + ?Sized,
+ {
+ let mut node = node;
+@@ -1814,32 +1795,32 @@ where
+ F: Fold + ?Sized,
+ {
+ ImplItemConst {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: f.fold_expr(node.expr),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_impl_item_macro<F>(f: &mut F, node: ImplItemMacro) -> ImplItemMacro
+ where
+ F: Fold + ?Sized,
+ {
+ ImplItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_impl_item_method<F>(f: &mut F, node: ImplItemMethod) -> ImplItemMethod
+ where
+ F: Fold + ?Sized,
+ {
+ ImplItemMethod {
+@@ -1857,19 +1838,19 @@ where
+ {
+ ImplItemType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_index<F>(f: &mut F, node: Index) -> Index
+ where
+ F: Fold + ?Sized,
+ {
+ Index {
+@@ -1908,21 +1889,21 @@ pub fn fold_item_const<F>(f: &mut F, nod
+ where
+ F: Fold + ?Sized,
+ {
+ ItemConst {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_enum<F>(f: &mut F, node: ItemEnum) -> ItemEnum
+ where
+ F: Fold + ?Sized,
+ {
+ ItemEnum {
+@@ -1947,17 +1928,17 @@ where
+ crate_token: Token![crate](tokens_helper(f, &node.crate_token.span)),
+ ident: f.fold_ident(node.ident),
+ rename: (node.rename).map(|it| {
+ (
+ Token![as](tokens_helper(f, &(it).0.span)),
+ f.fold_ident((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_fn<F>(f: &mut F, node: ItemFn) -> ItemFn
+ where
+ F: Fold + ?Sized,
+ {
+ ItemFn {
+@@ -2006,17 +1987,17 @@ where
+ pub fn fold_item_macro<F>(f: &mut F, node: ItemMacro) -> ItemMacro
+ where
+ F: Fold + ?Sized,
+ {
+ ItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_macro2<F>(f: &mut F, node: ItemMacro2) -> ItemMacro2
+ where
+ F: Fold + ?Sized,
+ {
+ ItemMacro2 {
+@@ -2038,101 +2019,101 @@ where
+ mod_token: Token![mod](tokens_helper(f, &node.mod_token.span)),
+ ident: f.fold_ident(node.ident),
+ content: (node.content).map(|it| {
+ (
+ Brace(tokens_helper(f, &(it).0.span)),
+ FoldHelper::lift((it).1, |it| f.fold_item(it)),
+ )
+ }),
+- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_static<F>(f: &mut F, node: ItemStatic) -> ItemStatic
+ where
+ F: Fold + ?Sized,
+ {
+ ItemStatic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_struct<F>(f: &mut F, node: ItemStruct) -> ItemStruct
+ where
+ F: Fold + ?Sized,
+ {
+ ItemStruct {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_trait<F>(f: &mut F, node: ItemTrait) -> ItemTrait
+ where
+ F: Fold + ?Sized,
+ {
+ ItemTrait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ unsafety: (node.unsafety).map(|it| Token![unsafe](tokens_helper(f, &it.span))),
+ auto_token: (node.auto_token).map(|it| Token![auto](tokens_helper(f, &it.span))),
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
+ brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
+ items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_trait_alias<F>(f: &mut F, node: ItemTraitAlias) -> ItemTraitAlias
+ where
+ F: Fold + ?Sized,
+ {
+ ItemTraitAlias {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_type<F>(f: &mut F, node: ItemType) -> ItemType
+ where
+ F: Fold + ?Sized,
+ {
+ ItemType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_item_union<F>(f: &mut F, node: ItemUnion) -> ItemUnion
+ where
+ F: Fold + ?Sized,
+ {
+ ItemUnion {
+@@ -2148,29 +2129,29 @@ where
+ pub fn fold_item_use<F>(f: &mut F, node: ItemUse) -> ItemUse
+ where
+ F: Fold + ?Sized,
+ {
+ ItemUse {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ use_token: Token![use](tokens_helper(f, &node.use_token.span)),
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ tree: f.fold_use_tree(node.tree),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_label<F>(f: &mut F, node: Label) -> Label
+ where
+ F: Fold + ?Sized,
+ {
+ Label {
+ name: f.fold_lifetime(node.name),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ }
+ }
+ pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
+ where
+ F: Fold + ?Sized,
+ {
+ Lifetime {
+ apostrophe: f.fold_span(node.apostrophe),
+@@ -2180,97 +2161,89 @@ where
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lifetime_def<F>(f: &mut F, node: LifetimeDef) -> LifetimeDef
+ where
+ F: Fold + ?Sized,
+ {
+ LifetimeDef {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+ Lit::Str(_binding_0) => Lit::Str(f.fold_lit_str(_binding_0)),
+ Lit::ByteStr(_binding_0) => Lit::ByteStr(f.fold_lit_byte_str(_binding_0)),
+ Lit::Byte(_binding_0) => Lit::Byte(f.fold_lit_byte(_binding_0)),
+ Lit::Char(_binding_0) => Lit::Char(f.fold_lit_char(_binding_0)),
+ Lit::Int(_binding_0) => Lit::Int(f.fold_lit_int(_binding_0)),
+ Lit::Float(_binding_0) => Lit::Float(f.fold_lit_float(_binding_0)),
+ Lit::Bool(_binding_0) => Lit::Bool(f.fold_lit_bool(_binding_0)),
+ Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
+ where
+ F: Fold + ?Sized,
+ {
+ LitBool {
+ value: node.value,
+ span: f.fold_span(node.span),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
+ where
+ F: Fold + ?Sized,
+ {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
+ where
+ F: Fold + ?Sized,
+ {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
+ where
+ F: Fold + ?Sized,
+ {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
+ where
+ F: Fold + ?Sized,
+ {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
+ where
+ F: Fold + ?Sized,
+ {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
+ where
+ F: Fold + ?Sized,
+ {
+ let span = f.fold_span(node.span());
+ let mut node = node;
+ node.set_span(span);
+ node
+@@ -2281,21 +2254,21 @@ where
+ F: Fold + ?Sized,
+ {
+ Local {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_macro<F>(f: &mut F, node: Macro) -> Macro
+ where
+ F: Fold + ?Sized,
+ {
+ Macro {
+@@ -2356,30 +2329,30 @@ where
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_meta_name_value<F>(f: &mut F, node: MetaNameValue) -> MetaNameValue
+ where
+ F: Fold + ?Sized,
+ {
+ MetaNameValue {
+ path: f.fold_path(node.path),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ lit: f.fold_lit(node.lit),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_method_turbofish<F>(f: &mut F, node: MethodTurbofish) -> MethodTurbofish
+ where
+ F: Fold + ?Sized,
+ {
+ MethodTurbofish {
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_nested_meta<F>(f: &mut F, node: NestedMeta) -> NestedMeta
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+@@ -2444,17 +2417,17 @@ where
+ {
+ PatIdent {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ by_ref: (node.by_ref).map(|it| Token![ref](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+ subpat: (node.subpat).map(|it| {
+ (
+- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
++ Token ! [@](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_pat(*(it).1)),
+ )
+ }),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_pat_lit<F>(f: &mut F, node: PatLit) -> PatLit
+ where
+@@ -2477,17 +2450,17 @@ where
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_pat_or<F>(f: &mut F, node: PatOr) -> PatOr
+ where
+ F: Fold + ?Sized,
+ {
+ PatOr {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
++ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
+ cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_pat_path<F>(f: &mut F, node: PatPath) -> PatPath
+ where
+ F: Fold + ?Sized,
+ {
+@@ -2511,17 +2484,17 @@ where
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_pat_reference<F>(f: &mut F, node: PatReference) -> PatReference
+ where
+ F: Fold + ?Sized,
+ {
+ PatReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_pat_rest<F>(f: &mut F, node: PatRest) -> PatRest
+ where
+ F: Fold + ?Sized,
+@@ -2580,17 +2553,17 @@ where
+ #[cfg(feature = "full")]
+ pub fn fold_pat_type<F>(f: &mut F, node: PatType) -> PatType
+ where
+ F: Fold + ?Sized,
+ {
+ PatType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: Box::new(f.fold_pat(*node.pat)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_pat_wild<F>(f: &mut F, node: PatWild) -> PatWild
+ where
+ F: Fold + ?Sized,
+ {
+@@ -2600,17 +2573,17 @@ where
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_path<F>(f: &mut F, node: Path) -> Path
+ where
+ F: Fold + ?Sized,
+ {
+ Path {
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_path_arguments<F>(f: &mut F, node: PathArguments) -> PathArguments
+ where
+ F: Fold + ?Sized,
+ {
+@@ -2636,96 +2609,96 @@ where
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_predicate_eq<F>(f: &mut F, node: PredicateEq) -> PredicateEq
+ where
+ F: Fold + ?Sized,
+ {
+ PredicateEq {
+ lhs_ty: f.fold_type(node.lhs_ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ rhs_ty: f.fold_type(node.rhs_ty),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_predicate_lifetime<F>(f: &mut F, node: PredicateLifetime) -> PredicateLifetime
+ where
+ F: Fold + ?Sized,
+ {
+ PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_predicate_type<F>(f: &mut F, node: PredicateType) -> PredicateType
+ where
+ F: Fold + ?Sized,
+ {
+ PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_qself<F>(f: &mut F, node: QSelf) -> QSelf
+ where
+ F: Fold + ?Sized,
+ {
+ QSelf {
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_range_limits<F>(f: &mut F, node: RangeLimits) -> RangeLimits
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+ RangeLimits::HalfOpen(_binding_0) => {
+ RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
+ }
+ RangeLimits::Closed(_binding_0) => {
+- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
++ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_receiver<F>(f: &mut F, node: Receiver) -> Receiver
+ where
+ F: Fold + ?Sized,
+ {
+ Receiver {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ reference: (node.reference).map(|it| {
+ (
+- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
++ Token ! [&](tokens_helper(f, &(it).0.spans)),
+ ((it).1).map(|it| f.fold_lifetime(it)),
+ )
+ }),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ self_token: Token![self](tokens_helper(f, &node.self_token.span)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_return_type<F>(f: &mut F, node: ReturnType) -> ReturnType
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+ ReturnType::Default => ReturnType::Default,
+ ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
+- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
++ Token ! [->](tokens_helper(f, &_binding_0.spans)),
+ Box::new(f.fold_type(*_binding_1)),
+ ),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_signature<F>(f: &mut F, node: Signature) -> Signature
+ where
+ F: Fold + ?Sized,
+@@ -2756,17 +2729,17 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+ Stmt::Local(_binding_0) => Stmt::Local(f.fold_local(_binding_0)),
+ Stmt::Item(_binding_0) => Stmt::Item(f.fold_item(_binding_0)),
+ Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
+ Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
+ f.fold_expr(_binding_0),
+- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
++ Token ! [;](tokens_helper(f, &_binding_1.spans)),
+ ),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_trait_bound<F>(f: &mut F, node: TraitBound) -> TraitBound
+ where
+ F: Fold + ?Sized,
+ {
+@@ -2780,17 +2753,17 @@ where
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_trait_bound_modifier<F>(f: &mut F, node: TraitBoundModifier) -> TraitBoundModifier
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+ TraitBoundModifier::None => TraitBoundModifier::None,
+ TraitBoundModifier::Maybe(_binding_0) => {
+- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
++ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_trait_item<F>(f: &mut F, node: TraitItem) -> TraitItem
+ where
+ F: Fold + ?Sized,
+ {
+@@ -2807,69 +2780,69 @@ where
+ pub fn fold_trait_item_const<F>(f: &mut F, node: TraitItemConst) -> TraitItemConst
+ where
+ F: Fold + ?Sized,
+ {
+ TraitItemConst {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_trait_item_macro<F>(f: &mut F, node: TraitItemMacro) -> TraitItemMacro
+ where
+ F: Fold + ?Sized,
+ {
+ TraitItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_trait_item_method<F>(f: &mut F, node: TraitItemMethod) -> TraitItemMethod
+ where
+ F: Fold + ?Sized,
+ {
+ TraitItemMethod {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_trait_item_type<F>(f: &mut F, node: TraitItemType) -> TraitItemType
+ where
+ F: Fold + ?Sized,
+ {
+ TraitItemType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_type((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type<F>(f: &mut F, node: Type) -> Type
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+@@ -2894,17 +2867,17 @@ where
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_array<F>(f: &mut F, node: TypeArray) -> TypeArray
+ where
+ F: Fold + ?Sized,
+ {
+ TypeArray {
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ elem: Box::new(f.fold_type(*node.elem)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: f.fold_expr(node.len),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_bare_fn<F>(f: &mut F, node: TypeBareFn) -> TypeBareFn
+ where
+ F: Fold + ?Sized,
+ {
+@@ -2969,19 +2942,19 @@ where
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_param<F>(f: &mut F, node: TypeParam) -> TypeParam
+ where
+ F: Fold + ?Sized,
+ {
+ TypeParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_param_bound<F>(f: &mut F, node: TypeParamBound) -> TypeParamBound
+ where
+ F: Fold + ?Sized,
+ {
+@@ -3013,29 +2986,29 @@ where
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_ptr<F>(f: &mut F, node: TypePtr) -> TypePtr
+ where
+ F: Fold + ?Sized,
+ {
+ TypePtr {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_reference<F>(f: &mut F, node: TypeReference) -> TypeReference
+ where
+ F: Fold + ?Sized,
+ {
+ TypeReference {
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_type_slice<F>(f: &mut F, node: TypeSlice) -> TypeSlice
+ where
+@@ -3067,28 +3040,28 @@ where
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_un_op<F>(f: &mut F, node: UnOp) -> UnOp
+ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
+ UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
+- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_use_glob<F>(f: &mut F, node: UseGlob) -> UseGlob
+ where
+ F: Fold + ?Sized,
+ {
+ UseGlob {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_use_group<F>(f: &mut F, node: UseGroup) -> UseGroup
+ where
+ F: Fold + ?Sized,
+ {
+ UseGroup {
+@@ -3107,17 +3080,17 @@ where
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_use_path<F>(f: &mut F, node: UsePath) -> UsePath
+ where
+ F: Fold + ?Sized,
+ {
+ UsePath {
+ ident: f.fold_ident(node.ident),
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+ }
+ #[cfg(feature = "full")]
+ pub fn fold_use_rename<F>(f: &mut F, node: UseRename) -> UseRename
+ where
+ F: Fold + ?Sized,
+ {
+@@ -3142,31 +3115,31 @@ where
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_variadic<F>(f: &mut F, node: Variadic) -> Variadic
+ where
+ F: Fold + ?Sized,
+ {
+ Variadic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
++ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_variant<F>(f: &mut F, node: Variant) -> Variant
+ where
+ F: Fold + ?Sized,
+ {
+ Variant {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_vis_crate<F>(f: &mut F, node: VisCrate) -> VisCrate
+ where
+diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/gen/hash.rs
+@@ -0,0 +1,2691 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++use std::hash::{Hash, Hasher};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Abi {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AngleBracketedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.colon2_token.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Arm {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.guard.hash(state);
++ self.body.hash(state);
++ self.comma.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AttrStyle {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ AttrStyle::Outer => {
++ state.write_u8(0u8);
++ }
++ AttrStyle::Inner(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Attribute {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.style.hash(state);
++ self.path.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BareFnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.name.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BinOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ BinOp::Add(_) => {
++ state.write_u8(0u8);
++ }
++ BinOp::Sub(_) => {
++ state.write_u8(1u8);
++ }
++ BinOp::Mul(_) => {
++ state.write_u8(2u8);
++ }
++ BinOp::Div(_) => {
++ state.write_u8(3u8);
++ }
++ BinOp::Rem(_) => {
++ state.write_u8(4u8);
++ }
++ BinOp::And(_) => {
++ state.write_u8(5u8);
++ }
++ BinOp::Or(_) => {
++ state.write_u8(6u8);
++ }
++ BinOp::BitXor(_) => {
++ state.write_u8(7u8);
++ }
++ BinOp::BitAnd(_) => {
++ state.write_u8(8u8);
++ }
++ BinOp::BitOr(_) => {
++ state.write_u8(9u8);
++ }
++ BinOp::Shl(_) => {
++ state.write_u8(10u8);
++ }
++ BinOp::Shr(_) => {
++ state.write_u8(11u8);
++ }
++ BinOp::Eq(_) => {
++ state.write_u8(12u8);
++ }
++ BinOp::Lt(_) => {
++ state.write_u8(13u8);
++ }
++ BinOp::Le(_) => {
++ state.write_u8(14u8);
++ }
++ BinOp::Ne(_) => {
++ state.write_u8(15u8);
++ }
++ BinOp::Ge(_) => {
++ state.write_u8(16u8);
++ }
++ BinOp::Gt(_) => {
++ state.write_u8(17u8);
++ }
++ BinOp::AddEq(_) => {
++ state.write_u8(18u8);
++ }
++ BinOp::SubEq(_) => {
++ state.write_u8(19u8);
++ }
++ BinOp::MulEq(_) => {
++ state.write_u8(20u8);
++ }
++ BinOp::DivEq(_) => {
++ state.write_u8(21u8);
++ }
++ BinOp::RemEq(_) => {
++ state.write_u8(22u8);
++ }
++ BinOp::BitXorEq(_) => {
++ state.write_u8(23u8);
++ }
++ BinOp::BitAndEq(_) => {
++ state.write_u8(24u8);
++ }
++ BinOp::BitOrEq(_) => {
++ state.write_u8(25u8);
++ }
++ BinOp::ShlEq(_) => {
++ state.write_u8(26u8);
++ }
++ BinOp::ShrEq(_) => {
++ state.write_u8(27u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Binding {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Block {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.stmts.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BoundLifetimes {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ConstParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Constraint {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for Data {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Data::Struct(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Data::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Data::Union(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DeriveInput {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.data.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Expr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Expr::Binary(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Expr::Call(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Expr::Cast(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Expr::Field(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ state.write_u8(16u8);
++ v0.hash(state);
++ }
++ Expr::Index(v0) => {
++ state.write_u8(17u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ state.write_u8(18u8);
++ v0.hash(state);
++ }
++ Expr::Lit(v0) => {
++ state.write_u8(19u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ state.write_u8(20u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ state.write_u8(21u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ state.write_u8(22u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ state.write_u8(23u8);
++ v0.hash(state);
++ }
++ Expr::Paren(v0) => {
++ state.write_u8(24u8);
++ v0.hash(state);
++ }
++ Expr::Path(v0) => {
++ state.write_u8(25u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ state.write_u8(26u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ state.write_u8(27u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ state.write_u8(28u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ state.write_u8(29u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ state.write_u8(30u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ state.write_u8(31u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ state.write_u8(32u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ state.write_u8(33u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ state.write_u8(34u8);
++ v0.hash(state);
++ }
++ Expr::Unary(v0) => {
++ state.write_u8(35u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ state.write_u8(36u8);
++ v0.hash(state);
++ }
++ Expr::Verbatim(v0) => {
++ state.write_u8(37u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ state.write_u8(38u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ state.write_u8(39u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssign {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssignOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAsync {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.capture.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAwait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprBinary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBreak {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.func.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCast {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprClosure {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.asyncness.hash(state);
++ self.movability.hash(state);
++ self.capture.hash(state);
++ self.inputs.hash(state);
++ self.output.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprContinue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprField {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ self.member.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprForLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprIf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.cond.hash(state);
++ self.then_branch.hash(state);
++ self.else_branch.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprIndex {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.index.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLet {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMatch {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.arms.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMethodCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.receiver.hash(state);
++ self.method.hash(state);
++ self.turbofish.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.from.hash(state);
++ self.limits.hash(state);
++ self.to.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRepeat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReturn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ self.rest.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTry {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTryBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprUnary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.op.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprUnsafe {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprWhile {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.cond.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprYield {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Field {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldPat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Fields {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Fields::Named(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Fields::Unnamed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Fields::Unit => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsNamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.named.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsUnnamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.unnamed.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for File {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.shebang.hash(state);
++ self.attrs.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ FnArg::Receiver(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ FnArg::Typed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ForeignItem::Fn(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ForeignItem::Static(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ForeignItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ForeignItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ForeignItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericArgument::Type(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericArgument::Binding(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ GenericArgument::Constraint(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ GenericArgument::Const(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for GenericMethodArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericMethodArgument::Const(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericParam::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericParam::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericParam::Const(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Generics {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lt_token.hash(state);
++ self.params.hash(state);
++ self.gt_token.hash(state);
++ self.where_clause.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ImplItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ImplItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ImplItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ImplItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ImplItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Item {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Item::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Item::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Item::ExternCrate(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Item::Fn(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Item::ForeignMod(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Item::Impl(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Item::Macro(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Item::Macro2(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Item::Mod(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Item::Static(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Item::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Item::Trait(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Item::TraitAlias(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Item::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Item::Union(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ Item::Use(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ Item::Verbatim(v0) => {
++ state.write_u8(16u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemExternCrate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemForeignMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.abi.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemImpl {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.defaultness.hash(state);
++ self.unsafety.hash(state);
++ self.generics.hash(state);
++ self.trait_.hash(state);
++ self.self_ty.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro2 {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ TokenStreamHelper(&self.rules).hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.content.hash(state);
++ self.semi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.unsafety.hash(state);
++ self.auto_token.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.supertraits.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTraitAlias {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUse {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.leading_colon.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Label {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for LifetimeDef {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lifetime.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++impl Hash for Lit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Lit::Str(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Lit::ByteStr(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Lit::Byte(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Lit::Char(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Lit::Int(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Lit::Float(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Lit::Bool(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Lit::Verbatim(v0) => {
++ state.write_u8(7u8);
++ v0.to_string().hash(state);
++ }
++ }
++ }
++}
++impl Hash for LitBool {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.value.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Local {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.init.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Macro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.delimiter.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MacroDelimiter {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ MacroDelimiter::Paren(_) => {
++ state.write_u8(0u8);
++ }
++ MacroDelimiter::Brace(_) => {
++ state.write_u8(1u8);
++ }
++ MacroDelimiter::Bracket(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Meta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Meta::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Meta::List(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Meta::NameValue(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaList {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.nested.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaNameValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for MethodTurbofish {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for NestedMeta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ NestedMeta::Meta(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ NestedMeta::Lit(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ParenthesizedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.inputs.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Pat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Pat::Box(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Pat::Ident(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Pat::Lit(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Pat::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Pat::Or(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Pat::Path(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Pat::Range(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Pat::Reference(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Pat::Rest(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Pat::Slice(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Pat::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Pat::Tuple(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Pat::TupleStruct(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Pat::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Pat::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ Pat::Wild(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatIdent {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.by_ref.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.subpat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatOr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.leading_vert.hash(state);
++ self.cases.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lo.hash(state);
++ self.limits.hash(state);
++ self.hi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRest {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTupleStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatWild {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Path {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.leading_colon.hash(state);
++ self.segments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ PathArguments::None => {
++ state.write_u8(0u8);
++ }
++ PathArguments::AngleBracketed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ PathArguments::Parenthesized(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathSegment {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.arguments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateEq {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lhs_ty.hash(state);
++ self.rhs_ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateLifetime {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.bounded_ty.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for QSelf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ty.hash(state);
++ self.position.hash(state);
++ self.as_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for RangeLimits {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ RangeLimits::HalfOpen(_) => {
++ state.write_u8(0u8);
++ }
++ RangeLimits::Closed(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Receiver {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.reference.hash(state);
++ self.mutability.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ReturnType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ReturnType::Default => {
++ state.write_u8(0u8);
++ }
++ ReturnType::Type(_, v1) => {
++ state.write_u8(1u8);
++ v1.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Signature {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.constness.hash(state);
++ self.asyncness.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Stmt {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Stmt::Local(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Stmt::Item(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Stmt::Expr(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Stmt::Semi(v0, _) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.paren_token.hash(state);
++ self.modifier.hash(state);
++ self.lifetimes.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBoundModifier {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitBoundModifier::None => {
++ state.write_u8(0u8);
++ }
++ TraitBoundModifier::Maybe(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TraitItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ TraitItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ TraitItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ TraitItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.sig.hash(state);
++ self.default.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Type {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Type::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Type::BareFn(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Type::Group(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Type::ImplTrait(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Type::Infer(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Type::Macro(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Type::Never(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Type::Paren(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Type::Path(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Type::Ptr(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Type::Reference(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Type::Slice(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Type::TraitObject(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Type::Tuple(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Type::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeBareFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeImplTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeInfer {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.mac.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeNever {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParamBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TypeParamBound::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePtr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.const_token.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTraitObject {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.dyn_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elems.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for UnOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UnOp::Deref(_) => {
++ state.write_u8(0u8);
++ }
++ UnOp::Not(_) => {
++ state.write_u8(1u8);
++ }
++ UnOp::Neg(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGlob {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseName {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UsePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseRename {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseTree {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UseTree::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ UseTree::Name(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ UseTree::Rename(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ UseTree::Glob(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ UseTree::Group(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variadic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variant {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.fields.hash(state);
++ self.discriminant.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisCrate {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisPublic {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisRestricted {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.in_token.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Visibility {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Visibility::Public(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Visibility::Crate(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Visibility::Restricted(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Visibility::Inherited => {
++ state.write_u8(3u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WhereClause {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.predicates.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WherePredicate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ WherePredicate::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ WherePredicate::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ WherePredicate::Eq(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
+--- third_party/rust/syn/src/gen/visit.rs
++++ third_party/rust/syn/src/gen/visit.rs
+@@ -15,27 +15,26 @@ macro_rules! full {
+ };
+ }
+ #[cfg(all(feature = "derive", not(feature = "full")))]
+ macro_rules! full {
+ ($e:expr) => {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+ /// See the [module documentation] for details.
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit"` feature.*
++/// *This trait is available only if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi(&mut self, i: &'ast Abi) {
+ visit_abi(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_angle_bracketed_generic_arguments(&mut self, i: &'ast AngleBracketedGenericArguments) {
+ visit_angle_bracketed_generic_arguments(self, i)
+@@ -429,45 +428,37 @@ pub trait Visit<'ast> {
+ }
+ fn visit_lifetime(&mut self, i: &'ast Lifetime) {
+ visit_lifetime(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
+ visit_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit(&mut self, i: &'ast Lit) {
+ visit_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool(&mut self, i: &'ast LitBool) {
+ visit_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte(&mut self, i: &'ast LitByte) {
+ visit_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
+ visit_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char(&mut self, i: &'ast LitChar) {
+ visit_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float(&mut self, i: &'ast LitFloat) {
+ visit_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int(&mut self, i: &'ast LitInt) {
+ visit_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str(&mut self, i: &'ast LitStr) {
+ visit_lit_str(self, i)
+ }
+ #[cfg(feature = "full")]
+ fn visit_local(&mut self, i: &'ast Local) {
+ visit_local(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2532,17 +2523,16 @@ where
+ for el in Punctuated::pairs(&node.bounds) {
+ let (it, p) = el.into_tuple();
+ v.visit_lifetime(it);
+ if let Some(p) = p {
+ tokens_helper(v, &p.spans);
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ match node {
+ Lit::Str(_binding_0) => {
+ v.visit_lit_str(_binding_0);
+ }
+@@ -2564,55 +2554,48 @@ where
+ Lit::Bool(_binding_0) => {
+ v.visit_lit_bool(_binding_0);
+ }
+ Lit::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ skip!(node.value);
+ v.visit_span(&node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+ #[cfg(feature = "full")]
+ pub fn visit_local<'ast, V>(v: &mut V, node: &'ast Local)
+ where
+diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
+--- third_party/rust/syn/src/gen/visit_mut.rs
++++ third_party/rust/syn/src/gen/visit_mut.rs
+@@ -15,28 +15,27 @@ macro_rules! full {
+ };
+ }
+ #[cfg(all(feature = "derive", not(feature = "full")))]
+ macro_rules! full {
+ ($e:expr) => {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+ /// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
+ /// place.
+ ///
+ /// See the [module documentation] for details.
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
++/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi_mut(&mut self, i: &mut Abi) {
+ visit_abi_mut(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_angle_bracketed_generic_arguments_mut(
+ &mut self,
+@@ -433,45 +432,37 @@ pub trait VisitMut {
+ }
+ fn visit_lifetime_mut(&mut self, i: &mut Lifetime) {
+ visit_lifetime_mut(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
+ visit_lifetime_def_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_mut(&mut self, i: &mut Lit) {
+ visit_lit_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
+ visit_lit_bool_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
+ visit_lit_byte_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
+ visit_lit_byte_str_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
+ visit_lit_char_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
+ visit_lit_float_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
+ visit_lit_int_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
+ visit_lit_str_mut(self, i)
+ }
+ #[cfg(feature = "full")]
+ fn visit_local_mut(&mut self, i: &mut Local) {
+ visit_local_mut(self, i)
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2538,17 +2529,16 @@ where
+ for el in Punctuated::pairs_mut(&mut node.bounds) {
+ let (it, p) = el.into_tuple();
+ v.visit_lifetime_mut(it);
+ if let Some(p) = p {
+ tokens_helper(v, &mut p.spans);
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
+ where
+ V: VisitMut + ?Sized,
+ {
+ match node {
+ Lit::Str(_binding_0) => {
+ v.visit_lit_str_mut(_binding_0);
+ }
+@@ -2570,55 +2560,48 @@ where
+ Lit::Bool(_binding_0) => {
+ v.visit_lit_bool_mut(_binding_0);
+ }
+ Lit::Verbatim(_binding_0) => {
+ skip!(_binding_0);
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
+ where
+ V: VisitMut + ?Sized,
+ {
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+ #[cfg(feature = "full")]
+ pub fn visit_local_mut<V>(v: &mut V, node: &mut Local)
+ where
+diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
+--- third_party/rust/syn/src/generics.rs
++++ third_party/rust/syn/src/generics.rs
+@@ -1,97 +1,108 @@
+ use super::*;
+ use crate::punctuated::{Iter, IterMut, Punctuated};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::fmt::{self, Debug};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+ pub gt_token: Option<Token![>]>,
+ pub where_clause: Option<WhereClause>,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum GenericParam {
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ Lifetime(LifetimeDef),
+
+ /// A const generic parameter: `const LENGTH: usize`.
+ Const(ConstParam),
+ }
+ }
+
+ ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+ pub ident: Ident,
+ pub colon_token: Option<Token![:]>,
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ pub eq_token: Option<Token![=]>,
+ pub default: Option<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct LifetimeDef {
+ pub attrs: Vec<Attribute>,
+ pub lifetime: Lifetime,
+ pub colon_token: Option<Token![:]>,
+ pub bounds: Punctuated<Lifetime, Token![+]>,
+ }
+ }
+
+ ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Type,
+ pub eq_token: Option<Token![=]>,
+ pub default: Option<Expr>,
+ }
+ }
+
++impl Default for Generics {
++ fn default() -> Self {
++ Generics {
++ lt_token: None,
++ params: Punctuated::new(),
++ gt_token: None,
++ where_clause: None,
++ }
++ }
++}
++
+ impl Generics {
+ /// Returns an
+ /// <code
+ /// style="padding-right:0;">Iterator&lt;Item = &amp;</code><a
+ /// href="struct.TypeParam.html"><code
+ /// style="padding-left:0;padding-right:0;">TypeParam</code></a><code
+ /// style="padding-left:0;">&gt;</code>
+ /// over the type parameters in `self.params`.
+@@ -275,100 +286,148 @@ impl<'a> Iterator for ConstParamsMut<'a>
+ } else {
+ self.next()
+ }
+ }
+ }
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct ImplGenerics<'a>(&'a Generics);
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct TypeGenerics<'a>(&'a Generics);
+
+ /// Returned by `TypeGenerics::as_turbofish`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Turbofish<'a>(&'a Generics);
+
+ #[cfg(feature = "printing")]
+ impl Generics {
+ /// Split a type's generics into the pieces required for impl'ing a trait
+ /// for that type.
+ ///
+ /// ```
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+- /// # fn main() {
+- /// # let generics: syn::Generics = Default::default();
+- /// # let name = Ident::new("MyType", Span::call_site());
++ /// # let generics: syn::Generics = Default::default();
++ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+ /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+ /// impl #impl_generics MyTrait for #name #ty_generics #where_clause {
+ /// // ...
+ /// }
+ /// }
+- /// # ;
+- /// # }
++ /// # ;
+ /// ```
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+ ImplGenerics(self),
+ TypeGenerics(self),
+ self.where_clause.as_ref(),
+ )
+ }
+ }
+
+ #[cfg(feature = "printing")]
++macro_rules! generics_wrapper_impls {
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl<'a> Clone for $ty<'a> {
++ fn clone(&self) -> Self {
++ $ty(self.0)
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Debug for $ty<'a> {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter
++ .debug_tuple(stringify!($ty))
++ .field(self.0)
++ .finish()
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Eq for $ty<'a> {}
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> PartialEq for $ty<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ self.0 == other.0
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Hash for $ty<'a> {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ self.0.hash(state);
++ }
++ }
++ };
++}
++
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(ImplGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(TypeGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(Turbofish);
++
++#[cfg(feature = "printing")]
+ impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+ }
+ }
+
+ ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+ pub lifetimes: Punctuated<LifetimeDef, Token![,]>,
+ pub gt_token: Token![>],
+ }
+ }
+
++impl Default for BoundLifetimes {
++ fn default() -> Self {
++ BoundLifetimes {
++ for_token: Default::default(),
++ lt_token: Default::default(),
++ lifetimes: Punctuated::new(),
++ gt_token: Default::default(),
++ }
++ }
++}
++
+ impl LifetimeDef {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeDef {
+ attrs: Vec::new(),
+ lifetime,
+ colon_token: None,
+ bounds: Punctuated::new(),
+ }
+@@ -386,122 +445,118 @@ impl From<Ident> for TypeParam {
+ default: None,
+ }
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+ Lifetime(Lifetime),
+ }
+ }
+
+ ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+ pub modifier: TraitBoundModifier,
+ /// The `for<'a>` in `for<'a> Foo<&'a T>`
+ pub lifetimes: Option<BoundLifetimes>,
+ /// The `Foo<&'a T>` in `for<'a> Foo<&'a T>`
+ pub path: Path,
+ }
+ }
+
+ ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+ }
+ }
+
+ ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct WhereClause {
+ pub where_token: Token![where],
+ pub predicates: Punctuated<WherePredicate, Token![,]>,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum WherePredicate {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ Lifetime(PredicateLifetime),
+
+ /// An equality predicate in a `where` clause (unsupported).
+ Eq(PredicateEq),
+ }
+ }
+
+ ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+ pub lifetimes: Option<BoundLifetimes>,
+ /// The type being bounded
+ pub bounded_ty: Type,
+ pub colon_token: Token![:],
+ /// Trait and lifetime bounds (`Clone+Send+'static`)
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+ }
+
+ ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+ pub colon_token: Token![:],
+ pub bounds: Punctuated<Lifetime, Token![+]>,
+ }
+ }
+
+ ast_struct! {
+ /// An equality predicate in a `where` clause (unsupported).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateEq {
+ pub lhs_ty: Type,
+ pub eq_token: Token![=],
+ pub rhs_ty: Type,
+ }
+ }
+
+@@ -516,38 +571,36 @@ pub mod parsing {
+ if !input.peek(Token![<]) {
+ return Ok(Generics::default());
+ }
+
+ let lt_token: Token![<] = input.parse()?;
+
+ let mut params = Punctuated::new();
+ let mut allow_lifetime_param = true;
+- let mut allow_type_param = true;
+ loop {
+ if input.peek(Token![>]) {
+ break;
+ }
+
+ let attrs = input.call(Attribute::parse_outer)?;
+ let lookahead = input.lookahead1();
+ if allow_lifetime_param && lookahead.peek(Lifetime) {
+ params.push_value(GenericParam::Lifetime(LifetimeDef {
+ attrs,
+ ..input.parse()?
+ }));
+- } else if allow_type_param && lookahead.peek(Ident) {
++ } else if lookahead.peek(Ident) {
+ allow_lifetime_param = false;
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+ ..input.parse()?
+ }));
+ } else if lookahead.peek(Token![const]) {
+ allow_lifetime_param = false;
+- allow_type_param = false;
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+ }));
+ } else {
+ return Err(lookahead.error());
+ }
+
+@@ -660,67 +713,63 @@ pub mod parsing {
+ } else {
+ Ok(None)
+ }
+ }
+ }
+
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let has_colon;
+- let has_default;
+- Ok(TypeParam {
+- attrs: input.call(Attribute::parse_outer)?,
+- ident: input.parse()?,
+- colon_token: {
+- if input.peek(Token![:]) {
+- has_colon = true;
+- Some(input.parse()?)
+- } else {
+- has_colon = false;
+- None
++ let attrs = input.call(Attribute::parse_outer)?;
++ let ident: Ident = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++
++ let begin_bound = input.fork();
++ let mut is_maybe_const = false;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
++ break;
++ }
++ if input.peek(Token![?]) && input.peek2(Token![const]) {
++ input.parse::<Token![?]>()?;
++ input.parse::<Token![const]>()?;
++ is_maybe_const = true;
++ }
++ let value: TypeParamBound = input.parse()?;
++ bounds.push_value(value);
++ if !input.peek(Token![+]) {
++ break;
+ }
+- },
+- bounds: {
+- let mut bounds = Punctuated::new();
+- if has_colon {
+- loop {
+- if input.peek(Token![,])
+- || input.peek(Token![>])
+- || input.peek(Token![=])
+- {
+- break;
+- }
+- let value = input.parse()?;
+- bounds.push_value(value);
+- if !input.peek(Token![+]) {
+- break;
+- }
+- let punct = input.parse()?;
+- bounds.push_punct(punct);
+- }
+- }
+- bounds
+- },
+- eq_token: {
+- if input.peek(Token![=]) {
+- has_default = true;
+- Some(input.parse()?)
+- } else {
+- has_default = false;
+- None
+- }
+- },
+- default: {
+- if has_default {
+- Some(input.parse()?)
+- } else {
+- None
+- }
+- },
++ let punct: Token![+] = input.parse()?;
++ bounds.push_punct(punct);
++ }
++ }
++
++ let mut eq_token: Option<Token![=]> = input.parse()?;
++ let mut default = if eq_token.is_some() {
++ Some(input.parse::<Type>()?)
++ } else {
++ None
++ };
++
++ if is_maybe_const {
++ bounds.clear();
++ eq_token = None;
++ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
++ }
++
++ Ok(TypeParam {
++ attrs,
++ ident,
++ colon_token,
++ bounds,
++ eq_token,
++ default,
+ })
+ }
+ }
+
+ impl Parse for TypeParamBound {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Lifetime) {
+ return input.parse().map(TypeParamBound::Lifetime);
+@@ -893,16 +942,18 @@ pub mod parsing {
+ }
+ }
+
+ #[cfg(feature = "printing")]
+ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
++ #[cfg(feature = "full")]
++ use proc_macro2::TokenTree;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
+
+ impl ToTokens for Generics {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if self.params.is_empty() {
+@@ -1075,19 +1126,35 @@ mod printing {
+ impl ToTokens for TypeParam {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.ident.to_tokens(tokens);
+ if !self.bounds.is_empty() {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
++ #[cfg(feature = "full")]
++ {
++ if self.eq_token.is_none() {
++ if let Type::Verbatim(default) = default {
++ let mut iter = default.clone().into_iter();
++ match (iter.next(), iter.next()) {
++ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
++ if q.as_char() == '?' && c == "const" =>
++ {
++ return default.to_tokens(tokens);
++ }
++ _ => {}
++ }
++ }
++ }
++ }
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+
+ impl ToTokens for TraitBound {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ let to_tokens = |tokens: &mut TokenStream| {
+ self.modifier.to_tokens(tokens);
+@@ -1112,19 +1179,19 @@ mod printing {
+
+ impl ToTokens for ConstParam {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.const_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+
+ impl ToTokens for WhereClause {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ if !self.predicates.is_empty() {
+ self.where_token.to_tokens(tokens);
+diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
+--- third_party/rust/syn/src/item.rs
++++ third_party/rust/syn/src/item.rs
+@@ -1,32 +1,30 @@
+ use super::*;
+-use crate::derive::{Data, DeriveInput};
++use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::punctuated::Punctuated;
+ use proc_macro2::TokenStream;
+
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
++#[cfg(feature = "parsing")]
++use std::mem;
+
+ ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Item #manual_extra_traits {
++ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ Enum(ItemEnum),
+
+ /// An `extern crate` item: `extern crate serde`.
+ ExternCrate(ItemExternCrate),
+@@ -78,90 +76,90 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub enum_token: Token![enum],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub brace_token: token::Brace,
+ pub variants: Punctuated<Variant, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub extern_token: Token![extern],
+ pub crate_token: Token![crate],
+ pub ident: Ident,
+ pub rename: Option<(Token![as], Ident)>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub sig: Signature,
+ pub block: Box<Block>,
+ }
+ }
+
+ ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub abi: Abi,
+ pub brace_token: token::Brace,
+ pub items: Vec<ForeignItem>,
+ }
+ }
+
+ ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub impl_token: Token![impl],
+ pub generics: Generics,
+ /// Trait this impl implements.
+ pub trait_: Option<(Option<Token![!]>, Path, Token![for])>,
+@@ -170,57 +168,57 @@ ast_struct! {
+ pub brace_token: token::Brace,
+ pub items: Vec<ImplItem>,
+ }
+ }
+
+ ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+ pub ident: Option<Ident>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+ ast_struct! {
+ /// A 2.0-style declarative macro introduced by the `macro` keyword.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- pub struct ItemMacro2 #manual_extra_traits {
++ /// *This type is available only if Syn is built with the `"full"` feature.*
++ pub struct ItemMacro2 {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub macro_token: Token![macro],
+ pub ident: Ident,
+ pub rules: TokenStream,
+ }
+ }
+
+ ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub mod_token: Token![mod],
+ pub ident: Ident,
+ pub content: Option<(token::Brace, Vec<Item>)>,
+ pub semi: Option<Token![;]>,
+ }
+ }
+
+ ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub static_token: Token![static],
+ pub mutability: Option<Token![mut]>,
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+@@ -228,32 +226,32 @@ ast_struct! {
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub struct_token: Token![struct],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub fields: Fields,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+ ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub unsafety: Option<Token![unsafe]>,
+ pub auto_token: Option<Token![auto]>,
+ pub trait_token: Token![trait],
+ pub ident: Ident,
+ pub generics: Generics,
+@@ -262,212 +260,99 @@ ast_struct! {
+ pub brace_token: token::Brace,
+ pub items: Vec<TraitItem>,
+ }
+ }
+
+ ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub trait_token: Token![trait],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub eq_token: Token![=],
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub eq_token: Token![=],
+ pub ty: Box<Type>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub union_token: Token![union],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub fields: FieldsNamed,
+ }
+ }
+
+ ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub use_token: Token![use],
+ pub leading_colon: Option<Token![::]>,
+ pub tree: UseTree,
+ pub semi_token: Token![;],
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Item {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Item {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Item::Const(this), Item::Const(other)) => this == other,
+- (Item::Enum(this), Item::Enum(other)) => this == other,
+- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
+- (Item::Fn(this), Item::Fn(other)) => this == other,
+- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
+- (Item::Impl(this), Item::Impl(other)) => this == other,
+- (Item::Macro(this), Item::Macro(other)) => this == other,
+- (Item::Macro2(this), Item::Macro2(other)) => this == other,
+- (Item::Mod(this), Item::Mod(other)) => this == other,
+- (Item::Static(this), Item::Static(other)) => this == other,
+- (Item::Struct(this), Item::Struct(other)) => this == other,
+- (Item::Trait(this), Item::Trait(other)) => this == other,
+- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
+- (Item::Type(this), Item::Type(other)) => this == other,
+- (Item::Union(this), Item::Union(other)) => this == other,
+- (Item::Use(this), Item::Use(other)) => this == other,
+- (Item::Verbatim(this), Item::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Item {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
++impl Item {
++ #[cfg(feature = "parsing")]
++ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+- Item::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- Item::Enum(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- Item::ExternCrate(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- Item::Fn(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- Item::ForeignMod(item) => {
+- state.write_u8(4);
+- item.hash(state);
+- }
+- Item::Impl(item) => {
+- state.write_u8(5);
+- item.hash(state);
+- }
+- Item::Macro(item) => {
+- state.write_u8(6);
+- item.hash(state);
+- }
+- Item::Macro2(item) => {
+- state.write_u8(7);
+- item.hash(state);
+- }
+- Item::Mod(item) => {
+- state.write_u8(8);
+- item.hash(state);
+- }
+- Item::Static(item) => {
+- state.write_u8(9);
+- item.hash(state);
+- }
+- Item::Struct(item) => {
+- state.write_u8(10);
+- item.hash(state);
+- }
+- Item::Trait(item) => {
+- state.write_u8(11);
+- item.hash(state);
+- }
+- Item::TraitAlias(item) => {
+- state.write_u8(12);
+- item.hash(state);
+- }
+- Item::Type(item) => {
+- state.write_u8(13);
+- item.hash(state);
+- }
+- Item::Union(item) => {
+- state.write_u8(14);
+- item.hash(state);
+- }
+- Item::Use(item) => {
+- state.write_u8(15);
+- item.hash(state);
+- }
+- Item::Verbatim(item) => {
+- state.write_u8(16);
+- TokenStreamHelper(item).hash(state);
+- }
++ Item::ExternCrate(ItemExternCrate { attrs, .. })
++ | Item::Use(ItemUse { attrs, .. })
++ | Item::Static(ItemStatic { attrs, .. })
++ | Item::Const(ItemConst { attrs, .. })
++ | Item::Fn(ItemFn { attrs, .. })
++ | Item::Mod(ItemMod { attrs, .. })
++ | Item::ForeignMod(ItemForeignMod { attrs, .. })
++ | Item::Type(ItemType { attrs, .. })
++ | Item::Struct(ItemStruct { attrs, .. })
++ | Item::Enum(ItemEnum { attrs, .. })
++ | Item::Union(ItemUnion { attrs, .. })
++ | Item::Trait(ItemTrait { attrs, .. })
++ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
++ | Item::Impl(ItemImpl { attrs, .. })
++ | Item::Macro(ItemMacro { attrs, .. })
++ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
++ Item::Verbatim(_) => Vec::new(),
+ Item::__Nonexhaustive => unreachable!(),
+ }
+ }
+ }
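++
++ // Illustrative sketch: `replace_attrs` lets `Parse for Item` splice the
++ // outer attributes back onto whichever variant was produced, as in
++ //     attrs.extend(item.replace_attrs(Vec::new()));
++ //     item.replace_attrs(attrs);
++ // while `Item::Verbatim` simply keeps its token stream untouched.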
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ItemMacro2 {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ItemMacro2 {
+- fn eq(&self, other: &Self) -> bool {
+- self.attrs == other.attrs
+- && self.vis == other.vis
+- && self.macro_token == other.macro_token
+- && self.ident == other.ident
+- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ItemMacro2 {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.attrs.hash(state);
+- self.vis.hash(state);
+- self.macro_token.hash(state);
+- self.ident.hash(state);
+- TokenStreamHelper(&self.rules).hash(state);
+- }
+-}
+-
+ impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+ Data::Struct(data) => Item::Struct(ItemStruct {
+ attrs: input.attrs,
+ vis: input.vis,
+ struct_token: data.struct_token,
+ ident: input.ident,
+@@ -491,20 +376,67 @@ impl From<DeriveInput> for Item {
+ ident: input.ident,
+ generics: input.generics,
+ fields: data.fields,
+ }),
+ }
+ }
+ }
+
++impl From<ItemStruct> for DeriveInput {
++ fn from(input: ItemStruct) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Struct(DataStruct {
++ struct_token: input.struct_token,
++ fields: input.fields,
++ semi_token: input.semi_token,
++ }),
++ }
++ }
++}
++
++impl From<ItemEnum> for DeriveInput {
++ fn from(input: ItemEnum) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Enum(DataEnum {
++ enum_token: input.enum_token,
++ brace_token: input.brace_token,
++ variants: input.variants,
++ }),
++ }
++ }
++}
++
++impl From<ItemUnion> for DeriveInput {
++ fn from(input: ItemUnion) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Union(DataUnion {
++ union_token: input.union_token,
++ fields: input.fields,
++ }),
++ }
++ }
++}
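++
++ // Illustrative sketch (assumes the "full" and "parsing" features; the type
++ // and field names are examples only):
++ //     let item: ItemStruct = syn::parse_str("struct Point { x: i32, y: i32 }")?;
++ //     let input = DeriveInput::from(item);
++ // These impls let full item definitions feed APIs that expect a DeriveInput.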
++
+ ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+@@ -525,77 +457,77 @@ ast_enum_of_structs! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ Group(UseGroup),
+ }
+ }
+
+ ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+ pub tree: Box<UseTree>,
+ }
+ }
+
+ ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseName {
+ pub ident: Ident,
+ }
+ }
+
+ ast_struct! {
+ /// A renamed identifier imported by a `use` item: `HashMap as Map`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+ pub rename: Ident,
+ }
+ }
+
+ ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+ }
+
+ ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ForeignItem #manual_extra_traits {
++ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ Static(ForeignItemStatic),
+
+ /// A foreign type in an `extern` block: `type void`.
+ Type(ForeignItemType),
+@@ -609,130 +541,79 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// A foreign function in an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub sig: Signature,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub static_token: Token![static],
+ pub mutability: Option<Token![mut]>,
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A macro invocation within an extern block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ForeignItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ForeignItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
+- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
+- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
+- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
+- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ForeignItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ForeignItem::Fn(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ForeignItem::Static(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ForeignItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ForeignItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ForeignItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ForeignItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum TraitItem #manual_extra_traits {
++ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+ /// A trait method within the definition of a trait.
+ Method(TraitItemMethod),
+
+ /// An associated type within the definition of a trait.
+ Type(TraitItemType),
+@@ -746,132 +627,81 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// An associated constant within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Type,
+ pub default: Option<(Token![=], Expr)>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A trait method within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+ pub default: Option<Block>,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+ ast_struct! {
+ /// An associated type within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub colon_token: Option<Token![:]>,
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ pub default: Option<(Token![=], Type)>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for TraitItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for TraitItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
+- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
+- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
+- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
+- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for TraitItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- TraitItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- TraitItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- TraitItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- TraitItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- TraitItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- TraitItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ImplItem #manual_extra_traits {
++ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+ /// A method within an impl block.
+ Method(ImplItemMethod),
+
+ /// An associated type within an impl block.
+ Type(ImplItemType),
+@@ -885,17 +715,17 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// An associated constant within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub defaultness: Option<Token![default]>,
+ pub const_token: Token![const],
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub ty: Type,
+@@ -903,144 +733,117 @@ ast_struct! {
+ pub expr: Expr,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A method within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub defaultness: Option<Token![default]>,
+ pub sig: Signature,
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// An associated type within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub defaultness: Option<Token![default]>,
+ pub type_token: Token![type],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub eq_token: Token![=],
+ pub ty: Type,
+ pub semi_token: Token![;],
+ }
+ }
+
+ ast_struct! {
+ /// A macro invocation within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ImplItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ImplItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
+- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
+- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
+- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
+- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ImplItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ImplItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ImplItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ImplItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ImplItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ImplItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ImplItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub abi: Option<Abi>,
+ pub fn_token: Token![fn],
+ pub ident: Ident,
+ pub generics: Generics,
+ pub paren_token: token::Paren,
+ pub inputs: Punctuated<FnArg, Token![,]>,
+ pub variadic: Option<Variadic>,
+ pub output: ReturnType,
+ }
+ }
+
++impl Signature {
++ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
++ pub fn receiver(&self) -> Option<&FnArg> {
++ let arg = self.inputs.first()?;
++ match arg {
++ FnArg::Receiver(_) => Some(arg),
++ FnArg::Typed(PatType { pat, .. }) => {
++ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
++ if ident == "self" {
++ return Some(arg);
++ }
++ }
++ None
++ }
++ }
++ }
++}
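++
++ // Illustrative sketch: callers can use `receiver()` to tell methods apart
++ // from associated functions (the `method` binding is an example only):
++ //     if let Some(FnArg::Receiver(_)) = method.sig.receiver() {
++ //         /* `self`, `&self`, or `&mut self` */
++ //     }
++ // A typed receiver such as `self: Box<Self>` is returned as `FnArg::Typed`.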
++
+ ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum FnArg {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
++ ///
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+ Typed(PatType),
+ }
+ }
+
+ ast_struct! {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+ pub mutability: Option<Token![mut]>,
+ pub self_token: Token![self],
+ }
+ }
+
+@@ -1051,149 +854,233 @@ impl Receiver {
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+ use crate::parse::discouraged::Speculative;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
++ use crate::token::Brace;
+ use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
+ use std::iter::{self, FromIterator};
+
+ crate::custom_keyword!(existential);
+
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![extern]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
++ }
++ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+ ahead.parse::<LitStr>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(Token![use]) {
+ input.parse().map(Item::Use)
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(Item::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Static(ItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+- input.parse().map(Item::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let vis = input.parse()?;
++ let const_token = input.parse()?;
++ let ident = {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ };
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Const(ItemConst {
++ attrs: Vec::new(),
++ vis,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(Token![unsafe]) {
+ ahead.parse::<Token![unsafe]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![trait])
+ || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+- input.parse().map(Item::Impl)
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(Item::Type)
++ parse_item_type(begin, input)
+ } else if lookahead.peek(existential) {
+ input.call(item_existential).map(Item::Verbatim)
+ } else if lookahead.peek(Token![struct]) {
+ input.parse().map(Item::Struct)
+ } else if lookahead.peek(Token![enum]) {
+ input.parse().map(Item::Enum)
+ } else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
+ input.parse().map(Item::Union)
+ } else if lookahead.peek(Token![trait]) {
+ input.call(parse_trait_or_trait_alias)
+ } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+- input.parse().map(Item::Impl)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Token![macro]) {
+ input.parse().map(Item::Macro2)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(Item::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- Item::ExternCrate(item) => &mut item.attrs,
+- Item::Use(item) => &mut item.attrs,
+- Item::Static(item) => &mut item.attrs,
+- Item::Const(item) => &mut item.attrs,
+- Item::Fn(item) => &mut item.attrs,
+- Item::Mod(item) => &mut item.attrs,
+- Item::ForeignMod(item) => &mut item.attrs,
+- Item::Type(item) => &mut item.attrs,
+- Item::Struct(item) => &mut item.attrs,
+- Item::Enum(item) => &mut item.attrs,
+- Item::Union(item) => &mut item.attrs,
+- Item::Trait(item) => &mut item.attrs,
+- Item::TraitAlias(item) => &mut item.attrs,
+- Item::Impl(item) => &mut item.attrs,
+- Item::Macro(item) => &mut item.attrs,
+- Item::Macro2(item) => &mut item.attrs,
+- Item::Verbatim(_) => return Ok(item),
+- Item::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(item)
++ }
++ }
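++
++ // Illustrative note: declarations this AST cannot express, such as
++ // `static FOO: u8;` or `const FOO: u8;` with no initializer, or a bare
++ // signature followed by `;` at module scope, are preserved as
++ // `Item::Verbatim` using the tokens between the `begin` fork and the
++ // current position.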
++
++ struct FlexibleItemType {
++ vis: Visibility,
++ defaultness: Option<Token![default]>,
++ type_token: Token![type],
++ ident: Ident,
++ generics: Generics,
++ colon_token: Option<Token![:]>,
++ bounds: Punctuated<TypeParamBound, Token![+]>,
++ ty: Option<(Token![=], Type)>,
++ semi_token: Token![;],
++ }
++
++ impl Parse for FlexibleItemType {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let type_token: Token![type] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let mut generics: Generics = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ bounds.push_value(input.parse::<TypeParamBound>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ bounds.push_punct(input.parse::<Token![+]>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ }
+ }
++ generics.where_clause = input.parse()?;
++ let ty = if let Some(eq_token) = input.parse()? {
++ Some((eq_token, input.parse::<Type>()?))
++ } else {
++ None
++ };
++ let semi_token: Token![;] = input.parse()?;
+
+- Ok(item)
++ Ok(FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ })
+ }
+ }
+
+ impl Parse for ItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let path = input.call(Path::parse_mod_style)?;
+ let bang_token: Token![!] = input.parse()?;
+@@ -1305,17 +1192,16 @@ pub mod parsing {
+
+ impl Parse for UseTree {
+ fn parse(input: ParseStream) -> Result<UseTree> {
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+- || lookahead.peek(Token![extern])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+ Ok(UseTree::Path(UsePath {
+ ident,
+ colon2_token: input.parse()?,
+ tree: Box::new(input.parse()?),
+ }))
+@@ -1387,77 +1273,134 @@ pub mod parsing {
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+ expr: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
++ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
++ let trailing_punct = args.trailing_punct();
++
++ let last = match args.last_mut()? {
++ FnArg::Typed(last) => last,
++ _ => return None,
++ };
++
++ let ty = match last.ty.as_ref() {
++ Type::Verbatim(ty) => ty,
++ _ => return None,
++ };
++
++ let mut variadic = Variadic {
++ attrs: Vec::new(),
++ dots: parse2(ty.clone()).ok()?,
++ };
++
++ if let Pat::Verbatim(pat) = last.pat.as_ref() {
++ if pat.to_string() == "..." && !trailing_punct {
++ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
++ args.pop();
++ }
++ }
++
++ Some(variadic)
++ }
++
++ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
++ TokenStream::from_iter(vec![
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[0]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[1]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Alone);
++ dot.set_span(dots.spans[2]);
++ dot
++ }),
++ ])
++ }
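++
++ // Illustrative note: this rebuilds `...` as three '.' punct tokens with
++ // Joint/Joint/Alone spacing, each carrying the span of the corresponding
++ // original dot, so verbatim variadic patterns and types keep their location.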
++
++ fn peek_signature(input: ParseStream) -> bool {
++ let fork = input.fork();
++ fork.parse::<Option<Token![const]>>().is_ok()
++ && fork.parse::<Option<Token![async]>>().is_ok()
++ && fork.parse::<Option<Token![unsafe]>>().is_ok()
++ && fork.parse::<Option<Abi>>().is_ok()
++ && fork.peek(Token![fn])
++ }
++
++ fn parse_signature(input: ParseStream) -> Result<Signature> {
++ let constness: Option<Token![const]> = input.parse()?;
++ let asyncness: Option<Token![async]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let abi: Option<Abi> = input.parse()?;
++ let fn_token: Token![fn] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let generics: Generics = input.parse()?;
++
++ let content;
++ let paren_token = parenthesized!(content in input);
++ let mut inputs = parse_fn_args(&content)?;
++ let variadic = pop_variadic(&mut inputs);
++
++ let output: ReturnType = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ Ok(Signature {
++ constness,
++ asyncness,
++ unsafety,
++ abi,
++ fn_token,
++ ident,
++ paren_token,
++ inputs,
++ output,
++ variadic,
++ generics: Generics {
++ where_clause,
++ ..generics
++ },
++ })
++ }
++
+ impl Parse for ItemFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+- let variadic = inputs.last().as_ref().and_then(get_variadic);
+-
+- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
+- if let FnArg::Typed(PatType { ty, .. }) = input {
+- if let Type::Verbatim(tokens) = &**ty {
+- if let Ok(dots) = parse2(tokens.clone()) {
+- return Some(Variadic {
+- attrs: Vec::new(),
+- dots,
+- });
+- }
+- }
+- }
+- None
+- }
++ let sig = parse_signature(input)?;
++ parse_rest_of_fn(input, outer_attrs, vis, sig)
++ }
++ }
+
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ fn parse_rest_of_fn(
++ input: ParseStream,
++ outer_attrs: Vec<Attribute>,
++ vis: Visibility,
++ sig: Signature,
++ ) -> Result<ItemFn> {
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let stmts = content.call(Block::parse_within)?;
+
+- Ok(ItemFn {
+- attrs: private::attrs(outer_attrs, inner_attrs),
+- vis,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Box::new(Block { brace_token, stmts }),
+- })
+- }
++ Ok(ItemFn {
++ attrs: private::attrs(outer_attrs, inner_attrs),
++ vis,
++ sig,
++ block: Box::new(Block { brace_token, stmts }),
++ })
+ }
+
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+
+ let ahead = input.fork();
+ if let Ok(mut receiver) = ahead.parse::<Receiver>() {
+@@ -1486,36 +1429,89 @@ pub mod parsing {
+ }
+ },
+ mutability: input.parse()?,
+ self_token: input.parse()?,
+ })
+ }
+ }
+
++ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
++ let mut args = Punctuated::new();
++ let mut has_receiver = false;
++
++ while !input.is_empty() {
++ let attrs = input.call(Attribute::parse_outer)?;
++
++ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
++ FnArg::Typed(PatType {
++ attrs,
++ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
++ colon_token: Token![:](dots.spans[0]),
++ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
++ })
++ } else {
++ let mut arg: FnArg = input.parse()?;
++ match &mut arg {
++ FnArg::Receiver(receiver) if has_receiver => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected second method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) if !args.is_empty() => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) => {
++ has_receiver = true;
++ receiver.attrs = attrs;
++ }
++ FnArg::Typed(arg) => arg.attrs = attrs,
++ }
++ arg
++ };
++ args.push_value(arg);
++
++ if input.is_empty() {
++ break;
++ }
++
++ let comma: Token![,] = input.parse()?;
++ args.push_punct(comma);
++ }
++
++ Ok(args)
++ }
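++
++ // Illustrative sketch: a receiver is only accepted as the first argument,
++ // so (argument names are examples only)
++ //     fn f(x: u8, self) {}
++ // fails with "unexpected method receiver", and a second `self` fails with
++ // "unexpected second method receiver".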
++
+ fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
++ // Hack to parse pre-2018 syntax in
++ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
++ // because the rest of the test case is valuable.
++ if input.peek(Ident) && input.peek2(Token![<]) {
++ let span = input.fork().parse::<Ident>()?.span();
++ return Ok(PatType {
++ attrs: Vec::new(),
++ pat: Box::new(Pat::Wild(PatWild {
++ attrs: Vec::new(),
++ underscore_token: Token![_](span),
++ })),
++ colon_token: Token![:](span),
++ ty: input.parse()?,
++ });
++ }
++
+ Ok(PatType {
+ attrs: Vec::new(),
+- pat: input.parse()?,
++ pat: Box::new(pat::parsing::multi_pat(input)?),
+ colon_token: input.parse()?,
+ ty: Box::new(match input.parse::<Option<Token![...]>>()? {
+- Some(dot3) => {
+- let args = vec![
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
+- ];
+- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
+- |(mut arg, span)| {
+- arg.set_span(*span);
+- arg
+- },
+- ));
+- Type::Verbatim(tokens)
+- }
++ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
+ None => input.parse()?,
+ }),
+ })
+ }
+
+ impl Parse for ItemMod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+@@ -1576,109 +1572,103 @@ pub mod parsing {
+ brace_token,
+ items,
+ })
+ }
+ }
+
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![fn]) {
+- input.parse().map(ForeignItem::Fn)
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(token::Brace) {
++ let content;
++ braced!(content in input);
++ content.call(Attribute::parse_inner)?;
++ content.call(Block::parse_within)?;
++
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Fn(ForeignItemFn {
++ attrs: Vec::new(),
++ vis,
++ sig,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(ForeignItem::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![=]) {
++ input.parse::<Token![=]>()?;
++ input.parse::<Expr>()?;
++ input.parse::<Token![;]>()?;
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Static(ForeignItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ForeignItem::Type)
++ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(ForeignItem::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- ForeignItem::Fn(item) => &mut item.attrs,
+- ForeignItem::Static(item) => &mut item.attrs,
+- ForeignItem::Type(item) => &mut item.attrs,
+- ForeignItem::Macro(item) => &mut item.attrs,
+- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
+- }
++ let item_attrs = match &mut item {
++ ForeignItem::Fn(item) => &mut item.attrs,
++ ForeignItem::Static(item) => &mut item.attrs,
++ ForeignItem::Type(item) => &mut item.attrs,
++ ForeignItem::Macro(item) => &mut item.attrs,
++ ForeignItem::Verbatim(_) => return Ok(item),
++ ForeignItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+
+ Ok(item)
+ }
+ }
+
+ impl Parse for ForeignItemFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let mut inputs = Punctuated::new();
+- let mut variadic = None;
+- while !content.is_empty() {
+- let attrs = content.call(Attribute::parse_outer)?;
+-
+- if let Some(dots) = content.parse()? {
+- variadic = Some(Variadic { attrs, dots });
+- break;
+- }
+-
+- let mut arg = content.call(fn_arg_typed)?;
+- arg.attrs = attrs;
+- inputs.push_value(FnArg::Typed(arg));
+- if content.is_empty() {
+- break;
+- }
+-
+- inputs.push_punct(content.parse()?);
+- }
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+ let semi_token: Token![;] = input.parse()?;
+-
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+- sig: Signature {
+- constness: None,
+- asyncness: None,
+- unsafety: None,
+- abi: None,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ semi_token,
+ })
+ }
+ }
+
+ impl Parse for ForeignItemStatic {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ForeignItemStatic {
+@@ -1701,16 +1691,47 @@ pub mod parsing {
+ vis: input.parse()?,
+ type_token: input.parse()?,
+ ident: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
++ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some()
++ || generics.lt_token.is_some()
++ || generics.where_clause.is_some()
++ || colon_token.is_some()
++ || ty.is_some()
++ {
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Type(ForeignItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mac: Macro = input.parse()?;
+ let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
+ None
+ } else {
+ Some(input.parse()?)
+@@ -1737,16 +1758,46 @@ pub mod parsing {
+ },
+ eq_token: input.parse()?,
+ ty: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
++ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(Item::Type(ItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty: Box::new(ty),
++ semi_token,
++ }))
++ }
++ }
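++
++ // Illustrative note: shapes `ItemType` cannot represent are kept as tokens,
++ // e.g.
++ //     type Foo; // no `= Type`, becomes Item::Verbatim
++ //     type Foo: Bound = Bar; // has bounds, becomes Item::Verbatim
++ // while `type Foo = Bar;` becomes a structured `Item::Type`.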
++
+ #[cfg(not(feature = "printing"))]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ Err(input.error("existential type is not supported"))
+ }
+
+ #[cfg(feature = "printing")]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ use crate::attr::FilterAttrs;
+@@ -1882,39 +1933,39 @@ pub mod parsing {
+ .map(Item::TraitAlias)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let attrs = input.call(Attribute::parse_outer)?;
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+ let trait_token: Token![trait] = input.parse()?;
+ let ident: Ident = input.parse()?;
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+- attrs,
++ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+ trait_token,
+ ident,
+ generics,
+ )
+ }
+ }
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+- attrs: Vec<Attribute>,
++ outer_attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+ trait_token: Token![trait],
+ ident: Ident,
+ mut generics: Generics,
+ ) -> Result<ItemTrait> {
+ let colon_token: Option<Token![:]> = input.parse()?;
+@@ -1932,23 +1983,24 @@ pub mod parsing {
+ }
+ }
+ }
+
+ generics.where_clause = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+- attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ vis,
+ unsafety,
+ auto_token,
+ trait_token,
+ ident,
+ generics,
+ colon_token,
+ supertraits,
+@@ -2009,76 +2061,83 @@ pub mod parsing {
+ eq_token,
+ bounds,
+ semi_token,
+ })
+ }
+
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![const]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(TraitItem::Method)
++ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.parse().map(TraitItem::Const)
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+ || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![fn])
+ {
+ input.parse().map(TraitItem::Method)
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(TraitItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(TraitItem::Type)
++ parse_trait_item_type(begin.fork(), input)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ {
+ input.parse().map(TraitItem::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- TraitItem::Const(item) => &mut item.attrs,
+- TraitItem::Method(item) => &mut item.attrs,
+- TraitItem::Type(item) => &mut item.attrs,
+- TraitItem::Macro(item) => &mut item.attrs,
+- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ match (vis, defaultness) {
++ (Visibility::Inherited, None) => {}
++ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ }
+
++ let item_attrs = match &mut item {
++ TraitItem::Const(item) => &mut item.attrs,
++ TraitItem::Method(item) => &mut item.attrs,
++ TraitItem::Type(item) => &mut item.attrs,
++ TraitItem::Macro(item) => &mut item.attrs,
++ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+
+ impl Parse for TraitItemConst {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TraitItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ default: {
+ if input.peek(Token![=]) {
+ let eq_token: Token![=] = input.parse()?;
+ let default: Expr = input.parse()?;
+ Some((eq_token, default))
+ } else {
+@@ -2088,30 +2147,17 @@ pub mod parsing {
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ impl Parse for TraitItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+ (Some(brace_token), inner_attrs, stmts, None)
+@@ -2119,32 +2165,17 @@ pub mod parsing {
+ let semi_token: Token![;] = input.parse()?;
+ (None, Vec::new(), Vec::new(), Some(semi_token))
+ } else {
+ return Err(lookahead.error());
+ };
+
+ Ok(TraitItemMethod {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+ }
+ }
+
+ impl Parse for TraitItemType {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -2183,16 +2214,45 @@ pub mod parsing {
+ colon_token,
+ bounds,
+ default,
+ semi_token,
+ })
+ }
+ }
+
++ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || vis.is_some() {
++ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(TraitItem::Type(TraitItemType {
++ attrs: Vec::new(),
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ default: ty,
++ semi_token,
++ }))
++ }
++ }
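
    An illustrative sketch (not part of the patch) of how the FlexibleItemType fallback above behaves when parsing trait items, assuming syn 1.0.40 with the default features plus "full": a plain associated type parses into the structured node, while forms the AST cannot represent, such as a visibility qualifier, are preserved as TraitItem::Verbatim instead of being rejected.

        fn main() -> syn::Result<()> {
            // A plain associated type parses into the structured node.
            let item: syn::TraitItem = syn::parse_str("type Item: Clone;")?;
            assert!(matches!(item, syn::TraitItem::Type(_)));

            // A visibility qualifier is not representable in TraitItemType,
            // so the tokens are kept verbatim rather than producing an error.
            let odd: syn::TraitItem = syn::parse_str("pub type Item;")?;
            assert!(matches!(odd, syn::TraitItem::Verbatim(_)));
            Ok(())
        }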
++
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mac: Macro = input.parse()?;
+ let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
+ None
+ } else {
+ Some(input.parse()?)
+@@ -2202,123 +2262,148 @@ pub mod parsing {
+ mac,
+ semi_token,
+ })
+ }
+ }
+
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let defaultness: Option<Token![default]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let impl_token: Token![impl] = input.parse()?;
++ let allow_const_impl = false;
++ parse_impl(input, allow_const_impl).map(Option::unwrap)
++ }
++ }
++
++ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let impl_token: Token![impl] = input.parse()?;
+
+- let has_generics = input.peek(Token![<])
+- && (input.peek2(Token![>])
+- || input.peek2(Token![#])
+- || (input.peek2(Ident) || input.peek2(Lifetime))
+- && (input.peek3(Token![:])
+- || input.peek3(Token![,])
+- || input.peek3(Token![>])));
+- let generics: Generics = if has_generics {
+- input.parse()?
+- } else {
+- Generics::default()
+- };
++ let has_generics = input.peek(Token![<])
++ && (input.peek2(Token![>])
++ || input.peek2(Token![#])
++ || (input.peek2(Ident) || input.peek2(Lifetime))
++ && (input.peek3(Token![:])
++ || input.peek3(Token![,])
++ || input.peek3(Token![>]))
++ || input.peek2(Token![const]));
++ let generics: Generics = if has_generics {
++ input.parse()?
++ } else {
++ Generics::default()
++ };
+
+- let trait_ = {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- if ahead.parse::<Option<Token![!]>>().is_ok()
+- && ahead.parse::<Path>().is_ok()
+- && ahead.parse::<Token![for]>().is_ok()
+- {
+- let polarity: Option<Token![!]> = input.parse()?;
+- let path: Path = input.parse()?;
+- let for_token: Token![for] = input.parse()?;
+- Some((polarity, path, for_token))
+- } else {
+- None
+- }
+- };
+- let self_ty: Type = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let is_const_impl = allow_const_impl
++ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
++ if is_const_impl {
++ input.parse::<Option<Token![?]>>()?;
++ input.parse::<Token![const]>()?;
++ }
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
++ let trait_ = (|| -> Option<_> {
++ let ahead = input.fork();
++ let polarity: Option<Token![!]> = ahead.parse().ok()?;
++ let mut path: Path = ahead.parse().ok()?;
++ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
++ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
++ path.segments.last_mut().unwrap().arguments = parenthesized;
++ }
++ let for_token: Token![for] = ahead.parse().ok()?;
++ input.advance_to(&ahead);
++ Some((polarity, path, for_token))
++ })();
+
+- let mut items = Vec::new();
+- while !content.is_empty() {
+- items.push(content.parse()?);
+- }
++ let self_ty: Type = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+
+- Ok(ItemImpl {
++ let mut items = Vec::new();
++ while !content.is_empty() {
++ items.push(content.parse()?);
++ }
++
++ if is_const_impl {
++ Ok(None)
++ } else {
++ Ok(Some(ItemImpl {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+ defaultness,
+ unsafety,
+ impl_token,
+ generics: Generics {
+ where_clause,
+ ..generics
+ },
+ trait_,
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+- })
++ }))
+ }
+ }
+
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let mut lookahead = ahead.lookahead1();
+ let defaultness = if lookahead.peek(Token![default]) && !ahead.peek2(Token![!]) {
+ let defaultness: Token![default] = ahead.parse()?;
+ lookahead = ahead.lookahead1();
+ Some(defaultness)
+ } else {
+ None
+ };
+
+- let mut item = if lookahead.peek(Token![const]) {
+- ahead.parse::<Token![const]>()?;
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(ImplItem::Method)
++ } else if lookahead.peek(Token![const]) {
++ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
+- input.parse().map(ImplItem::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.advance_to(&ahead);
++ let ident: Ident = input.call(Ident::parse_any)?;
++ let colon_token: Token![:] = input.parse()?;
++ let ty: Type = input.parse()?;
++ if let Some(eq_token) = input.parse()? {
++ return Ok(ImplItem::Const(ImplItemConst {
++ attrs,
++ vis,
++ defaultness,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }));
++ } else {
++ input.parse::<Token![;]>()?;
++ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ImplItem::Type)
++ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
+ input.call(item_existential).map(ImplItem::Verbatim)
+ } else if vis.is_inherited()
+ && defaultness.is_none()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(ImplItem::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
+
+@@ -2341,72 +2426,68 @@ pub mod parsing {
+
+ impl Parse for ImplItemConst {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ImplItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ vis: input.parse()?,
+ defaultness: input.parse()?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+ expr: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
+ impl Parse for ImplItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
++ let sig = parse_signature(input)?;
+
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ // Accept methods without a body in an impl block because
++ // rustc's *parser* does not reject them (the compilation error
++ // is emitted after parsing) and it can be useful for macro
++ // DSLs.
++ let mut punct = Punct::new(';', Spacing::Alone);
++ punct.set_span(semi.span);
++ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
++ Block {
++ brace_token: Brace::default(),
++ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
++ }
++ } else {
++ let content;
++ let brace_token = braced!(content in input);
++ attrs.extend(content.call(Attribute::parse_inner)?);
++ Block {
++ brace_token,
++ stmts: content.call(Block::parse_within)?,
++ }
++ };
+
+ Ok(ImplItemMethod {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ vis,
+ defaultness,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Block { brace_token, stmts },
++ sig,
++ block,
+ })
+ }
+ }
+
+ impl Parse for ImplItemType {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(ImplItemType {
+ attrs: input.call(Attribute::parse_outer)?,
+@@ -2421,16 +2502,47 @@ pub mod parsing {
+ },
+ eq_token: input.parse()?,
+ ty: input.parse()?,
+ semi_token: input.parse()?,
+ })
+ }
+ }
+
++ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if colon_token.is_some() || ty.is_none() {
++ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(ImplItem::Type(ImplItemType {
++ attrs: Vec::new(),
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let mac: Macro = input.parse()?;
+ let semi_token: Option<Token![;]> = if mac.delimiter.is_brace() {
+ None
+ } else {
+ Some(input.parse()?)
+@@ -2466,16 +2578,17 @@ pub mod parsing {
+ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
++ use crate::punctuated::Pair;
+
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.extern_token.to_tokens(tokens);
+ self.crate_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+@@ -2830,16 +2943,24 @@ mod printing {
+ }
+
+ impl ToTokens for ImplItemMethod {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
++ if self.block.stmts.len() == 1 {
++ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
++ if verbatim.to_string() == ";" {
++ verbatim.to_tokens(tokens);
++ return;
++ }
++ }
++ }
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+ });
+ }
+ }
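
    A small illustrative sketch (not part of the patch) of the bodyless-method handling above, assuming syn 1.0.40 with the "full", "parsing" and "printing" features plus the quote crate: a method ending in `;` parses into an ImplItemMethod whose block holds the verbatim semicolon, and printing emits that `;` instead of an empty `{}` body.

        use quote::ToTokens;

        fn main() -> syn::Result<()> {
            // Bodyless method: rustc's parser accepts this inside an impl block,
            // so syn accepts it too and round-trips the trailing `;`.
            let method: syn::ImplItemMethod = syn::parse_str("fn len(&self) -> usize;")?;
            let printed = method.to_token_stream().to_string();
            assert!(printed.trim_end().ends_with(';'));
            Ok(())
        }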
+
+ impl ToTokens for ImplItemType {
+@@ -2900,31 +3021,71 @@ mod printing {
+ impl ToTokens for ForeignItemMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ self.semi_token.to_tokens(tokens);
+ }
+ }
+
++ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
++ let arg = match arg {
++ FnArg::Typed(arg) => arg,
++ FnArg::Receiver(receiver) => {
++ receiver.to_tokens(tokens);
++ return false;
++ }
++ };
++
++ match arg.ty.as_ref() {
++ Type::Verbatim(ty) if ty.to_string() == "..." => {
++ match arg.pat.as_ref() {
++ Pat::Verbatim(pat) if pat.to_string() == "..." => {
++ tokens.append_all(arg.attrs.outer());
++ pat.to_tokens(tokens);
++ }
++ _ => arg.to_tokens(tokens),
++ }
++ true
++ }
++ _ => {
++ arg.to_tokens(tokens);
++ false
++ }
++ }
++ }
++
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+ self.asyncness.to_tokens(tokens);
+ self.unsafety.to_tokens(tokens);
+ self.abi.to_tokens(tokens);
+ self.fn_token.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+- self.inputs.to_tokens(tokens);
+- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
+- <Token![,]>::default().to_tokens(tokens);
++ let mut last_is_variadic = false;
++ for input in self.inputs.pairs() {
++ match input {
++ Pair::Punctuated(input, comma) => {
++ maybe_variadic_to_tokens(input, tokens);
++ comma.to_tokens(tokens);
++ }
++ Pair::End(input) => {
++ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
++ }
++ }
+ }
+- self.variadic.to_tokens(tokens);
++ if self.variadic.is_some() && !last_is_variadic {
++ if !self.inputs.empty_or_trailing() {
++ <Token![,]>::default().to_tokens(tokens);
++ }
++ self.variadic.to_tokens(tokens);
++ }
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
+ }
+ }
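
    A hedged sketch of the variadic printing above (assuming syn 1.0.40 with "full" and "printing", plus the quote crate): a C-style `...` parameter in an extern block survives a parse/print round trip, whether it is stored in the signature's variadic field or as a verbatim argument.

        use quote::ToTokens;

        fn main() -> syn::Result<()> {
            let src = r#"extern "C" { fn printf(fmt: *const u8, ...) -> i32; }"#;
            let item: syn::ItemForeignMod = syn::parse_str(src)?;
            // The trailing `...` is printed again as the last parameter.
            assert!(item.to_token_stream().to_string().contains("..."));
            Ok(())
        }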
+
+ impl ToTokens for Receiver {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+diff --git a/third_party/rust/syn/src/keyword.rs b/third_party/rust/syn/src/keyword.rs
+deleted file mode 100644
+diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
+--- third_party/rust/syn/src/lib.rs
++++ third_party/rust/syn/src/lib.rs
+@@ -1,8 +1,16 @@
++//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
++//!
++//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
++//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
++//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
++//!
++//! <br>
++//!
+ //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+ //! tree of Rust source code.
+ //!
+ //! Currently this library is geared toward use in Rust procedural macros, but
+ //! contains some APIs that may be useful more generally.
+ //!
+ //! - **Data structures** — Syn provides a complete syntax tree that can
+ //! represent any valid Rust source code. The syntax tree is rooted at
+@@ -57,18 +65,18 @@
+ //! syn = "1.0"
+ //! quote = "1.0"
+ //!
+ //! [lib]
+ //! proc-macro = true
+ //! ```
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use quote::quote;
+ //! use syn::{parse_macro_input, DeriveInput};
+ //!
+ //! # const IGNORE_TOKENS: &str = stringify! {
+ //! #[proc_macro_derive(MyMacro)]
+ //! # };
+ //! pub fn my_macro(input: TokenStream) -> TokenStream {
+@@ -237,59 +245,71 @@
+ //! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
+ //! types.
+ //! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
+ //! types.
+ //! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
+ //! dynamic library libproc_macro from rustc toolchain.
+
+ // Syn types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
+ #![deny(clippy::all, clippy::pedantic)]
+ // Ignored clippy lints.
+ #![allow(
+- clippy::block_in_if_condition_stmt,
++ clippy::blocks_in_if_conditions,
+ clippy::cognitive_complexity,
+ clippy::doc_markdown,
+ clippy::eval_order_dependence,
+ clippy::inherent_to_string,
+ clippy::large_enum_variant,
++ clippy::manual_non_exhaustive,
++ clippy::match_like_matches_macro,
++ clippy::match_on_vec_items,
++ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::suspicious_op_assign_impl,
+ clippy::too_many_arguments,
+- clippy::trivially_copy_pass_by_ref
++ clippy::trivially_copy_pass_by_ref,
++ clippy::unnecessary_unwrap
+ )]
+ // Ignored clippy_pedantic lints.
+ #![allow(
+ clippy::cast_possible_truncation,
++ clippy::default_trait_access,
+ clippy::empty_enum,
++ clippy::expl_impl_clone_on_copy,
+ clippy::if_not_else,
+ clippy::items_after_statements,
++ clippy::match_same_arms,
++ clippy::missing_errors_doc,
+ clippy::module_name_repetitions,
++ clippy::must_use_candidate,
++ clippy::option_if_let_else,
+ clippy::shadow_unrelated,
+ clippy::similar_names,
+ clippy::single_match_else,
++ clippy::too_many_lines,
+ clippy::unseparated_literal_suffix,
+ clippy::use_self,
+- clippy::used_underscore_binding
++ clippy::used_underscore_binding,
++ clippy::wildcard_imports
+ )]
+
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+ feature = "proc-macro"
+ ))]
+ extern crate proc_macro;
+ extern crate proc_macro2;
+ extern crate unicode_xid;
+
+ #[cfg(feature = "printing")]
+ extern crate quote;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[macro_use]
+ mod macros;
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+ #[macro_use]
+ pub mod group;
+@@ -302,17 +322,16 @@ pub use crate::ident::Ident;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ mod attr;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::attr::{
+ AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
+ };
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod bigint;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ mod data;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::data::{
+ Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
+ Visibility,
+@@ -359,19 +378,17 @@ pub use crate::item::{
+ #[cfg(feature = "full")]
+ mod file;
+ #[cfg(feature = "full")]
+ pub use crate::file::File;
+
+ mod lifetime;
+ pub use crate::lifetime::Lifetime;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod lit;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
+ };
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ mod mac;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::mac::{Macro, MacroDelimiter};
+@@ -436,16 +453,19 @@ pub mod parse_quote;
+ feature = "proc-macro"
+ ))]
+ #[doc(hidden)]
+ pub mod parse_macro_input;
+
+ #[cfg(all(feature = "parsing", feature = "printing"))]
+ pub mod spanned;
+
++#[cfg(all(feature = "parsing", feature = "full"))]
++mod whitespace;
++
+ mod gen {
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+ /// Each method of the [`Visit`] trait is a hook that can be overridden to
+ /// customize the behavior when visiting the corresponding type of node. By
+ /// default, every method recursively visits the substructure of the input
+ /// by invoking the right visitor method of each of its fields.
+ ///
+@@ -477,17 +497,17 @@ mod gen {
+ /// v.visit_expr(&*node.left);
+ /// v.visit_bin_op(&node.op);
+ /// v.visit_expr(&*node.right);
+ /// }
+ ///
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit"` feature.*
++ /// *This module is available only if Syn is built with the `"visit"` feature.*
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+ /// This visitor will print the name of every freestanding function in the
+ /// syntax tree, including nested functions.
+ ///
+@@ -598,17 +618,17 @@ mod gen {
+ /// v.visit_expr_mut(&mut *node.left);
+ /// v.visit_bin_op_mut(&mut node.op);
+ /// v.visit_expr_mut(&mut *node.right);
+ /// }
+ ///
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit-mut"`
++ /// *This module is available only if Syn is built with the `"visit-mut"`
+ /// feature.*
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+ /// This mut visitor replaces occurrences of u256 suffixed integer literals
+ /// like `999u256` with a macro invocation `bigint::u256!(999)`.
+@@ -697,17 +717,17 @@ mod gen {
+ /// op: v.fold_bin_op(node.op),
+ /// right: Box::new(v.fold_expr(*node.right)),
+ /// }
+ /// }
+ ///
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"fold"` feature.*
++ /// *This module is available only if Syn is built with the `"fold"` feature.*
+ ///
+ /// <br>
+ ///
+ /// # Example
+ ///
+ /// This fold inserts parentheses to fully parenthesize any expression.
+ ///
+ /// ```
+@@ -739,43 +759,63 @@ mod gen {
+ ///
+ /// // Output: (((a)()) + (((b)((1))) * ((c).d)))
+ /// }
+ /// ```
+ #[cfg(feature = "fold")]
+ #[rustfmt::skip]
+ pub mod fold;
+
++ #[cfg(feature = "clone-impls")]
++ #[rustfmt::skip]
++ mod clone;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod eq;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod hash;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod debug;
++
+ #[cfg(any(feature = "full", feature = "derive"))]
+ #[path = "../gen_helper.rs"]
+ mod helper;
+ }
+ pub use crate::gen::*;
+
+ // Not public API.
+ #[doc(hidden)]
+ pub mod export;
+
+ mod custom_keyword;
+ mod custom_punctuation;
+ mod sealed;
++mod span;
++mod thread;
+
+ #[cfg(feature = "parsing")]
+ mod lookahead;
+
+ #[cfg(feature = "parsing")]
+ pub mod parse;
+
+-mod span;
++#[cfg(feature = "full")]
++mod reserved;
++
++#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
++mod verbatim;
+
+ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+ mod print;
+
+-mod thread;
+-
+ ////////////////////////////////////////////////////////////////////////////////
+
+ #[allow(dead_code, non_camel_case_types)]
+ struct private;
+
+ // https://github.com/rust-lang/rust/issues/62830
+ #[cfg(feature = "parsing")]
+ mod rustdoc_workaround {
+@@ -795,24 +835,24 @@ pub use crate::error::{Error, Result};
+ /// messages.
+ ///
+ /// This function parses a `proc_macro::TokenStream` which is the type used for
+ /// interop with the compiler in a procedural macro. To parse a
+ /// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
+ ///
+ /// [`syn::parse2`]: parse2
+ ///
+-/// *This function is available if Syn is built with both the `"parsing"` and
++/// *This function is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use quote::quote;
+ /// use syn::DeriveInput;
+ ///
+ /// # const IGNORE_TOKENS: &str = stringify! {
+ /// #[proc_macro_derive(MyMacro)]
+ /// # };
+ /// pub fn my_macro(input: TokenStream) -> TokenStream {
+@@ -842,25 +882,25 @@ pub fn parse<T: parse::Parse>(tokens: pr
+ /// This function parses a `proc_macro2::TokenStream` which is commonly useful
+ /// when the input comes from a node of the Syn syntax tree, for example the
+ /// body tokens of a [`Macro`] node. When in a procedural macro parsing the
+ /// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
+ /// instead.
+ ///
+ /// [`syn::parse`]: parse()
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+ }
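
    An illustrative use of parse2 (not part of the patch), assuming the quote crate is available to construct a proc_macro2::TokenStream outside of a procedural macro:

        use quote::quote;

        fn main() -> syn::Result<()> {
            // quote! builds a proc_macro2::TokenStream, which parse2 consumes.
            let expr: syn::Expr = syn::parse2(quote!(1 + 2 * 3))?;
            assert!(matches!(expr, syn::Expr::Binary(_)));
            Ok(())
        }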
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ ///
+ /// # Hygiene
+ ///
+ /// Every span in the resulting syntax tree will be set to resolve at the macro
+ /// call site.
+ ///
+ /// # Examples
+ ///
+@@ -869,19 +909,17 @@ pub fn parse2<T: parse::Parse>(tokens: p
+ ///
+ /// fn run() -> Result<()> {
+ /// let code = "assert_eq!(u8::max_value(), 255)";
+ /// let expr = syn::parse_str::<Expr>(code)?;
+ /// println!("{:#?}", expr);
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(feature = "parsing")]
+ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ parse::Parser::parse_str(T::parse, s)
+ }
+
+ // FIXME the name parse_file makes it sound like you might pass in a path to a
+ // file, rather than the content.
+@@ -889,17 +927,17 @@ pub fn parse_str<T: parse::Parse>(s: &st
+ ///
+ /// This is different from `syn::parse_str::<File>(content)` in two ways:
+ ///
+ /// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
+ /// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
+ ///
+ /// If present, either of these would be an error using `from_str`.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` and
++/// *This function is available only if Syn is built with the `"parsing"` and
+ /// `"full"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::error::Error;
+ /// use std::fs::File;
+ /// use std::io::Read;
+@@ -913,35 +951,36 @@ pub fn parse_str<T: parse::Parse>(s: &st
+ /// if let Some(shebang) = ast.shebang {
+ /// println!("{}", shebang);
+ /// }
+ /// println!("{} items", ast.items.len());
+ ///
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub fn parse_file(mut content: &str) -> Result<File> {
+ // Strip the BOM if it is present
+ const BOM: &str = "\u{feff}";
+ if content.starts_with(BOM) {
+ content = &content[BOM.len()..];
+ }
+
+ let mut shebang = None;
+- if content.starts_with("#!") && !content.starts_with("#![") {
+- if let Some(idx) = content.find('\n') {
+- shebang = Some(content[..idx].to_string());
+- content = &content[idx..];
+- } else {
+- shebang = Some(content.to_string());
+- content = "";
++ if content.starts_with("#!") {
++ let rest = whitespace::skip(&content[2..]);
++ if !rest.starts_with('[') {
++ if let Some(idx) = content.find('\n') {
++ shebang = Some(content[..idx].to_string());
++ content = &content[idx..];
++ } else {
++ shebang = Some(content.to_string());
++ content = "";
++ }
+ }
+ }
+
+ let mut file: File = parse_str(content)?;
+ file.shebang = shebang;
+ Ok(file)
+ }
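
    A sketch of the shebang handling implemented above (assuming syn 1.0.40 with the "full" and "parsing" features): a real shebang line is split off into File::shebang, while `#!` that introduces an inner attribute is left for the parser.

        fn main() -> syn::Result<()> {
            let file = syn::parse_file("#!/usr/bin/env rustx\nfn main() {}\n")?;
            assert_eq!(file.shebang.as_deref(), Some("#!/usr/bin/env rustx"));
            assert_eq!(file.items.len(), 1);

            // `#!` followed by `[` (possibly after whitespace) is an inner
            // attribute, not a shebang, per the whitespace::skip check above.
            let attrs_only = syn::parse_file("#![allow(dead_code)]\n")?;
            assert!(attrs_only.shebang.is_none());
            Ok(())
        }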
+diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
+--- third_party/rust/syn/src/lifetime.rs
++++ third_party/rust/syn/src/lifetime.rs
+@@ -13,20 +13,18 @@ use crate::lookahead;
+ ///
+ /// - Must start with an apostrophe.
+ /// - Must not consist of just an apostrophe: `'`.
+ /// - Character after the apostrophe must be `_` or a Unicode code point with
+ /// the XID_Start property.
+ /// - All following characters must be Unicode code points with the XID_Continue
+ /// property.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+-#[cfg_attr(feature = "extra-traits", derive(Debug))]
+-#[derive(Clone)]
+ pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+ }
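
    A brief sketch of the lifetime rules above (assuming syn 1.x with "parsing" plus the proc-macro2 crate): Lifetime::new panics on names violating those rules, and parsing enforces the same shape.

        use proc_macro2::Span;

        fn main() -> syn::Result<()> {
            // Apostrophe followed by an XID_Start character (or `_`).
            let lt = syn::Lifetime::new("'a", Span::call_site());
            assert_eq!(lt.to_string(), "'a");

            let parsed: syn::Lifetime = syn::parse_str("'a")?;
            assert_eq!(parsed.ident, "a");
            Ok(())
        }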
+
+ impl Lifetime {
+ /// # Panics
+ ///
+@@ -67,16 +65,25 @@ impl Lifetime {
+
+ impl Display for Lifetime {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ "'".fmt(formatter)?;
+ self.ident.fmt(formatter)
+ }
+ }
+
++impl Clone for Lifetime {
++ fn clone(&self) -> Self {
++ Lifetime {
++ apostrophe: self.apostrophe,
++ ident: self.ident.clone(),
++ }
++ }
++}
++
+ impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
+ }
+ }
+
+ impl Eq for Lifetime {}
+
+diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
+--- third_party/rust/syn/src/lit.rs
++++ third_party/rust/syn/src/lit.rs
+@@ -17,28 +17,25 @@ use std::hash::{Hash, Hasher};
+ use crate::lookahead;
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, Parser};
+ use crate::{Error, Result};
+
+ ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+- ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Lit #manual_extra_traits {
++ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+ /// A byte string literal: `b"foo"`.
+ ByteStr(LitByteStr),
+
+ /// A byte literal: `b'f'`.
+ Byte(LitByte),
+@@ -59,184 +56,98 @@ ast_enum_of_structs! {
+
+ /// A raw token literal not interpreted by Syn.
+ Verbatim(Literal),
+ }
+ }
+
+ ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitStr #manual_extra_traits_debug {
+- repr: Box<LitStrRepr>,
++ pub struct LitStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+-struct LitStrRepr {
+- token: Literal,
+- suffix: Box<str>,
+-}
+-
+ ast_struct! {
+ /// A byte string literal: `b"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByteStr #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByteStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A byte literal: `b'f'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByte #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByte {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A character literal: `'a'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitChar #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitChar {
++ repr: Box<LitRepr>,
+ }
+ }
+
++struct LitRepr {
++ token: Literal,
++ suffix: Box<str>,
++}
++
+ ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitInt #manual_extra_traits_debug {
++ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+ suffix: Box<str>,
+ }
+
+ ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinite or NaN.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitFloat #manual_extra_traits_debug {
++ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+ suffix: Box<str>,
+ }
+
+ ast_struct! {
+ /// A boolean literal: `true` or `false`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitBool #manual_extra_traits_debug {
++ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Lit {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Lit {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Lit::Str(this), Lit::Str(other)) => this == other,
+- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
+- (Lit::Byte(this), Lit::Byte(other)) => this == other,
+- (Lit::Char(this), Lit::Char(other)) => this == other,
+- (Lit::Int(this), Lit::Int(other)) => this == other,
+- (Lit::Float(this), Lit::Float(other)) => this == other,
+- (Lit::Bool(this), Lit::Bool(other)) => this == other,
+- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Lit {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Lit::Str(lit) => {
+- hash.write_u8(0);
+- lit.hash(hash);
+- }
+- Lit::ByteStr(lit) => {
+- hash.write_u8(1);
+- lit.hash(hash);
+- }
+- Lit::Byte(lit) => {
+- hash.write_u8(2);
+- lit.hash(hash);
+- }
+- Lit::Char(lit) => {
+- hash.write_u8(3);
+- lit.hash(hash);
+- }
+- Lit::Int(lit) => {
+- hash.write_u8(4);
+- lit.hash(hash);
+- }
+- Lit::Float(lit) => {
+- hash.write_u8(5);
+- lit.hash(hash);
+- }
+- Lit::Bool(lit) => {
+- hash.write_u8(6);
+- lit.hash(hash);
+- }
+- Lit::Verbatim(lit) => {
+- hash.write_u8(7);
+- lit.to_string().hash(hash);
+- }
+- }
+- }
+-}
+-
+ impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+- let mut lit = Literal::string(value);
+- lit.set_span(span);
++ let mut token = Literal::string(value);
++ token.set_span(span);
+ LitStr {
+- repr: Box::new(LitStrRepr {
+- token: lit,
++ repr: Box::new(LitRepr {
++ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+ /// Parse a syntax tree node from the content of this string literal.
+ ///
+ /// All spans in the syntax tree will point to the span of this `LitStr`.
+ ///
+ /// # Example
+@@ -306,17 +217,17 @@ impl LitStr {
+ .map(|token| respan_token_tree(token, span))
+ .collect()
+ }
+
+ // Token tree with every span replaced by the given one.
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+- let stream = respan_token_stream(g.stream().clone(), span);
++ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+ other => other.set_span(span),
+ }
+ token
+ }
+
+@@ -340,86 +251,124 @@ impl LitStr {
+ &self.repr.suffix
+ }
+ }
+
+ impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+- LitByteStr { token }
++ LitByteStr {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+- value::parse_lit_byte_str(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte_str(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+- LitByte { token }
++ LitByte {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> u8 {
+- value::parse_lit_byte(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+- LitChar { token }
++ LitChar {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> char {
+- value::parse_lit_char(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_char(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitInt {
+- repr: Box::new(LitIntRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not an integer literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_int(repr) {
++ Some(parse) => parse,
++ None => panic!("Not an integer literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported integer literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+ pub fn base10_digits(&self) -> &str {
+ &self.repr.digits
+ }
+
+ /// Parses the literal into a selected number type.
+@@ -487,28 +436,33 @@ impl From<Literal> for LitInt {
+ impl Display for LitInt {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ self.repr.token.fmt(formatter)
+ }
+ }
+
+ impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitFloat {
+- repr: Box::new(LitFloatRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not a float literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_float(repr) {
++ Some(parse) => parse,
++ None => panic!("Not a float literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported float literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+ pub fn base10_digits(&self) -> &str {
+ &self.repr.digits
+ }
+
+ pub fn base10_parse<N>(&self) -> Result<N>
+@@ -570,35 +524,35 @@ mod debug_impls {
+ .finish()
+ }
+ }
+
+ impl Debug for LitByteStr {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByteStr")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ impl Debug for LitByte {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByte")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ impl Debug for LitChar {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitChar")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+
+ impl Debug for LitInt {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitInt")
+@@ -621,61 +575,102 @@ mod debug_impls {
+ formatter
+ .debug_struct("LitBool")
+ .field("value", &self.value)
+ .finish()
+ }
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl Clone for LitRepr {
++ fn clone(&self) -> Self {
++ LitRepr {
++ token: self.token.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitIntRepr {
++ fn clone(&self) -> Self {
++ LitIntRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitFloatRepr {
++ fn clone(&self) -> Self {
++ LitFloatRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
+ macro_rules! lit_extra_traits {
+- ($ty:ident, $($field:ident).+) => {
+- #[cfg(feature = "extra-traits")]
+- impl Eq for $ty {}
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $ty {
++ fn clone(&self) -> Self {
++ $ty {
++ repr: self.repr.clone(),
++ }
++ }
++ }
+
+ #[cfg(feature = "extra-traits")]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+- self.$($field).+.to_string() == other.$($field).+.to_string()
++ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+ impl Hash for $ty {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+- self.$($field).+.to_string().hash(state);
++ self.repr.token.to_string().hash(state);
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
+ match marker {}
+ }
+ };
+ }
+
+-lit_extra_traits!(LitStr, repr.token);
+-lit_extra_traits!(LitByteStr, token);
+-lit_extra_traits!(LitByte, token);
+-lit_extra_traits!(LitChar, token);
+-lit_extra_traits!(LitInt, repr.token);
+-lit_extra_traits!(LitFloat, repr.token);
+-lit_extra_traits!(LitBool, value);
++lit_extra_traits!(LitStr);
++lit_extra_traits!(LitByteStr);
++lit_extra_traits!(LitByte);
++lit_extra_traits!(LitChar);
++lit_extra_traits!(LitInt);
++lit_extra_traits!(LitFloat);
++
++#[cfg(feature = "parsing")]
++#[doc(hidden)]
++#[allow(non_snake_case)]
++pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
++ match marker {}
++}
+
+ ast_enum! {
+ /// The style of a string literal, either plain quoted or a raw string like
+ /// `r##"data"##`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+ pub enum StrStyle #no_visit {
+ /// An ordinary string like `"data"`.
+ Cooked,
+ /// A raw string like `r##"data"##`.
+ ///
+ /// The unsigned integer is the number of `#` symbols used.
+ Raw(usize),
+ }
+@@ -686,43 +681,93 @@ ast_enum! {
+ #[allow(non_snake_case)]
+ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ match marker {}
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
++ use crate::buffer::Cursor;
+ use crate::parse::{Parse, ParseStream, Result};
++ use proc_macro2::Punct;
+
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+ input.step(|cursor| {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+- while let Some((ident, rest)) = cursor.ident() {
+- let value = if ident == "true" {
+- true
+- } else if ident == "false" {
+- false
+- } else {
+- break;
+- };
+- let lit_bool = LitBool {
+- value,
+- span: ident.span(),
+- };
+- return Ok((Lit::Bool(lit_bool), rest));
++
++ if let Some((ident, rest)) = cursor.ident() {
++ let value = ident == "true";
++ if value || ident == "false" {
++ let lit_bool = LitBool {
++ value,
++ span: ident.span(),
++ };
++ return Ok((Lit::Bool(lit_bool), rest));
++ }
+ }
++
++ if let Some((punct, rest)) = cursor.punct() {
++ if punct.as_char() == '-' {
++ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
++ return Ok((lit, rest));
++ }
++ }
++ }
++
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
++ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
++ let (lit, rest) = cursor.literal()?;
++
++ let mut span = neg.span();
++ span = span.join(lit.span()).unwrap_or(span);
++
++ let mut repr = lit.to_string();
++ repr.insert(0, '-');
++
++ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
++ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
++ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
++ token.set_span(span);
++ return Some((
++ Lit::Int(LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ));
++ }
++ }
++ }
++
++ let (digits, suffix) = value::parse_lit_float(&repr)?;
++ let mut token = value::to_literal(&repr, &digits, &suffix)?;
++ token.set_span(span);
++ Some((
++ Lit::Float(LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ))
++ }
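
    A sketch of the negative-literal support added above (assuming syn 1.0.40 with the "parsing" feature): a leading `-` punct followed by a numeric literal is folded into a single Lit rather than rejected.

        fn main() -> syn::Result<()> {
            match syn::parse_str::<syn::Lit>("-42i32")? {
                syn::Lit::Int(int) => {
                    // The sign is kept with the digits; the suffix is separated.
                    assert_eq!(int.base10_parse::<i32>()?, -42);
                    assert_eq!(int.suffix(), "i32");
                }
                _ => panic!("expected an integer literal"),
            }
            Ok(())
        }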
++
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+ match input.parse()? {
+ Lit::Str(lit) => Ok(lit),
+ _ => Err(head.error("expected string literal")),
+ }
+ }
+@@ -798,29 +843,29 @@ mod printing {
+ impl ToTokens for LitStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitInt {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.repr.token.to_tokens(tokens);
+ }
+ }
+@@ -850,30 +895,39 @@ mod value {
+ /// Interpret a Syn literal from a proc-macro2 literal.
+ pub fn new(token: Literal) -> Self {
+ let repr = token.to_string();
+
+ match byte(&repr, 0) {
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+- repr: Box::new(LitStrRepr { token, suffix }),
++ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+ b'"' | b'r' => {
+- return Lit::ByteStr(LitByteStr { token });
++ let (_, suffix) = parse_lit_byte_str(&repr);
++ return Lit::ByteStr(LitByteStr {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'\'' => {
+- return Lit::Byte(LitByte { token });
++ let (_, suffix) = parse_lit_byte(&repr);
++ return Lit::Byte(LitByte {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ _ => {}
+ },
+ b'\'' => {
+- return Lit::Char(LitChar { token });
++ let (_, suffix) = parse_lit_char(&repr);
++ return Lit::Char(LitChar {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'0'..=b'9' | b'-' => {
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
+ if let Some((digits, suffix)) = parse_lit_int(&repr) {
+ return Lit::Int(LitInt {
+ repr: Box::new(LitIntRepr {
+ token,
+ digits,
+@@ -900,16 +954,54 @@ mod value {
+ });
+ }
+ }
+ _ => {}
+ }
+
+ panic!("Unrecognized literal: `{}`", repr);
+ }
++
++ pub fn suffix(&self) -> &str {
++ match self {
++ Lit::Str(lit) => lit.suffix(),
++ Lit::ByteStr(lit) => lit.suffix(),
++ Lit::Byte(lit) => lit.suffix(),
++ Lit::Char(lit) => lit.suffix(),
++ Lit::Int(lit) => lit.suffix(),
++ Lit::Float(lit) => lit.suffix(),
++ Lit::Bool(_) | Lit::Verbatim(_) => "",
++ }
++ }
++
++ pub fn span(&self) -> Span {
++ match self {
++ Lit::Str(lit) => lit.span(),
++ Lit::ByteStr(lit) => lit.span(),
++ Lit::Byte(lit) => lit.span(),
++ Lit::Char(lit) => lit.span(),
++ Lit::Int(lit) => lit.span(),
++ Lit::Float(lit) => lit.span(),
++ Lit::Bool(lit) => lit.span,
++ Lit::Verbatim(lit) => lit.span(),
++ }
++ }
++
++ pub fn set_span(&mut self, span: Span) {
++ match self {
++ Lit::Str(lit) => lit.set_span(span),
++ Lit::ByteStr(lit) => lit.set_span(span),
++ Lit::Byte(lit) => lit.set_span(span),
++ Lit::Char(lit) => lit.set_span(span),
++ Lit::Int(lit) => lit.set_span(span),
++ Lit::Float(lit) => lit.set_span(span),
++ Lit::Bool(lit) => lit.span = span,
++ Lit::Verbatim(lit) => lit.set_span(span),
++ }
++ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+ /// past the end of the input buffer.
+ pub fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 {
+ let s = s.as_ref();
+ if idx < s.len() {
+ s[idx]
+@@ -999,140 +1091,144 @@ mod value {
+ assert_eq!(byte(s, 0), b'r');
+ s = &s[1..];
+
+ let mut pounds = 0;
+ while byte(s, pounds) == b'#' {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
+- for end in s[s.len() - pounds..].bytes() {
++ let close = s.rfind('"').unwrap();
++ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+- let content = s[pounds + 1..s.len() - pounds - 1]
+- .to_owned()
+- .into_boxed_str();
+- let suffix = Box::<str>::default(); // todo
++ let content = s[pounds + 1..close].to_owned().into_boxed_str();
++ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
++ // Returns (content, suffix).
++ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+ b'r' => parse_lit_byte_str_raw(s),
+ _ => unreachable!(),
+ }
+ }
+
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s.as_bytes();
++ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+- let byte = match byte(s, 0) {
++ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+ b'r' => b'\r',
+ b't' => b'\t',
+ b'\\' => b'\\',
+ b'0' => b'\0',
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+- let byte = byte(s, 0);
++ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).unwrap();
+ if ch.is_whitespace() {
+- s = &s[1..];
++ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+ },
+ b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
+ }
+ }
+ b'\r' => {
+- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
+- s = &s[2..];
++ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
++ v = &v[2..];
+ b'\n'
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+- assert_eq!(s, b"\"");
+- out
++ assert_eq!(byte(v, 0), b'"');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (out, suffix)
+ }
+
+- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
++ let (value, suffix) = parse_lit_str_raw(&s[1..]);
++ (String::from(value).into_bytes(), suffix)
+ }
+
+- pub fn parse_lit_byte(s: &str) -> u8 {
++ // Returns (value, suffix).
++ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s[2..].as_bytes();
++ let mut v = s[2..].as_bytes();
+
+- let b = match byte(s, 0) {
++ let b = match byte(v, 0) {
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+ b'r' => b'\r',
+ b't' => b'\t',
+ b'\\' => b'\\',
+ b'0' => b'\0',
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
+ }
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+
+- assert_eq!(byte(s, 0), b'\'');
+- b
++ assert_eq!(byte(v, 0), b'\'');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (b, suffix)
+ }
+
+- pub fn parse_lit_char(mut s: &str) -> char {
++ // Returns (value, suffix).
++ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+ let ch = match byte(s, 0) {
+ b'\\' => {
+ let b = byte(s, 1);
+ s = &s[2..];
+ match b {
+@@ -1158,18 +1254,19 @@ mod value {
+ }
+ }
+ _ => {
+ let ch = next_chr(s);
+ s = &s[ch.len_utf8()..];
+ ch
+ }
+ };
+- assert_eq!(s, "\'", "Expected end of char literal");
+- ch
++ assert_eq!(byte(s, 0), b'\'');
++ let suffix = s[1..].to_owned().into_boxed_str();
++ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+ where
+ S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
+ {
+ let mut ch = 0;
+ let b0 = byte(s, 0);
+@@ -1329,17 +1426,21 @@ mod value {
+ if has_e || has_dot {
+ return None;
+ }
+ has_dot = true;
+ bytes[write] = b'.';
+ }
+ b'e' | b'E' => {
+ if has_e {
+- return None;
++ if has_exponent {
++ break;
++ } else {
++ return None;
++ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+ }
+ b'-' | b'+' => {
+ if has_sign || has_exponent || !has_e {
+ return None;
+ }
+@@ -1367,16 +1468,38 @@ mod value {
+ digits.truncate(write);
+ if suffix.is_empty() || crate::ident::xid_ok(&suffix) {
+ Some((digits.into_boxed_str(), suffix.into_boxed_str()))
+ } else {
+ None
+ }
+ }
+
+- pub fn to_literal(s: &str) -> Literal {
+- let stream = s.parse::<TokenStream>().unwrap();
+- match stream.into_iter().next().unwrap() {
+- TokenTree::Literal(l) => l,
+- _ => unreachable!(),
++ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
++ if repr.starts_with('-') {
++ if suffix == "f64" {
++ digits.parse().ok().map(Literal::f64_suffixed)
++ } else if suffix == "f32" {
++ digits.parse().ok().map(Literal::f32_suffixed)
++ } else if suffix == "i64" {
++ digits.parse().ok().map(Literal::i64_suffixed)
++ } else if suffix == "i32" {
++ digits.parse().ok().map(Literal::i32_suffixed)
++ } else if suffix == "i16" {
++ digits.parse().ok().map(Literal::i16_suffixed)
++ } else if suffix == "i8" {
++ digits.parse().ok().map(Literal::i8_suffixed)
++ } else if !suffix.is_empty() {
++ None
++ } else if digits.contains('.') {
++ digits.parse().ok().map(Literal::f64_unsuffixed)
++ } else {
++ digits.parse().ok().map(Literal::i64_unsuffixed)
++ }
++ } else {
++ let stream = repr.parse::<TokenStream>().unwrap();
++ match stream.into_iter().next().unwrap() {
++ TokenTree::Literal(l) => Some(l),
++ _ => unreachable!(),
++ }
+ }
+ }
+ }
+diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
+--- third_party/rust/syn/src/mac.rs
++++ third_party/rust/syn/src/mac.rs
+@@ -1,79 +1,56 @@
+ use super::*;
+ use crate::token::{Brace, Bracket, Paren};
+ use proc_macro2::TokenStream;
+ #[cfg(feature = "parsing")]
+-use proc_macro2::{Delimiter, Span, TokenTree};
++use proc_macro2::{Delimiter, Group, Span, TokenTree};
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Macro #manual_extra_traits {
++ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+ pub tokens: TokenStream,
+ }
+ }
+
+ ast_enum! {
+ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum MacroDelimiter {
+ Paren(Paren),
+ Brace(Brace),
+ Bracket(Bracket),
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Macro {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Macro {
+- fn eq(&self, other: &Self) -> bool {
+- self.path == other.path
+- && self.bang_token == other.bang_token
+- && self.delimiter == other.delimiter
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Macro {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.path.hash(state);
+- self.bang_token.hash(state);
+- self.delimiter.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
+- match delimiter {
++fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
++ let delimiter = match macro_delimiter {
++ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
++ MacroDelimiter::Brace(_) => Delimiter::Brace,
++ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
++ };
++ let mut group = Group::new(delimiter, TokenStream::new());
++ group.set_span(match macro_delimiter {
+ MacroDelimiter::Paren(token) => token.span,
+ MacroDelimiter::Brace(token) => token.span,
+ MacroDelimiter::Bracket(token) => token.span,
+- }
++ });
++ group.span_close()
+ }
+
+ impl Macro {
+ /// Parse the tokens within the macro invocation's delimiters into a syntax
+ /// tree.
+ ///
+ /// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it
+ /// produces a more useful span when `tokens` is empty.
+@@ -158,19 +135,17 @@ impl Macro {
+ pub fn parse_body<T: Parse>(&self) -> Result<T> {
+ self.parse_body_with(T::parse)
+ }
+
+ /// Parse the tokens within the macro invocation's delimiters using the
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+- // TODO: see if we can get a group.span_close() span in here as the
+- // scope, rather than the span of the whole group.
+- let scope = delimiter_span(&self.delimiter);
++ let scope = delimiter_span_close(&self.delimiter);
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
+ input.step(|cursor| {
+ if let Some((TokenTree::Group(g), rest)) = cursor.token_tree() {
+diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
+--- third_party/rust/syn/src/macros.rs
++++ third_party/rust/syn/src/macros.rs
+@@ -1,56 +1,33 @@
+ macro_rules! ast_struct {
+ (
+ [$($attrs_pub:tt)*]
+ struct $name:ident #full $($rest:tt)*
+ ) => {
+ #[cfg(feature = "full")]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+
+ #[cfg(not(feature = "full"))]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name {
+- _noconstruct: (),
++ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+ impl ::quote::ToTokens for $name {
+ fn to_tokens(&self, _: &mut ::proc_macro2::TokenStream) {
+ unreachable!()
+ }
+ }
+ };
+
+ (
+ [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+ struct $name:ident $($rest:tt)*
+ ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+ };
+
+ ($($t:tt)*) => {
+ strip_attrs_pub!(ast_struct!($($t)*));
+ };
+ }
+
+@@ -60,29 +37,18 @@ macro_rules! ast_enum {
+ [$($attrs_pub:tt)*]
+ enum $name:ident #no_visit $($rest:tt)*
+ ) => (
+ ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
+ );
+
+ (
+ [$($attrs_pub:tt)*]
+- enum $name:ident #manual_extra_traits $($rest:tt)*
+- ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* enum $name $($rest)*
+- );
+-
+- (
+- [$($attrs_pub:tt)*]
+ enum $name:ident $($rest:tt)*
+ ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* enum $name $($rest)*
+ );
+
+ ($($t:tt)*) => {
+ strip_attrs_pub!(ast_enum!($($t)*));
+ };
+ }
+
+@@ -115,36 +81,43 @@ macro_rules! ast_enum_of_structs_impl {
+ )*
+ }
+
+ $($remaining:tt)*
+ ) => {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+- $(
+- $(
+- impl From<$member> for $name {
+- fn from(e: $member) -> $name {
+- $name::$variant(e)
+- }
+- }
+- )*
+- )*
++ $($(
++ ast_enum_from_struct!($name::$variant, $member);
++ )*)*
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens! {
+ $($remaining)*
+ ()
+ tokens
+ $name { $($variant $($member)*,)* }
+ }
+ };
+ }
+
++macro_rules! ast_enum_from_struct {
++ // No From<TokenStream> for verbatim variants.
++ ($name:ident::Verbatim, $member:ident) => {};
++
++ ($name:ident::$variant:ident, $member:ident) => {
++ impl From<$member> for $name {
++ fn from(e: $member) -> $name {
++ $name::$variant(e)
++ }
++ }
++ };
++}
++
+ #[cfg(feature = "printing")]
+ macro_rules! generate_to_tokens {
+ (do_not_generate_to_tokens $($foo:tt)*) => ();
+
+ (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident, $($next:tt)*}) => {
+ generate_to_tokens!(
+ ($($arms)* $name::$variant => {})
+ $tokens $name { $($next)* }
+diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
+--- third_party/rust/syn/src/op.rs
++++ third_party/rust/syn/src/op.rs
+@@ -1,14 +1,13 @@
+ ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+ /// The `-` operator (subtraction)
+ Sub(Token![-]),
+ /// The `*` operator (multiplication)
+ Mul(Token![*]),
+ /// The `/` operator (division)
+@@ -62,19 +61,18 @@ ast_enum! {
+ /// The `>>=` operator
+ ShrEq(Token![>>=]),
+ }
+ }
+
+ ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+ /// The `!` operator for logical inversion
+ Not(Token![!]),
+ /// The `-` operator for negation
+ Neg(Token![-]),
+ }
+diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
+--- third_party/rust/syn/src/parse.rs
++++ third_party/rust/syn/src/parse.rs
+@@ -21,18 +21,18 @@
+ //! procedural macro through [`parse_macro_input!`] as shown at the bottom of
+ //! the snippet. If the caller provides syntactically invalid input to the
+ //! procedural macro, they will receive a helpful compiler error message
+ //! pointing out the exact token that triggered the failure to parse.
+ //!
+ //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+ //! use syn::parse::{Parse, ParseStream};
+ //! use syn::punctuated::Punctuated;
+ //!
+ //! enum Item {
+ //! Struct(ItemStruct),
+ //! Enum(ItemEnum),
+@@ -104,19 +104,17 @@
+ //! ```
+ //! use syn::Type;
+ //!
+ //! # fn run_parser() -> syn::Result<()> {
+ //! let t: Type = syn::parse_str("std::collections::HashMap<String, Value>")?;
+ //! # Ok(())
+ //! # }
+ //! #
+-//! # fn main() {
+-//! # run_parser().unwrap();
+-//! # }
++//! # run_parser().unwrap();
+ //! ```
+ //!
+ //! The [`parse_quote!`] macro also uses this approach.
+ //!
+ //! [`parse_quote!`]: ../macro.parse_quote.html
+ //!
+ //! # The `Parser` trait
+ //!
+@@ -150,18 +148,18 @@
+ //!
+ //! In these cases the types provide a choice of parser functions rather than a
+ //! single `Parse` implementation, and those parser functions can be invoked
+ //! through the [`Parser`] trait.
+ //!
+ //! [`Parser`]: trait.Parser.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::parse::Parser;
+ //! use syn::punctuated::Punctuated;
+ //! use syn::{Attribute, Expr, PathSegment, Result, Token};
+ //!
+ //! fn call_some_parser_methods(input: TokenStream) -> Result<()> {
+ //! // Parse a nonempty sequence of path segments separated by `::` punctuation
+ //! // with no trailing punctuation.
+@@ -181,17 +179,17 @@
+ //! let _attrs = parser.parse(tokens)?;
+ //!
+ //! Ok(())
+ //! }
+ //! ```
+ //!
+ //! ---
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ #[path = "discouraged.rs"]
+ pub mod discouraged;
+
+ use std::cell::Cell;
+ use std::fmt::{self, Debug, Display};
+ use std::marker::PhantomData;
+ use std::mem;
+@@ -212,16 +210,21 @@ use crate::lookahead;
+ use crate::punctuated::Punctuated;
+ use crate::token::Token;
+
+ pub use crate::error::{Error, Result};
+ pub use crate::lookahead::{Lookahead1, Peek};
+
+ /// Parsing interface implemented by all types that can be parsed in a default
+ /// way from a token stream.
++///
++/// Refer to the [module documentation] for details about implementing and using
++/// the `Parse` trait.
++///
++/// [module documentation]: self
+ pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+ }
+
+ /// Input to a Syn parser function.
+ ///
+ /// See the methods of this type under the documentation of [`ParseBuffer`]. For
+ /// an overview of parsing in Syn, refer to the [module documentation].
+@@ -258,23 +261,26 @@ pub struct ParseBuffer<'a> {
+ // ParseBuffer<'a>, upcast to ParseBuffer<'short> for some lifetime shorter
+ // than 'a, and then assign a Cursor<'short> into the Cell.
+ //
+ // By extension, it would not be safe to expose an API that accepts a
+ // Cursor<'a> and trusts that it lives as long as the cursor currently in
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+ }
+
+ impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+- if !self.is_empty() && self.unexpected.get().is_none() {
+- self.unexpected.set(Some(self.cursor().span()));
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
++ let (inner, old_span) = inner_unexpected(self);
++ if old_span.is_none() {
++ inner.set(Unexpected::Some(unexpected_span));
++ }
+ }
+ }
+ }
+
+ impl<'a> Display for ParseBuffer<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ Display::fmt(&self.cursor().token_stream(), f)
+ }
+@@ -319,25 +325,22 @@ impl<'a> Debug for ParseBuffer<'a> {
+ /// #
+ /// # fn remainder_after_skipping_past_next_at(
+ /// # input: ParseStream,
+ /// # ) -> Result<proc_macro2::TokenStream> {
+ /// # skip_past_next_at(input)?;
+ /// # input.parse()
+ /// # }
+ /// #
+-/// # fn main() {
+-/// # use syn::parse::Parser;
+-/// # let remainder = remainder_after_skipping_past_next_at
+-/// # .parse_str("a @ b c")
+-/// # .unwrap();
+-/// # assert_eq!(remainder.to_string(), "b c");
+-/// # }
++/// # use syn::parse::Parser;
++/// # let remainder = remainder_after_skipping_past_next_at
++/// # .parse_str("a @ b c")
++/// # .unwrap();
++/// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+-#[derive(Copy, Clone)]
+ pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+ cursor: Cursor<'c>,
+ // This field is contravariant in 'c. Together these make StepCursor
+ // invariant in 'c. Also covariant in 'a. The user cannot cast 'c to a
+ // different lifetime but can upcast into a StepCursor with a shorter
+ // lifetime 'a.
+@@ -351,16 +354,24 @@ pub struct StepCursor<'c, 'a> {
+ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ type Target = Cursor<'c>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.cursor
+ }
+ }
+
++impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
++
++impl<'c, 'a> Clone for StepCursor<'c, 'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
+ impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+ /// The `ParseStream::step` invocation will return this same error without
+ /// advancing the stream state.
+ pub fn error<T: Display>(self, message: T) -> Error {
+ error::new_at(self.scope, self.cursor, message)
+ }
+@@ -370,46 +381,91 @@ pub(crate) fn advance_step_cursor<'c, 'a
+ // Refer to the comments within the StepCursor definition. We use the
+ // fact that a StepCursor<'c, 'a> exists as proof that 'c outlives 'a.
+ // Cursor is covariant in its lifetime parameter so we can cast a
+ // Cursor<'c> to one with the shorter lifetime Cursor<'a>.
+ let _ = proof;
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+ }
+
+-fn skip(input: ParseStream) -> bool {
+- input
+- .step(|cursor| {
+- if let Some((_lifetime, rest)) = cursor.lifetime() {
+- Ok((true, rest))
+- } else if let Some((_token, rest)) = cursor.token_tree() {
+- Ok((true, rest))
+- } else {
+- Ok((false, *cursor))
+- }
+- })
+- .unwrap()
+-}
+-
+ pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Rc<Cell<Unexpected>>,
+ ) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+- unexpected,
++ unexpected: Cell::new(Some(unexpected)),
++ }
++}
++
++pub(crate) enum Unexpected {
++ None,
++ Some(Span),
++ Chain(Rc<Cell<Unexpected>>),
++}
++
++impl Default for Unexpected {
++ fn default() -> Self {
++ Unexpected::None
++ }
++}
++
++impl Clone for Unexpected {
++ fn clone(&self) -> Self {
++ match self {
++ Unexpected::None => Unexpected::None,
++ Unexpected::Some(span) => Unexpected::Some(*span),
++ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
++ }
+ }
+ }
+
+-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
+- buffer.unexpected.clone()
++// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
++// swapping in a None is cheap.
++fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
++ let prev = cell.take();
++ let ret = prev.clone();
++ cell.set(prev);
++ ret
++}
++
++fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
++ let mut unexpected = get_unexpected(buffer);
++ loop {
++ match cell_clone(&unexpected) {
++ Unexpected::None => return (unexpected, None),
++ Unexpected::Some(span) => return (unexpected, Some(span)),
++ Unexpected::Chain(next) => unexpected = next,
++ }
++ }
++}
++
++pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
++ cell_clone(&buffer.unexpected).unwrap()
++}
++
++fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
++ if cursor.eof() {
++ return None;
++ }
++ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
++ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
++ return Some(unexpected);
++ }
++ cursor = rest;
++ }
++ if cursor.eof() {
++ None
++ } else {
++ Some(cursor.span())
++ }
+ }
+
+ impl<'a> ParseBuffer<'a> {
+ /// Parses a syntax tree node of type `T`, advancing the position of our
+ /// parse stream past it.
+ pub fn parse<T: Parse>(&self) -> Result<T> {
+ T::parse(self)
+ }
+@@ -561,24 +617,27 @@ impl<'a> ParseBuffer<'a> {
+ /// input.parse().map(UnionOrMacro::Union)
+ /// } else {
+ /// input.parse().map(UnionOrMacro::Macro)
+ /// }
+ /// }
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor().skip().map_or(false, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor()
++ .skip()
++ .and_then(Cursor::skip)
++ .map_or(false, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+@@ -610,22 +669,20 @@ impl<'a> ParseBuffer<'a> {
+ /// ident: input.parse()?,
+ /// paren_token: parenthesized!(content in input),
+ /// fields: content.parse_terminated(Type::parse)?,
+ /// semi_token: input.parse()?,
+ /// })
+ /// }
+ /// }
+ /// #
+- /// # fn main() {
+- /// # let input = quote! {
+- /// # struct S(A, B);
+- /// # };
+- /// # syn::parse2::<TupleStruct>(input).unwrap();
+- /// # }
++ /// # let input = quote! {
++ /// # struct S(A, B);
++ /// # };
++ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ pub fn parse_terminated<T, P: Parse>(
+ &self,
+ parser: fn(ParseStream) -> Result<T>,
+ ) -> Result<Punctuated<T, P>> {
+ Punctuated::parse_terminated_with(self, parser)
+ }
+
+@@ -842,18 +899,18 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// ```
+ pub fn fork(&self) -> Self {
+ ParseBuffer {
+ scope: self.scope,
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+- // parses all the way.
+- unexpected: Rc::new(Cell::new(None)),
++ // parses all the way unless we `advance_to`.
++ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+ /// Triggers an error at the current position of the parse stream.
+ ///
+ /// # Example
+ ///
+ /// ```
+@@ -918,23 +975,21 @@ impl<'a> ParseBuffer<'a> {
+ /// #
+ /// # fn remainder_after_skipping_past_next_at(
+ /// # input: ParseStream,
+ /// # ) -> Result<proc_macro2::TokenStream> {
+ /// # skip_past_next_at(input)?;
+ /// # input.parse()
+ /// # }
+ /// #
+- /// # fn main() {
+- /// # use syn::parse::Parser;
+- /// # let remainder = remainder_after_skipping_past_next_at
+- /// # .parse_str("a @ b c")
+- /// # .unwrap();
+- /// # assert_eq!(remainder.to_string(), "b c");
+- /// # }
++ /// # use syn::parse::Parser;
++ /// # let remainder = remainder_after_skipping_past_next_at
++ /// # .parse_str("a @ b c")
++ /// # .unwrap();
++ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+ F: for<'c> FnOnce(StepCursor<'c, 'a>) -> Result<(R, Cursor<'c>)>,
+ {
+ // Since the user's function is required to work for any 'c, we know
+ // that the Cursor<'c> they return is either derived from the input
+ // StepCursor<'c, 'a> or from a Cursor<'static>.
+@@ -956,27 +1011,39 @@ impl<'a> ParseBuffer<'a> {
+ scope: self.scope,
+ cursor: self.cell.get(),
+ marker: PhantomData,
+ })?;
+ self.cell.set(rest);
+ Ok(node)
+ }
+
++ /// Returns the `Span` of the next token in the parse stream, or
++ /// `Span::call_site()` if this parse stream has completely exhausted its
++ /// input `TokenStream`.
++ pub fn span(&self) -> Span {
++ let cursor = self.cursor();
++ if cursor.eof() {
++ self.scope
++ } else {
++ crate::buffer::open_span_of_group(cursor)
++ }
++ }
++
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+ /// Cursors are immutable so no operations you perform against the cursor
+ /// will affect the state of this parse stream.
+ pub fn cursor(&self) -> Cursor<'a> {
+ self.cell.get()
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+- match self.unexpected.get() {
++ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+ }
+ }
+
+ impl<T: Parse> Parse for Box<T> {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -1043,32 +1110,32 @@ impl Parse for Literal {
+ }
+
+ /// Parser that can parse Rust tokens into a particular syntax tree node.
+ ///
+ /// Refer to the [module documentation] for details about parsing in Syn.
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait Parser: Sized {
+ type Output;
+
+ /// Parse a proc-macro2 token stream into the chosen syntax tree node.
+ ///
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ fn parse2(self, tokens: TokenStream) -> Result<Self::Output>;
+
+ /// Parse tokens of source code into the chosen syntax tree node.
+ ///
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+ feature = "proc-macro"
+ ))]
+ fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> {
+ self.parse2(proc_macro2::TokenStream::from(tokens))
+ }
+@@ -1083,90 +1150,96 @@ pub trait Parser: Sized {
+ /// Every span in the resulting syntax tree will be set to resolve at the
+ /// macro call site.
+ fn parse_str(self, s: &str) -> Result<Self::Output> {
+ self.parse2(proc_macro2::TokenStream::from_str(s)?)
+ }
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+ }
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ input.parse().and_then(|tokens| self.parse2(tokens))
+ }
+ }
+
+ fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+ }
+
+ impl<F, T> Parser for F
+ where
+ F: FnOnce(ParseStream) -> Result<T>,
+ {
+ type Output = T;
+
+ fn parse2(self, tokens: TokenStream) -> Result<T> {
+ let buf = TokenBuffer::new2(tokens);
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
++ } else {
+ Ok(node)
+- } else {
+- Err(state.error("unexpected token"))
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
++ } else {
+ Ok(node)
+- } else {
+- Err(state.error("unexpected token"))
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ self(input)
+ }
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
+ f.__parse_stream(input)
+ }
+
+ /// An empty syntax tree node that consumes no tokens when parsed.
+ ///
+ /// This is useful for attribute macros that want to ensure they are not
+ /// provided any attribute args.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::parse_macro_input;
+ /// use syn::parse::Nothing;
+ ///
+ /// # const IGNORE: &str = stringify! {
+ /// #[proc_macro_attribute]
+ /// # };
+ /// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
+diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
+--- third_party/rust/syn/src/parse_macro_input.rs
++++ third_party/rust/syn/src/parse_macro_input.rs
+@@ -11,18 +11,18 @@
+ /// # Intended usage
+ ///
+ /// This macro must be called from a function that returns
+ /// `proc_macro::TokenStream`. Usually this will be your proc macro entry point,
+ /// the function that has the #\[proc_macro\] / #\[proc_macro_derive\] /
+ /// #\[proc_macro_attribute\] attribute.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, Result};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+ /// struct MyMacroInput {
+ /// /* ... */
+ /// }
+ ///
+@@ -38,28 +38,52 @@
+ /// # };
+ /// pub fn my_macro(tokens: TokenStream) -> TokenStream {
+ /// let input = parse_macro_input!(tokens as MyMacroInput);
+ ///
+ /// /* ... */
+ /// # "".parse().unwrap()
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++///
++/// <br>
++///
++/// # Expansion
++///
++/// `parse_macro_input!($variable as $Type)` expands to something like:
++///
++/// ```no_run
++/// # extern crate proc_macro;
++/// #
++/// # macro_rules! doc_test {
++/// # ($variable:ident as $Type:ty) => {
++/// match syn::parse::<$Type>($variable) {
++/// Ok(syntax_tree) => syntax_tree,
++/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
++/// }
++/// # };
++/// # }
++/// #
++/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
++/// # let _ = doc_test!(input as syn::Ident);
++/// # proc_macro::TokenStream::new()
++/// # }
++/// ```
++#[macro_export]
+ macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+ $crate::export::Ok(data) => data,
+ $crate::export::Err(err) => {
+ return $crate::export::TokenStream::from(err.to_compile_error());
+ }
+ }
+ };
+ ($tokenstream:ident) => {
+- parse_macro_input!($tokenstream as _)
++ $crate::parse_macro_input!($tokenstream as _)
+ };
+ }
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Can parse any type that implements Parse.
+
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+ use proc_macro::TokenStream;
+diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
+--- third_party/rust/syn/src/parse_quote.rs
++++ third_party/rust/syn/src/parse_quote.rs
+@@ -19,17 +19,17 @@
+ /// let stmt: Stmt = parse_quote! {
+ /// let #name: #ty = Default::default();
+ /// };
+ ///
+ /// println!("{:#?}", stmt);
+ /// }
+ /// ```
+ ///
+-/// *This macro is available if Syn is built with the `"parsing"` feature,
++/// *This macro is available only if Syn is built with the `"parsing"` feature,
+ /// although interpolation of syntax tree nodes into the quoted tokens is only
+ /// supported if Syn is built with the `"printing"` feature as well.*
+ ///
+ /// # Example
+ ///
+ /// The following helper function adds a bound `T: HeapSize` to every type
+ /// parameter `T` in the input generics.
+ ///
+@@ -51,28 +51,30 @@
+ ///
+ /// This macro can parse the following additional types as a special case even
+ /// though they do not implement the `Parse` trait.
+ ///
+ /// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
+ /// or inner like `#![...]`
+ /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+ /// `P` with optional trailing punctuation
++/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+ ///
+ /// [`Punctuated<T, P>`]: punctuated::Punctuated
++/// [`Vec<Stmt>`]: Block::parse_within
+ ///
+ /// # Panics
+ ///
+ /// Panics if the tokens fail to parse as the expected syntax tree type. The
+ /// caller is responsible for ensuring that the input tokens are syntactically
+ /// valid.
+ //
+ // TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
+ // https://github.com/rust-lang/rust/issues/62834
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::parse_quote::parse(
+ $crate::export::From::from(
+ $crate::export::quote::quote!($($tt)*)
+ )
+ )
+ };
+@@ -107,16 +109,18 @@ impl<T: Parse> ParseQuote for T {
+ }
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // Any other types that we want `parse_quote!` to be able to parse.
+
+ use crate::punctuated::Punctuated;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::{attr, Attribute};
++#[cfg(feature = "full")]
++use crate::{Block, Stmt};
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl ParseQuote for Attribute {
+ fn parse(input: ParseStream) -> Result<Self> {
+ if input.peek(Token![#]) && input.peek2(Token![!]) {
+ attr::parsing::single_parse_inner(input)
+ } else {
+ attr::parsing::single_parse_outer(input)
+@@ -124,8 +128,15 @@ impl ParseQuote for Attribute {
+ }
+ }
+
+ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Self::parse_terminated(input)
+ }
+ }
++
++#[cfg(feature = "full")]
++impl ParseQuote for Vec<Stmt> {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Block::parse_within(input)
++ }
++}
+diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
+--- third_party/rust/syn/src/pat.rs
++++ third_party/rust/syn/src/pat.rs
+@@ -1,31 +1,27 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Pat #manual_extra_traits {
++ pub enum Pat {
+ /// A box pattern: `box v`.
+ Box(PatBox),
+
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ Ident(PatIdent),
+
+ /// A literal pattern: `0`.
+ ///
+@@ -81,321 +77,219 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// A box pattern: `box v`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatBox {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+ pub pat: Box<Pat>,
+ }
+ }
+
+ ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// It may also be a unit struct or struct variant (e.g. `None`), or a
++ /// constant; these cannot be distinguished syntactically.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+ pub mutability: Option<Token![mut]>,
+ pub ident: Ident,
+ pub subpat: Option<(Token![@], Box<Pat>)>,
+ }
+ }
+
+ ast_struct! {
+ /// A literal pattern: `0`.
+ ///
+ /// This holds an `Expr` rather than a `Lit` because negative numbers
+ /// are represented as an `Expr::Unary`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatLit {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A macro in pattern position.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ }
+ }
+
+ ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+ pub cases: Punctuated<Pat, Token![|]>,
+ }
+ }
+
+ ast_struct! {
+ /// A path pattern like `Color::Red`, optionally qualified with a
+ /// self-type.
+ ///
+ /// Unqualified path patterns can legally refer to variants, structs,
+ /// constants or associated constants. Qualified path patterns like
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ }
+ }
+
+ ast_struct! {
+ /// A range pattern: `1..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRange {
+ pub attrs: Vec<Attribute>,
+ pub lo: Box<Expr>,
+ pub limits: RangeLimits,
+ pub hi: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A reference pattern: `&mut var`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+ pub mutability: Option<Token![mut]>,
+ pub pat: Box<Pat>,
+ }
+ }
+
+ ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+ }
+ }
+
+ ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub elems: Punctuated<Pat, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+ pub brace_token: token::Brace,
+ pub fields: Punctuated<FieldPat, Token![,]>,
+ pub dot2_token: Option<Token![..]>,
+ }
+ }
+
+ ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Pat, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+ pub pat: PatTuple,
+ }
+ }
+
+ ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A pattern that matches any value: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+ }
+ }
+
+ ast_struct! {
+ /// A single field in a struct pattern.
+ ///
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+ pub colon_token: Option<Token![:]>,
+ pub pat: Box<Pat>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Pat {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Pat {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Pat::Box(this), Pat::Box(other)) => this == other,
+- (Pat::Ident(this), Pat::Ident(other)) => this == other,
+- (Pat::Lit(this), Pat::Lit(other)) => this == other,
+- (Pat::Macro(this), Pat::Macro(other)) => this == other,
+- (Pat::Or(this), Pat::Or(other)) => this == other,
+- (Pat::Path(this), Pat::Path(other)) => this == other,
+- (Pat::Range(this), Pat::Range(other)) => this == other,
+- (Pat::Reference(this), Pat::Reference(other)) => this == other,
+- (Pat::Rest(this), Pat::Rest(other)) => this == other,
+- (Pat::Slice(this), Pat::Slice(other)) => this == other,
+- (Pat::Struct(this), Pat::Struct(other)) => this == other,
+- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
+- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
+- (Pat::Type(this), Pat::Type(other)) => this == other,
+- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Pat::Wild(this), Pat::Wild(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Pat {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Pat::Box(pat) => {
+- hash.write_u8(0);
+- pat.hash(hash);
+- }
+- Pat::Ident(pat) => {
+- hash.write_u8(1);
+- pat.hash(hash);
+- }
+- Pat::Lit(pat) => {
+- hash.write_u8(2);
+- pat.hash(hash);
+- }
+- Pat::Macro(pat) => {
+- hash.write_u8(3);
+- pat.hash(hash);
+- }
+- Pat::Or(pat) => {
+- hash.write_u8(4);
+- pat.hash(hash);
+- }
+- Pat::Path(pat) => {
+- hash.write_u8(5);
+- pat.hash(hash);
+- }
+- Pat::Range(pat) => {
+- hash.write_u8(6);
+- pat.hash(hash);
+- }
+- Pat::Reference(pat) => {
+- hash.write_u8(7);
+- pat.hash(hash);
+- }
+- Pat::Rest(pat) => {
+- hash.write_u8(8);
+- pat.hash(hash);
+- }
+- Pat::Slice(pat) => {
+- hash.write_u8(9);
+- pat.hash(hash);
+- }
+- Pat::Struct(pat) => {
+- hash.write_u8(10);
+- pat.hash(hash);
+- }
+- Pat::Tuple(pat) => {
+- hash.write_u8(11);
+- pat.hash(hash);
+- }
+- Pat::TupleStruct(pat) => {
+- hash.write_u8(12);
+- pat.hash(hash);
+- }
+- Pat::Type(pat) => {
+- hash.write_u8(13);
+- pat.hash(hash);
+- }
+- Pat::Verbatim(pat) => {
+- hash.write_u8(14);
+- TokenStreamHelper(pat).hash(hash);
+- }
+- Pat::Wild(pat) => {
+- hash.write_u8(15);
+- pat.hash(hash);
+- }
+- Pat::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-mod parsing {
++pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
+ use crate::path;
+
+ impl Parse for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && ({
+ input.peek2(Token![::])
+ || input.peek2(Token![!])
+ || input.peek2(token::Brace)
+ || input.peek2(token::Paren)
+ || input.peek2(Token![..])
+@@ -406,17 +300,16 @@ mod parsing {
+ ahead.is_empty() || ahead.peek(Token![,])
+ }
+ })
+ || input.peek(Token![self]) && input.peek2(Token![::])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+ } else if lookahead.peek(Token![_]) {
+ input.call(pat_wild).map(Pat::Wild)
+ } else if input.peek(Token![box]) {
+ input.call(pat_box).map(Pat::Box)
+ } else if input.peek(Token![-]) || lookahead.peek(Lit) {
+@@ -429,28 +322,29 @@ mod parsing {
+ input.call(pat_ident).map(Pat::Ident)
+ } else if lookahead.peek(Token![&]) {
+ input.call(pat_reference).map(Pat::Reference)
+ } else if lookahead.peek(token::Paren) {
+ input.call(pat_tuple).map(Pat::Tuple)
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+- input.call(pat_rest).map(Pat::Rest)
++ pat_range_half_open(input, begin)
+ } else {
+ Err(lookahead.error())
+ }
+ }
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
++ let begin = input.fork();
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if input.peek(Token![..]) {
+- return pat_range(input, qself, path).map(Pat::Range);
++ return pat_range(input, begin, qself, path);
+ }
+
+ if qself.is_some() {
+ return Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+ qself,
+ path,
+ }));
+@@ -482,17 +376,17 @@ mod parsing {
+ }
+ }
+
+ if input.peek(token::Brace) {
+ pat_struct(input, path).map(Pat::Struct)
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+- pat_range(input, qself, path).map(Pat::Range)
++ pat_range(input, begin, qself, path)
+ } else {
+ Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+ qself,
+ path,
+ }))
+ }
+ }
+@@ -541,17 +435,17 @@ mod parsing {
+ fn pat_struct(input: ParseStream, path: Path) -> Result<PatStruct> {
+ let content;
+ let brace_token = braced!(content in input);
+
+ let mut fields = Punctuated::new();
+ while !content.is_empty() && !content.peek(Token![..]) {
+ let value = content.call(field_pat)?;
+ fields.push_value(value);
+- if !content.peek(Token![,]) {
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+ let dot2_token = if fields.empty_or_trailing() && content.peek(Token![..]) {
+ Some(content.parse()?)
+@@ -573,29 +467,30 @@ mod parsing {
+ match *self {
+ Member::Named(_) => false,
+ Member::Unnamed(_) => true,
+ }
+ }
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+ let member: Member = input.parse()?;
+
+ if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:])
+ || member.is_unnamed()
+ {
+ return Ok(FieldPat {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token: input.parse()?,
+- pat: input.parse()?,
++ pat: Box::new(multi_pat(input)?),
+ });
+ }
+
+ let ident = match member {
+ Member::Named(ident) => ident,
+ Member::Unnamed(_) => unreachable!(),
+ };
+
+@@ -605,49 +500,76 @@ mod parsing {
+ mutability,
+ ident: ident.clone(),
+ subpat: None,
+ });
+
+ if let Some(boxed) = boxed {
+ pat = Pat::Box(PatBox {
+ attrs: Vec::new(),
++ box_token: boxed,
+ pat: Box::new(pat),
+- box_token: boxed,
+ });
+ }
+
+ Ok(FieldPat {
++ attrs,
+ member: Member::Named(ident),
++ colon_token: None,
+ pat: Box::new(pat),
+- attrs: Vec::new(),
+- colon_token: None,
+ })
+ }
+
+- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
+- Ok(PatRange {
+- attrs: Vec::new(),
+- lo: Box::new(Expr::Path(ExprPath {
++ fn pat_range(
++ input: ParseStream,
++ begin: ParseBuffer,
++ qself: Option<QSelf>,
++ path: Path,
++ ) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
+ attrs: Vec::new(),
+- qself,
+- path,
+- })),
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- })
++ lo: Box::new(Expr::Path(ExprPath {
++ attrs: Vec::new(),
++ qself,
++ path,
++ })),
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
++ }
++
++ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if hi.is_some() {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ } else {
++ match limits {
++ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
++ attrs: Vec::new(),
++ dot2_token,
++ })),
++ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
++ }
++ }
+ }
+
+ fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
+ let content;
+ let paren_token = parenthesized!(content in input);
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+ }
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ }
+
+@@ -663,225 +585,280 @@ mod parsing {
+ attrs: Vec::new(),
+ and_token: input.parse()?,
+ mutability: input.parse()?,
+ pat: input.parse()?,
+ })
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+- let lo = input.call(pat_lit_expr)?;
++ let begin = input.fork();
++ let lo = input.call(pat_lit_expr)?.unwrap();
+ if input.peek(Token![..]) {
+- Ok(Pat::Range(PatRange {
+- attrs: Vec::new(),
+- lo,
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- }))
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
++ attrs: Vec::new(),
++ lo,
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Ok(Pat::Lit(PatLit {
+ attrs: Vec::new(),
+ expr: lo,
+ }))
+ }
+ }
+
+- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
++ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
++ if input.is_empty()
++ || input.peek(Token![|])
++ || input.peek(Token![=>])
++ || input.peek(Token![:]) && !input.peek(Token![::])
++ || input.peek(Token![,])
++ || input.peek(Token![;])
++ {
++ return Ok(None);
++ }
++
+ let neg: Option<Token![-]> = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let expr = if lookahead.peek(Lit) {
+ Expr::Lit(input.parse()?)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ {
+ Expr::Path(input.parse()?)
+ } else {
+ return Err(lookahead.error());
+ };
+
+- Ok(Box::new(if let Some(neg) = neg {
++ Ok(Some(Box::new(if let Some(neg) = neg {
+ Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: UnOp::Neg(neg),
+ expr: Box::new(expr),
+ })
+ } else {
+ expr
+- }))
++ })))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+ let content;
+ let bracket_token = bracketed!(content in input);
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+ }
+ let punct = content.parse()?;
+ elems.push_punct(punct);
+ }
+
+ Ok(PatSlice {
+ attrs: Vec::new(),
+ bracket_token,
+ elems,
+ })
+ }
+
+- fn pat_rest(input: ParseStream) -> Result<PatRest> {
+- Ok(PatRest {
+- attrs: Vec::new(),
+- dot2_token: input.parse()?,
+- })
++ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
++ multi_pat_impl(input, None)
++ }
++
++ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
++ let leading_vert: Option<Token![|]> = input.parse()?;
++ multi_pat_impl(input, leading_vert)
++ }
++
++ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
++ let mut pat: Pat = input.parse()?;
++ if leading_vert.is_some()
++ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
++ {
++ let mut cases = Punctuated::new();
++ cases.push_value(pat);
++ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
++ let punct = input.parse()?;
++ cases.push_punct(punct);
++ let pat: Pat = input.parse()?;
++ cases.push_value(pat);
++ }
++ pat = Pat::Or(PatOr {
++ attrs: Vec::new(),
++ leading_vert,
++ cases,
++ });
++ }
++ Ok(pat)
+ }
+ }
+
+ #[cfg(feature = "printing")]
+ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+ if let Some((at_token, subpat)) = &self.subpat {
+ at_token.to_tokens(tokens);
+ subpat.to_tokens(tokens);
+ }
+ }
+ }
+
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+ // NOTE: We need a comma before the dot2 token if it is present.
+ if !self.fields.empty_or_trailing() && self.dot2_token.is_some() {
+ <Token![,]>::default().to_tokens(tokens);
+ }
+ self.dot2_token.to_tokens(tokens);
+ });
+ }
+ }
+
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatType {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ tokens.append_all(self.attrs.outer());
+ self.pat.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ private::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+ }
+ }
+
+ impl ToTokens for PatBox {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.box_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.expr.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.lo.to_tokens(tokens);
+ match &self.limits {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+ RangeLimits::Closed(t) => t.to_tokens(tokens),
+ }
+ self.hi.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+ }
+ }
+
+ impl ToTokens for PatMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
+ }
+ self.pat.to_tokens(tokens);
+ }
+ }
+ }
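The pat.rs hunks above centralize or-pattern handling in the new multi_pat/multi_pat_with_leading_vert helpers (reused for tuple and slice elements and, later in this patch, for let bindings) and make pat_lit_expr optional so malformed range bounds fall back to Pat::Verbatim instead of erroring. As a plain-Rust reminder of the surface syntax these branches accept (an illustration, not part of the patch):

    fn classify(n: i32) -> &'static str {
        match n {
            // a leading vert plus alternatives is what multi_pat groups into Pat::Or
            | 1 | 2 | 3 => "small",
            // a closed range becomes Pat::Range
            4..=9 => "medium",
            _ => "other",
        }
    }

    fn main() {
        assert_eq!(classify(2), "small");
        assert_eq!(classify(7), "medium");
    }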
+diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
+--- third_party/rust/syn/src/path.rs
++++ third_party/rust/syn/src/path.rs
+@@ -1,15 +1,15 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+- /// A path at which a named item is exported: `std::collections::HashMap`.
++ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+ pub segments: Punctuated<PathSegment, Token![::]>,
+ }
+ }
+
+ impl<T> From<T> for Path
+@@ -24,17 +24,17 @@ where
+ path.segments.push_value(segment.into());
+ path
+ }
+ }
+
+ ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct PathSegment {
+ pub ident: Ident,
+ pub arguments: PathArguments,
+ }
+ }
+
+ impl<T> From<T> for PathSegment
+@@ -47,17 +47,17 @@ where
+ arguments: PathArguments::None,
+ }
+ }
+ }
+
+ ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Angle bracketed
+ ///
+ /// The `<'a, T>` in `std::slice::iter<'a, T>`.
+ ///
+ /// ## Parenthesized
+ ///
+@@ -93,17 +93,17 @@ impl PathArguments {
+ PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
+ }
+ }
+ }
+
+ ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum GenericArgument {
+ /// A lifetime argument.
+ Lifetime(Lifetime),
+ /// A type argument.
+ Type(Type),
+ /// A binding (equality constraint) on an associated type: the `Item =
+ /// u8` in `Iterator<Item = u8>`.
+@@ -117,55 +117,55 @@ ast_enum! {
+ Const(Expr),
+ }
+ }
+
+ ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+ pub lt_token: Token![<],
+ pub args: Punctuated<GenericArgument, Token![,]>,
+ pub gt_token: Token![>],
+ }
+ }
+
+ ast_struct! {
+ /// A binding (equality constraint) on an associated type: `Item = u8`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Binding {
+ pub ident: Ident,
+ pub eq_token: Token![=],
+ pub ty: Type,
+ }
+ }
+
+ ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Constraint {
+ pub ident: Ident,
+ pub colon_token: Token![:],
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+ }
+
+ ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+ /// `(A, B)`
+ pub inputs: Punctuated<Type, Token![,]>,
+ /// `C`
+ pub output: ReturnType,
+ }
+@@ -184,17 +184,17 @@ ast_struct! {
+ /// ^~~~~~ ~~~~~~~~~~~~~~^
+ /// ty position = 3
+ ///
+ /// <Vec<T>>::AssociatedItem
+ /// ^~~~~~ ^
+ /// ty position = 0
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct QSelf {
+ pub lt_token: Token![<],
+ pub ty: Box<Type>,
+ pub position: usize,
+ pub as_token: Option<Token![as]>,
+ pub gt_token: Token![>],
+ }
+@@ -286,21 +286,17 @@ pub mod parsing {
+ impl Parse for PathSegment {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Self::parse_helper(input, false)
+ }
+ }
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+- if input.peek(Token![super])
+- || input.peek(Token![self])
+- || input.peek(Token![crate])
+- || input.peek(Token![extern])
+- {
++ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+
+ let ident = if input.peek(Token![Self]) {
+ input.call(Ident::parse_any)?
+ } else {
+ input.parse()?
+@@ -353,17 +349,17 @@ pub mod parsing {
+ },
+ })
+ }
+ }
+
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Path, Result, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+@@ -395,17 +391,16 @@ pub mod parsing {
+ segments: {
+ let mut segments = Punctuated::new();
+ loop {
+ if !input.peek(Ident)
+ && !input.peek(Token![super])
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+- && !input.peek(Token![extern])
+ {
+ break;
+ }
+ let ident = Ident::parse_any(input)?;
+ segments.push_value(PathSegment::from(ident));
+ if !input.peek(Token![::]) {
+ break;
+ }
+@@ -428,17 +423,17 @@ pub mod parsing {
+ /// For them to compare equal, it must be the case that:
+ ///
+ /// - the path has no leading colon,
+ /// - the number of path segments is 1,
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{Attribute, Error, Meta, NestedMeta, Result};
+ /// # use std::iter::FromIterator;
+ ///
+@@ -467,17 +462,17 @@ pub mod parsing {
+ ///
+ /// A path is considered an ident if:
+ ///
+ /// - the path has no leading colon,
+ /// - the number of path segments is 1, and
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
+ && self.segments.len() == 1
+ && self.segments[0].arguments.is_none()
+ {
+ Some(&self.segments[0].ident)
+ } else {
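The path.rs changes drop `extern` from the keywords accepted as bare path segments and reword the is_ident/get_ident documentation quoted above. A small, hedged sketch of that documented contract using the crate's public API (assumes the default "parsing" feature):

    fn main() {
        let path: syn::Path = syn::parse_str("serde").unwrap();
        // one segment, no leading colon, no arguments: counts as a plain ident
        assert!(path.is_ident("serde"));
        assert!(path.get_ident().is_some());

        let longer: syn::Path = syn::parse_str("serde::Serialize").unwrap();
        assert!(!longer.is_ident("serde"));
        assert!(longer.get_ident().is_none());
    }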
+diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
+--- third_party/rust/syn/src/punctuated.rs
++++ third_party/rust/syn/src/punctuated.rs
+@@ -17,16 +17,18 @@
+ //!
+ //! ```text
+ //! a_function_call(arg1, arg2, arg3);
+ //! ~~~~^ ~~~~^ ~~~~
+ //! ```
+
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
++#[cfg(feature = "extra-traits")]
++use std::hash::{Hash, Hasher};
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use std::iter;
+ use std::iter::FromIterator;
+ use std::ops::{Index, IndexMut};
+ use std::option;
+ use std::slice;
+ use std::vec;
+
+@@ -36,18 +38,16 @@ use crate::parse::{Parse, ParseStream, R
+ use crate::token::Token;
+
+ /// A punctuated sequence of syntax tree nodes of type `T` separated by
+ /// punctuation of type `P`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+ }
+
+ impl<T, P> Punctuated<T, P> {
+ /// Creates an empty punctuated sequence.
+ pub fn new() -> Punctuated<T, P> {
+@@ -71,32 +71,29 @@ impl<T, P> Punctuated<T, P> {
+ self.inner.len() + if self.last.is_some() { 1 } else { 0 }
+ }
+
+ /// Borrows the first element in this sequence.
+ pub fn first(&self) -> Option<&T> {
+ self.iter().next()
+ }
+
++ /// Mutably borrows the first element in this sequence.
++ pub fn first_mut(&mut self) -> Option<&mut T> {
++ self.iter_mut().next()
++ }
++
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+- if self.last.is_some() {
+- self.last.as_ref().map(Box::as_ref)
+- } else {
+- self.inner.last().map(|pair| &pair.0)
+- }
++ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+- if self.last.is_some() {
+- self.last.as_mut().map(Box::as_mut)
+- } else {
+- self.inner.last_mut().map(|pair| &mut pair.0)
+- }
++ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+ pub fn iter(&self) -> Iter<T> {
+ Iter {
+ inner: Box::new(PrivateIter {
+ inner: self.inner.iter(),
+ last: self.last.as_ref().map(Box::as_ref).into_iter(),
+@@ -225,23 +222,29 @@ impl<T, P> Punctuated<T, P> {
+
+ if index == self.len() {
+ self.push(value);
+ } else {
+ self.inner.insert(index, (value, Default::default()));
+ }
+ }
+
++ /// Clears the sequence of all values and punctuation, making it empty.
++ pub fn clear(&mut self) {
++ self.inner.clear();
++ self.last = None;
++ }
++
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+ where
+ T: Parse,
+ P: Parse,
+ {
+ Self::parse_terminated_with(input, T::parse)
+@@ -251,17 +254,17 @@ impl<T, P> Punctuated<T, P> {
+ /// separated by punctuation of type `P`, with optional trailing
+ /// punctuation.
+ ///
+ /// Like [`parse_terminated`], the entire content of this stream is expected
+ /// to be parsed.
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated_with(
+ input: ParseStream,
+ parser: fn(ParseStream) -> Result<T>,
+ ) -> Result<Self>
+ where
+ P: Parse,
+@@ -287,17 +290,17 @@ impl<T, P> Punctuated<T, P> {
+ /// Parses one or more occurrences of `T` separated by punctuation of type
+ /// `P`, not accepting trailing punctuation.
+ ///
+ /// Parsing continues as long as punctuation `P` is present at the head of
+ /// the stream. This method returns upon parsing a `T` and observing that it
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+ where
+ T: Parse,
+ P: Token + Parse,
+ {
+ Self::parse_separated_nonempty_with(input, T::parse)
+@@ -307,17 +310,17 @@ impl<T, P> Punctuated<T, P> {
+ /// separated by punctuation of type `P`, not accepting trailing
+ /// punctuation.
+ ///
+ /// Like [`parse_separated_nonempty`], may complete early without parsing
+ /// the entire content of this stream.
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty_with(
+ input: ParseStream,
+ parser: fn(ParseStream) -> Result<T>,
+ ) -> Result<Self>
+ where
+ P: Token + Parse,
+@@ -333,16 +336,63 @@ impl<T, P> Punctuated<T, P> {
+ let punct = input.parse()?;
+ punctuated.push_punct(punct);
+ }
+
+ Ok(punctuated)
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Punctuated<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ Punctuated {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Eq for Punctuated<T, P>
++where
++ T: Eq,
++ P: Eq,
++{
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> PartialEq for Punctuated<T, P>
++where
++ T: PartialEq,
++ P: PartialEq,
++{
++ fn eq(&self, other: &Self) -> bool {
++ let Punctuated { inner, last } = self;
++ *inner == other.inner && *last == other.last
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Hash for Punctuated<T, P>
++where
++ T: Hash,
++ P: Hash,
++{
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ let Punctuated { inner, last } = self;
++ inner.hash(state);
++ last.hash(state);
++ }
++}
++
+ #[cfg(feature = "extra-traits")]
+ impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut list = f.debug_list();
+ for (t, p) in &self.inner {
+ list.entry(t);
+ list.entry(p);
+ }
+@@ -531,17 +581,16 @@ impl<'a, T, P> ExactSizeIterator for Pai
+ }
+ }
+
+ /// An iterator over owned pairs of type `Pair<T, P>`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+ }
+
+ impl<T, P> Iterator for IntoPairs<T, P> {
+ type Item = Pair<T, P>;
+
+@@ -567,22 +616,34 @@ impl<T, P> DoubleEndedIterator for IntoP
+ }
+
+ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ fn len(&self) -> usize {
+ self.inner.len() + self.last.len()
+ }
+ }
+
++impl<T, P> Clone for IntoPairs<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoPairs {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
+ /// An iterator over owned values of type `T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+ }
+
+ impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+@@ -601,16 +662,27 @@ impl<T> DoubleEndedIterator for IntoIter
+ }
+
+ impl<T> ExactSizeIterator for IntoIter<T> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+ }
+
++impl<T> Clone for IntoIter<T>
++where
++ T: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoIter {
++ inner: self.inner.clone(),
++ }
++ }
++}
++
+ /// An iterator over borrowed values of type `&T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+ pub struct Iter<'a, T: 'a> {
+ // The `Item = &'a T` needs to be specified to support rustc 1.31 and older.
+ // On modern compilers we would be able to write just IterTrait<'a, T> where
+@@ -794,17 +866,16 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a,
+ }
+
+ /// A single syntax tree node of type `T` followed by its trailing punctuation
+ /// of type `P` if any.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+ }
+
+ impl<T, P> Pair<T, P> {
+ /// Extracts the syntax tree node from this punctuated pair, discarding the
+ /// following punctuation.
+@@ -851,16 +922,30 @@ impl<T, P> Pair<T, P> {
+ pub fn into_tuple(self) -> (T, Option<P>) {
+ match self {
+ Pair::Punctuated(t, d) => (t, Some(d)),
+ Pair::End(t) => (t, None),
+ }
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Pair<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ match self {
++ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
++ Pair::End(t) => Pair::End(t.clone()),
++ }
++ }
++}
++
+ impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
+ fn index(&self, index: usize) -> &Self::Output {
+ if index == self.len() - 1 {
+ match &self.last {
+ Some(t) => t,
+ None => &self.inner[index].0,
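punctuated.rs gains first_mut and clear and swaps the derived Clone/Eq/PartialEq/Hash for hand-written, feature-gated impls. A minimal sketch exercising the newly added accessors (assumes this vendored syn's default features):

    use proc_macro2::Span;
    use syn::punctuated::Punctuated;
    use syn::{Ident, Token};

    fn main() {
        let mut args: Punctuated<Ident, Token![,]> = Punctuated::new();
        args.push(Ident::new("alpha", Span::call_site()));
        args.push(Ident::new("beta", Span::call_site()));

        // first_mut() is one of the accessors introduced in the hunk above
        if let Some(first) = args.first_mut() {
            *first = Ident::new("gamma", Span::call_site());
        }
        assert_eq!(args.len(), 2);

        // clear() drops both the values and their punctuation
        args.clear();
        assert!(args.is_empty());
    }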
+diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/reserved.rs
+@@ -0,0 +1,42 @@
++// Type for a syntax tree node that is reserved for future use.
++//
++// For example ExprReference contains a field `raw` of type Reserved. If `&raw
++// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
++// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
++// the possibility of breaking any code.
++
++use proc_macro2::Span;
++use std::marker::PhantomData;
++
++#[cfg(feature = "extra-traits")]
++use std::fmt::{self, Debug};
++
++ast_struct! {
++ pub struct Reserved {
++ _private: PhantomData<Span>,
++ }
++}
++
++impl Default for Reserved {
++ fn default() -> Self {
++ Reserved {
++ _private: PhantomData,
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for Reserved {
++ fn clone(&self) -> Self {
++ Reserved {
++ _private: self._private,
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl Debug for Reserved {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter.debug_struct("Reserved").finish()
++ }
++}
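The new reserved.rs defines a placeholder field type so a syntax tree node can later grow a real token (for example a future Option<Token![raw]> on ExprReference) without a breaking change. A generic illustration of the same trick (hypothetical code, not part of syn's API):

    use std::marker::PhantomData;

    pub struct ExampleNode {
        pub name: String,
        // private placeholder, analogous to syn's Reserved: callers cannot
        // build the struct literally, so this field's type can change later
        _reserved: PhantomData<()>,
    }

    impl ExampleNode {
        pub fn new(name: impl Into<String>) -> Self {
            ExampleNode { name: name.into(), _reserved: PhantomData }
        }
    }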
+diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
+--- third_party/rust/syn/src/spanned.rs
++++ third_party/rust/syn/src/spanned.rs
+@@ -1,12 +1,12 @@
+ //! A trait that can provide the `Span` of the complete contents of a syntax
+ //! tree node.
+ //!
+-//! *This module is available if Syn is built with both the `"parsing"` and
++//! *This module is available only if Syn is built with both the `"parsing"` and
+ //! `"printing"` features.*
+ //!
+ //! <br>
+ //!
+ //! # Example
+ //!
+ //! Suppose in a procedural macro we have a [`Type`] that we want to assert
+ //! implements the [`Sync`] trait. Maybe this is the type of one of the fields
+@@ -92,17 +92,17 @@ use quote::spanned::Spanned as ToTokens;
+ /// [`ToTokens`] from the `quote` crate, as well as for `Span` itself.
+ ///
+ /// [`ToTokens`]: quote::ToTokens
+ ///
+ /// See the [module documentation] for an example.
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with both the `"parsing"` and
++/// *This trait is available only if Syn is built with both the `"parsing"` and
+ /// `"printing"` features.*
+ pub trait Spanned {
+ /// Returns a `Span` covering the complete contents of this syntax tree
+ /// node, or [`Span::call_site()`] if this node is empty.
+ ///
+ /// [`Span::call_site()`]: proc_macro2::Span::call_site
+ fn span(&self) -> Span;
+ }
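The spanned.rs docs above describe pointing a compile error at a field's entire type, for example to assert that it implements Sync. A sketch of that use, assuming the usual quote/proc-macro2 companions of this crate:

    use quote::quote_spanned;
    use syn::spanned::Spanned;

    // Emit an assertion whose error, if any, underlines the whole field type.
    fn assert_sync(field_ty: &syn::Type) -> proc_macro2::TokenStream {
        quote_spanned! {field_ty.span()=>
            struct _AssertSync where #field_ty: Sync;
        }
    }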
+diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
+--- third_party/rust/syn/src/stmt.rs
++++ third_party/rust/syn/src/stmt.rs
+@@ -1,25 +1,25 @@
+ use super::*;
+
+ ast_struct! {
+ /// A braced block containing Rust statements.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+ pub stmts: Vec<Stmt>,
+ }
+ }
+
+ ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+
+ /// An item definition.
+ Item(Item),
+
+ /// Expr without trailing semicolon.
+@@ -28,38 +28,39 @@ ast_enum! {
+ /// Expression with trailing semicolon.
+ Semi(Expr, Token![;]),
+ }
+ }
+
+ ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+ pub pat: Pat,
+ pub init: Option<(Token![=], Box<Expr>)>,
+ pub semi_token: Token![;],
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
+
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+- use crate::punctuated::Punctuated;
++ use proc_macro2::TokenStream;
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
+ /// use syn::parse::{Parse, ParseStream};
+ ///
+@@ -101,18 +102,18 @@ pub mod parsing {
+ /// stmts,
+ /// })
+ /// }
+ /// }
+ /// ```
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+- while input.peek(Token![;]) {
+- input.parse::<Token![;]>()?;
++ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+ }
+ let s = parse_stmt(input, true)?;
+ let requires_semicolon = if let Stmt::Expr(s) = &s {
+ expr::requires_terminator(s)
+ } else {
+@@ -141,65 +142,65 @@ pub mod parsing {
+
+ impl Parse for Stmt {
+ fn parse(input: ParseStream) -> Result<Self> {
+ parse_stmt(input, false)
+ }
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- // TODO: optimize using advance_to
++ let mut attrs = input.call(Attribute::parse_outer)?;
++
++ // brace-style macros; paren and bracket macros get parsed as
++ // expression statements.
+ let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
++ if let Ok(path) = ahead.call(Path::parse_mod_style) {
++ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
++ input.advance_to(&ahead);
++ return stmt_mac(input, attrs, path);
++ }
++ }
+
+- if {
+- let ahead = ahead.fork();
+- // Only parse braces here; paren and bracket will get parsed as
+- // expression statements
+- ahead.call(Path::parse_mod_style).is_ok()
+- && ahead.parse::<Token![!]>().is_ok()
+- && (ahead.peek(token::Brace) || ahead.peek(Ident))
+- } {
+- stmt_mac(input)
+- } else if ahead.peek(Token![let]) {
+- stmt_local(input).map(Stmt::Local)
+- } else if ahead.peek(Token![pub])
+- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![use])
+- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
+- || ahead.peek(Token![const])
+- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
+- || ahead.peek(Token![async])
+- && (ahead.peek2(Token![unsafe])
+- || ahead.peek2(Token![extern])
+- || ahead.peek2(Token![fn]))
+- || ahead.peek(Token![fn])
+- || ahead.peek(Token![mod])
+- || ahead.peek(Token![type])
+- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
+- || ahead.peek(Token![struct])
+- || ahead.peek(Token![enum])
+- || ahead.peek(Token![union]) && ahead.peek2(Ident)
+- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+- || ahead.peek(Token![trait])
+- || ahead.peek(Token![default])
+- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
+- || ahead.peek(Token![impl])
+- || ahead.peek(Token![macro])
++ if input.peek(Token![let]) {
++ stmt_local(input, attrs).map(Stmt::Local)
++ } else if input.peek(Token![pub])
++ || input.peek(Token![crate]) && !input.peek2(Token![::])
++ || input.peek(Token![extern])
++ || input.peek(Token![use])
++ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
++ || input.peek(Token![const])
++ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
++ || input.peek(Token![async])
++ && (input.peek2(Token![unsafe])
++ || input.peek2(Token![extern])
++ || input.peek2(Token![fn]))
++ || input.peek(Token![fn])
++ || input.peek(Token![mod])
++ || input.peek(Token![type])
++ || input.peek(item::parsing::existential) && input.peek2(Token![type])
++ || input.peek(Token![struct])
++ || input.peek(Token![enum])
++ || input.peek(Token![union]) && input.peek2(Ident)
++ || input.peek(Token![auto]) && input.peek2(Token![trait])
++ || input.peek(Token![trait])
++ || input.peek(Token![default])
++ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
++ || input.peek(Token![impl])
++ || input.peek(Token![macro])
+ {
+- input.parse().map(Stmt::Item)
++ let mut item: Item = input.parse()?;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(Stmt::Item(item))
+ } else {
+- stmt_expr(input, allow_nosemi)
++ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
+- let attrs = input.call(Attribute::parse_outer)?;
+- let path = input.call(Path::parse_mod_style)?;
++ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+ let semi_token: Option<Token![;]> = input.parse()?;
+
+ Ok(Stmt::Item(Item::Macro(ItemMacro {
+ attrs,
+ ident,
+@@ -208,43 +209,22 @@ pub mod parsing {
+ bang_token,
+ delimiter,
+ tokens,
+ },
+ semi_token,
+ })))
+ }
+
+- fn stmt_local(input: ParseStream) -> Result<Local> {
++ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ Ok(Local {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ let_token: input.parse()?,
+ pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ pat = Pat::Type(PatType {
+ attrs: Vec::new(),
+ pat: Box::new(pat),
+ colon_token,
+ ty: Box::new(ty),
+@@ -260,22 +240,29 @@ pub mod parsing {
+ } else {
+ None
+ }
+ },
+ semi_token: input.parse()?,
+ })
+ }
+
+- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ fn stmt_expr(
++ input: ParseStream,
++ allow_nosemi: bool,
++ mut attrs: Vec<Attribute>,
++ ) -> Result<Stmt> {
+ let mut e = expr::parsing::expr_early(input)?;
+
+- attrs.extend(e.replace_attrs(Vec::new()));
+- e.replace_attrs(attrs);
++ let mut attr_target = &mut e;
++ while let Expr::Binary(e) = attr_target {
++ attr_target = &mut e.left;
++ }
++ attrs.extend(attr_target.replace_attrs(Vec::new()));
++ attr_target.replace_attrs(attrs);
+
+ if input.peek(Token![;]) {
+ return Ok(Stmt::Semi(e, input.parse()?));
+ }
+
+ if allow_nosemi || !expr::requires_terminator(&e) {
+ Ok(Stmt::Expr(e))
+ } else {
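In the reworked stmt.rs parser, outer attributes are parsed once up front and re-attached to whichever statement form wins, and stray semicolons are preserved as empty Stmt::Semi entries rather than silently skipped. A quick, hedged check of the semicolon behaviour (assumes the "full" and "parsing" features of this vendored syn):

    fn main() {
        let block: syn::Block = syn::parse_str("{ let x = 1;; x }").unwrap();
        // the `let`, the empty statement for the stray `;`, and the tail expression
        assert_eq!(block.stmts.len(), 3);
    }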
+diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
+--- third_party/rust/syn/src/token.rs
++++ third_party/rust/syn/src/token.rs
+@@ -83,44 +83,41 @@
+ //!
+ //! - Field access to its span — `let sp = the_token.span`
+ //!
+ //! [Peeking]: ../parse/struct.ParseBuffer.html#method.peek
+ //! [Parsing]: ../parse/struct.ParseBuffer.html#method.parse
+ //! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+ //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+-use std;
+ #[cfg(feature = "extra-traits")]
+ use std::cmp;
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
+ #[cfg(feature = "extra-traits")]
+ use std::hash::{Hash, Hasher};
+ use std::ops::{Deref, DerefMut};
+
+-#[cfg(feature = "parsing")]
+-use proc_macro2::Delimiter;
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ use proc_macro2::Ident;
+ use proc_macro2::Span;
+ #[cfg(feature = "printing")]
+ use proc_macro2::TokenStream;
++#[cfg(feature = "parsing")]
++use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
+ #[cfg(feature = "printing")]
+ use quote::{ToTokens, TokenStreamExt};
+
+ use self::private::WithSpan;
+ #[cfg(feature = "parsing")]
+ use crate::buffer::Cursor;
+ #[cfg(feature = "parsing")]
+ use crate::error::Result;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lifetime::Lifetime;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+ #[cfg(feature = "parsing")]
+ use crate::lookahead;
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream};
+ use crate::span::IntoSpans;
+
+@@ -150,31 +147,30 @@ mod private {
+ pub struct WithSpan {
+ pub span: Span,
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for Ident {}
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
++ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! impl_token {
+- ($name:ident $display:expr) => {
++ ($display:tt $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+ fn peek(input: ParseStream) -> bool {
+ <$name as Parse>::parse(input).is_ok()
+ }
+ peek_impl(cursor, peek)
+ }
+@@ -184,34 +180,48 @@ macro_rules! impl_token {
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for $name {}
+ };
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lifetime "lifetime");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lit "literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitStr "string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByteStr "byte string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByte "byte literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitChar "character literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitInt "integer literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitFloat "floating point literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitBool "boolean literal");
++impl_token!("lifetime" Lifetime);
++impl_token!("literal" Lit);
++impl_token!("string literal" LitStr);
++impl_token!("byte string literal" LitByteStr);
++impl_token!("byte literal" LitByte);
++impl_token!("character literal" LitChar);
++impl_token!("integer literal" LitInt);
++impl_token!("floating point literal" LitFloat);
++impl_token!("boolean literal" LitBool);
++impl_token!("group token" proc_macro2::Group);
++
++macro_rules! impl_low_level_token {
++ ($display:tt $ty:ident $get:ident) => {
++ #[cfg(feature = "parsing")]
++ impl Token for $ty {
++ fn peek(cursor: Cursor) -> bool {
++ cursor.$get().is_some()
++ }
++
++ fn display() -> &'static str {
++ $display
++ }
++ }
++
++ #[cfg(feature = "parsing")]
++ impl private::Sealed for $ty {}
++ };
++}
++
++impl_low_level_token!("punctuation token" Punct punct);
++impl_low_level_token!("literal" Literal literal);
++impl_low_level_token!("token" TokenTree token_tree);
+
+ // Not public API.
+ #[doc(hidden)]
+ #[cfg(feature = "parsing")]
+ pub trait CustomToken {
+ fn peek(cursor: Cursor) -> bool;
+ fn display() -> &'static str;
+ }
+@@ -228,17 +238,16 @@ impl<T: CustomToken> Token for T {
+ fn display() -> &'static str {
+ <Self as CustomToken>::display()
+ }
+ }
+
+ macro_rules! define_keywords {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+ /// [`Token!`] macro instead.
+ ///
+ /// [`Token!`]: crate::token
+ pub struct $name {
+ pub span: Span,
+@@ -255,16 +264,26 @@ macro_rules! define_keywords {
+ impl std::default::Default for $name {
+ fn default() -> Self {
+ $name {
+ span: Span::call_site(),
+ }
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(stringify!($name))
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+@@ -333,17 +352,16 @@ macro_rules! impl_deref_if_len_is_1 {
+ };
+
+ ($name:ident/$len:tt) => {};
+ }
+
+ macro_rules! define_punctuation_structs {
+ ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[repr(C)]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+ /// [`Token!`] macro instead.
+ ///
+ /// [`Token!`]: crate::token
+ pub struct $name {
+@@ -361,16 +379,26 @@ macro_rules! define_punctuation_structs
+ impl std::default::Default for $name {
+ fn default() -> Self {
+ $name {
+ spans: [Span::call_site(); $len],
+ }
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(stringify!($name))
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+@@ -431,17 +459,16 @@ macro_rules! define_punctuation {
+ impl private::Sealed for $name {}
+ )*
+ };
+ }
+
+ macro_rules! define_delimiters {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ pub struct $name {
+ pub span: Span,
+ }
+
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $name<S: IntoSpans<[Span; 1]>>(span: S) -> $name {
+@@ -453,16 +480,26 @@ macro_rules! define_delimiters {
+ impl std::default::Default for $name {
+ fn default() -> Self {
+ $name {
+ span: Span::call_site(),
+ }
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(stringify!($name))
+ }
+ }
+
+ #[cfg(feature = "extra-traits")]
+@@ -850,17 +887,17 @@ pub mod parsing {
+ if let Some((ident, _rest)) = cursor.ident() {
+ ident == token
+ } else {
+ false
+ }
+ }
+
+ pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
+- let mut spans = [input.cursor().span(); 3];
++ let mut spans = [input.span(); 3];
+ punct_helper(input, token, &mut spans)?;
+ Ok(S::from_spans(&spans))
+ }
+
+ fn punct_helper(input: ParseStream, token: &str, spans: &mut [Span; 3]) -> Result<()> {
+ input.step(|cursor| {
+ let mut cursor = *cursor;
+ assert!(token.len() <= spans.len());
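token.rs now implements Token for the raw proc-macro2 Punct, Literal, and TokenTree types and replaces the derived Copy/Clone on keywords, punctuation, and delimiters with feature-gated impls; the peek/parse interface described in the module docs is unchanged. A small sketch of that peek-then-parse pattern with syn's public API:

    use syn::parse::{Parse, ParseStream, Result};
    use syn::Token;

    // Consume an optional trailing semicolon by peeking before parsing.
    struct MaybeSemi {
        semi: Option<Token![;]>,
    }

    impl Parse for MaybeSemi {
        fn parse(input: ParseStream) -> Result<Self> {
            let semi = if input.peek(Token![;]) {
                Some(input.parse()?)
            } else {
                None
            };
            Ok(MaybeSemi { semi })
        }
    }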
+diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
+--- third_party/rust/syn/src/tt.rs
++++ third_party/rust/syn/src/tt.rs
+@@ -13,18 +13,18 @@ impl<'a> PartialEq for TokenTreeHelper<'
+ match (g1.delimiter(), g2.delimiter()) {
+ (Delimiter::Parenthesis, Delimiter::Parenthesis)
+ | (Delimiter::Brace, Delimiter::Brace)
+ | (Delimiter::Bracket, Delimiter::Bracket)
+ | (Delimiter::None, Delimiter::None) => {}
+ _ => return false,
+ }
+
+- let s1 = g1.stream().clone().into_iter();
+- let mut s2 = g2.stream().clone().into_iter();
++ let s1 = g1.stream().into_iter();
++ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+ Some(item) => item,
+ None => return false,
+ };
+ if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
+ return false;
+@@ -55,17 +55,17 @@ impl<'a> Hash for TokenTreeHelper<'a> {
+ 0u8.hash(h);
+ match g.delimiter() {
+ Delimiter::Parenthesis => 0u8.hash(h),
+ Delimiter::Brace => 1u8.hash(h),
+ Delimiter::Bracket => 2u8.hash(h),
+ Delimiter::None => 3u8.hash(h),
+ }
+
+- for item in g.stream().clone() {
++ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
+ }
+ TokenTree::Punct(op) => {
+ 1u8.hash(h);
+ op.as_char().hash(h);
+ match op.spacing() {
+diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
+--- third_party/rust/syn/src/ty.rs
++++ third_party/rust/syn/src/ty.rs
+@@ -1,31 +1,27 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Type #manual_extra_traits {
++ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+ /// A bare function type: `fn(usize) -> bool`.
+ BareFn(TypeBareFn),
+
+ /// A type contained within invisible delimiters.
+ Group(TypeGroup),
+@@ -72,295 +68,194 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+ pub elem: Box<Type>,
+ pub semi_token: Token![;],
+ pub len: Expr,
+ }
+ }
+
+ ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+ pub unsafety: Option<Token![unsafe]>,
+ pub abi: Option<Abi>,
+ pub fn_token: Token![fn],
+ pub paren_token: token::Paren,
+ pub inputs: Punctuated<BareFnArg, Token![,]>,
+ pub variadic: Option<Variadic>,
+ pub output: ReturnType,
+ }
+ }
+
+ ast_struct! {
+ /// A type contained within invisible delimiters.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+ pub elem: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+ }
+
+ ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+ }
+ }
+
+ ast_struct! {
+ /// A macro in the type position.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeMacro {
+ pub mac: Macro,
+ }
+ }
+
+ ast_struct! {
+ /// The never type: `!`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+ }
+ }
+
+ ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+ pub elem: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ }
+ }
+
+ ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePtr {
+ pub star_token: Token![*],
+ pub const_token: Option<Token![const]>,
+ pub mutability: Option<Token![mut]>,
+ pub elem: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeReference {
+ pub and_token: Token![&],
+ pub lifetime: Option<Lifetime>,
+ pub mutability: Option<Token![mut]>,
+ pub elem: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+ pub elem: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+ pub bounds: Punctuated<TypeParamBound, Token![+]>,
+ }
+ }
+
+ ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Type, Token![,]>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Type {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Type {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Type::Array(this), Type::Array(other)) => this == other,
+- (Type::BareFn(this), Type::BareFn(other)) => this == other,
+- (Type::Group(this), Type::Group(other)) => this == other,
+- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
+- (Type::Infer(this), Type::Infer(other)) => this == other,
+- (Type::Macro(this), Type::Macro(other)) => this == other,
+- (Type::Never(this), Type::Never(other)) => this == other,
+- (Type::Paren(this), Type::Paren(other)) => this == other,
+- (Type::Path(this), Type::Path(other)) => this == other,
+- (Type::Ptr(this), Type::Ptr(other)) => this == other,
+- (Type::Reference(this), Type::Reference(other)) => this == other,
+- (Type::Slice(this), Type::Slice(other)) => this == other,
+- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
+- (Type::Tuple(this), Type::Tuple(other)) => this == other,
+- (Type::Verbatim(this), Type::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Type {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Type::Array(ty) => {
+- hash.write_u8(0);
+- ty.hash(hash);
+- }
+- Type::BareFn(ty) => {
+- hash.write_u8(1);
+- ty.hash(hash);
+- }
+- Type::Group(ty) => {
+- hash.write_u8(2);
+- ty.hash(hash);
+- }
+- Type::ImplTrait(ty) => {
+- hash.write_u8(3);
+- ty.hash(hash);
+- }
+- Type::Infer(ty) => {
+- hash.write_u8(4);
+- ty.hash(hash);
+- }
+- Type::Macro(ty) => {
+- hash.write_u8(5);
+- ty.hash(hash);
+- }
+- Type::Never(ty) => {
+- hash.write_u8(6);
+- ty.hash(hash);
+- }
+- Type::Paren(ty) => {
+- hash.write_u8(7);
+- ty.hash(hash);
+- }
+- Type::Path(ty) => {
+- hash.write_u8(8);
+- ty.hash(hash);
+- }
+- Type::Ptr(ty) => {
+- hash.write_u8(9);
+- ty.hash(hash);
+- }
+- Type::Reference(ty) => {
+- hash.write_u8(10);
+- ty.hash(hash);
+- }
+- Type::Slice(ty) => {
+- hash.write_u8(11);
+- ty.hash(hash);
+- }
+- Type::TraitObject(ty) => {
+- hash.write_u8(12);
+- ty.hash(hash);
+- }
+- Type::Tuple(ty) => {
+- hash.write_u8(13);
+- ty.hash(hash);
+- }
+- Type::Verbatim(ty) => {
+- hash.write_u8(14);
+- TokenStreamHelper(ty).hash(hash);
+- }
+- Type::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Abi {
+ pub extern_token: Token![extern],
+ pub name: Option<LitStr>,
+ }
+ }
+
+ ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+ pub name: Option<(Ident, Token![:])>,
+ pub ty: Type,
+ }
+ }
+
+@@ -372,28 +267,28 @@ ast_struct! {
+ /// # struct c_int;
+ /// #
+ /// extern "C" {
+ /// fn printf(format: *const c_char, ...) -> c_int;
+ /// // ^^^
+ /// }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+ pub dots: Token![...],
+ }
+ }
+
+ ast_enum! {
+ /// Return type of a function signature.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum ReturnType {
+ /// Return type is not specified.
+ ///
+ /// Functions default to `()` and closures default to type inference.
+ Default,
+ /// A particular type is returned.
+ Type(Token![->], Box<Type>),
+@@ -402,39 +297,44 @@ ast_enum! {
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use proc_macro2::{Punct, Spacing, TokenTree};
++ use std::iter::FromIterator;
+
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, true)
++ let allow_plus = true;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ impl Type {
+ /// In some positions, types may not contain the `+` character, to
+ /// disambiguate them. For example in the expression `1 as T`, T may not
+ /// contain a `+` character.
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, false)
++ let allow_plus = false;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
+ return input.parse().map(Type::Group);
+ }
+
++ let begin = input.fork();
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+ lifetimes = input.parse()?;
+ lookahead = input.lookahead1();
+ if !lookahead.peek(Ident)
+ && !lookahead.peek(Token![fn])
+ && !lookahead.peek(Token![unsafe])
+@@ -519,17 +419,17 @@ pub mod parsing {
+ }
+ match bounds.into_iter().next().unwrap() {
+ TypeParamBound::Trait(trait_bound) => {
+ TypeParamBound::Trait(TraitBound {
+ paren_token: Some(paren_token),
+ ..trait_bound
+ })
+ }
+- other => other,
++ other @ TypeParamBound::Lifetime(_) => other,
+ }
+ }
+ _ => break,
+ };
+ return Ok(Type::TraitObject(TypeTraitObject {
+ dyn_token: None,
+ bounds: {
+ let mut bounds = Punctuated::new();
+@@ -544,27 +444,30 @@ pub mod parsing {
+ }
+ }
+ Ok(Type::Paren(TypeParen {
+ paren_token,
+ elem: Box::new(first),
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
++ || lookahead.peek(Token![extern])
+ {
+- let mut bare_fn: TypeBareFn = input.parse()?;
+- bare_fn.lifetimes = lifetimes;
+- Ok(Type::BareFn(bare_fn))
++ let allow_mut_self = true;
++ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
++ bare_fn.lifetimes = lifetimes;
++ Ok(Type::BareFn(bare_fn))
++ } else {
++ Ok(Type::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+- || input.peek(Token![extern])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+ if input.peek(Token![dyn]) {
+ let mut trait_object: TypeTraitObject = input.parse()?;
+ if lifetimes.is_some() {
+ match trait_object.bounds.iter_mut().next().unwrap() {
+ TypeParamBound::Trait(trait_bound) => {
+@@ -717,48 +620,68 @@ pub mod parsing {
+ // & binds tighter than +, so we don't allow + here.
+ elem: Box::new(input.call(Type::without_plus)?),
+ })
+ }
+ }
+
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let args;
+- let allow_variadic;
+- Ok(TypeBareFn {
+- lifetimes: input.parse()?,
+- unsafety: input.parse()?,
+- abi: input.parse()?,
+- fn_token: input.parse()?,
+- paren_token: parenthesized!(args in input),
+- inputs: {
+- let mut inputs = Punctuated::new();
+- while !args.is_empty() && !args.peek(Token![...]) {
+- inputs.push_value(args.parse()?);
+- if args.is_empty() {
+- break;
+- }
+- inputs.push_punct(args.parse()?);
++ let allow_mut_self = false;
++ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
++ let args;
++ let mut variadic = None;
++ let mut has_mut_self = false;
++
++ let bare_fn = TypeBareFn {
++ lifetimes: input.parse()?,
++ unsafety: input.parse()?,
++ abi: input.parse()?,
++ fn_token: input.parse()?,
++ paren_token: parenthesized!(args in input),
++ inputs: {
++ let mut inputs = Punctuated::new();
++
++ while !args.is_empty() {
++ let attrs = args.call(Attribute::parse_outer)?;
++
++ if inputs.empty_or_trailing() && args.peek(Token![...]) {
++ variadic = Some(Variadic {
++ attrs,
++ dots: args.parse()?,
++ });
++ break;
+ }
+- allow_variadic = inputs.empty_or_trailing();
+- inputs
+- },
+- variadic: {
+- if allow_variadic && args.peek(Token![...]) {
+- Some(Variadic {
+- attrs: Vec::new(),
+- dots: args.parse()?,
+- })
++
++ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
++ inputs.push_value(BareFnArg { attrs, ..arg });
+ } else {
+- None
++ has_mut_self = true;
++ }
++ if args.is_empty() {
++ break;
+ }
+- },
+- output: input.call(ReturnType::without_plus)?,
+- })
++
++ inputs.push_punct(args.parse()?);
++ }
++
++ inputs
++ },
++ variadic,
++ output: input.call(ReturnType::without_plus)?,
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(bare_fn))
+ }
+ }
+
+ impl Parse for TypeNever {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TypeNever {
+ bang_token: input.parse()?,
+ })
+@@ -771,19 +694,37 @@ pub mod parsing {
+ underscore_token: input.parse()?,
+ })
+ }
+ }
+
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
++ let paren_token = parenthesized!(content in input);
++
++ if content.is_empty() {
++ return Ok(TypeTuple {
++ paren_token,
++ elems: Punctuated::new(),
++ });
++ }
++
++ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+- paren_token: parenthesized!(content in input),
+- elems: content.parse_terminated(Type::parse)?,
++ paren_token,
++ elems: {
++ let mut elems = Punctuated::new();
++ elems.push_value(first);
++ elems.push_punct(content.parse()?);
++ let rest: Punctuated<Type, Token![,]> =
++ content.parse_terminated(Parse::parse)?;
++ elems.extend(rest);
++ elems
++ },
+ })
+ }
+ }
+
+ impl Parse for TypeMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(TypeMacro {
+ mac: input.parse()?,
+@@ -802,19 +743,21 @@ pub mod parsing {
+ }
+
+ Ok(TypePath { qself, path })
+ }
+ }
+
+ impl ReturnType {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+ let ty = ambig_ty(input, allow_plus)?;
+ Ok(ReturnType::Type(arrow, Box::new(ty)))
+ } else {
+ Ok(ReturnType::Default)
+ }
+@@ -839,20 +782,22 @@ pub mod parsing {
+ return true;
+ }
+ }
+ false
+ }
+
+ impl TypeTraitObject {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ Ok(TypeTraitObject {
+ dyn_token: input.parse()?,
+ bounds: {
+ let mut bounds = Punctuated::new();
+ if allow_plus {
+ loop {
+ bounds.push_value(input.parse()?);
+@@ -905,48 +850,99 @@ pub mod parsing {
+ group_token: group.token,
+ elem: group.content.parse()?,
+ })
+ }
+ }
+
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+ }
+
+ impl TypeParen {
+ fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ let content;
+ Ok(TypeParen {
+ paren_token: parenthesized!(content in input),
+ elem: Box::new(ambig_ty(&content, allow_plus)?),
+ })
+ }
+ }
+
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Ok(BareFnArg {
+- attrs: input.call(Attribute::parse_outer)?,
+- name: {
+- if (input.peek(Ident) || input.peek(Token![_]))
+- && input.peek2(Token![:])
+- && !input.peek2(Token![::])
+- {
+- let name = input.call(Ident::parse_any)?;
+- let colon: Token![:] = input.parse()?;
+- Some((name, colon))
+- } else {
+- None
+- }
+- },
+- ty: input.parse()?,
+- })
++ let allow_mut_self = false;
++ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn_arg(
++ input: ParseStream,
++ mut allow_mut_self: bool,
++ ) -> Result<Option<BareFnArg>> {
++ let mut has_mut_self = false;
++ let arg = BareFnArg {
++ attrs: input.call(Attribute::parse_outer)?,
++ name: {
++ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
++ && input.peek2(Token![:])
++ && !input.peek2(Token![::])
++ {
++ let name = input.call(Ident::parse_any)?;
++ let colon: Token![:] = input.parse()?;
++ Some((name, colon))
++ } else if allow_mut_self
++ && input.peek(Token![mut])
++ && input.peek2(Token![self])
++ && input.peek3(Token![:])
++ && !input.peek3(Token![::])
++ {
++ has_mut_self = true;
++ allow_mut_self = false;
++ input.parse::<Token![mut]>()?;
++ input.parse::<Token![self]>()?;
++ input.parse::<Token![:]>()?;
++ None
++ } else {
++ None
++ }
++ },
++ ty: if !has_mut_self && input.peek(Token![...]) {
++ let dot3 = input.parse::<Token![...]>()?;
++ let args = vec![
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
++ ];
++ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
++ |(mut arg, span)| {
++ arg.set_span(*span);
++ arg
++ },
++ ));
++ Type::Verbatim(tokens)
++ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
++ has_mut_self = true;
++ input.parse::<Token![mut]>()?;
++ Type::Path(TypePath {
++ qself: None,
++ path: input.parse::<Token![self]>()?.into(),
++ })
++ } else {
++ input.parse()?
++ },
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(arg))
+ }
+ }
+
+ impl Parse for Abi {
+ fn parse(input: ParseStream) -> Result<Self> {
+ Ok(Abi {
+ extern_token: input.parse()?,
+ name: input.parse()?,
+diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/verbatim.rs
+@@ -0,0 +1,15 @@
++use crate::parse::{ParseBuffer, ParseStream};
++use proc_macro2::TokenStream;
++use std::iter;
++
++pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
++ let end = end.cursor();
++ let mut cursor = begin.cursor();
++ let mut tokens = TokenStream::new();
++ while cursor != end {
++ let (tt, next) = cursor.token_tree().unwrap();
++ tokens.extend(iter::once(tt));
++ cursor = next;
++ }
++ tokens
++}
+diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/src/whitespace.rs
+@@ -0,0 +1,65 @@
++pub fn skip(mut s: &str) -> &str {
++ 'skip: while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ if let Some(i) = s.find('\n') {
++ s = &s[i + 1..];
++ continue;
++ } else {
++ return "";
++ }
++ } else if s.starts_with("/**/") {
++ s = &s[4..];
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ let mut depth = 0;
++ let bytes = s.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ s = &s[i + 2..];
++ continue 'skip;
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++ return s;
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = &s[1..];
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = &s[ch.len_utf8()..];
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
+diff --git a/third_party/rust/syn/tests/.gitignore b/third_party/rust/syn/tests/.gitignore
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/.gitignore
+@@ -0,0 +1,1 @@
++/*.pending-snap
+diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
+deleted file mode 100755
+--- third_party/rust/syn/tests/clone.sh
++++ /dev/null
+@@ -1,16 +0,0 @@
+-#!/bin/bash
+-
+-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
+-
+-set -euo pipefail
+-cd "$(dirname "${BASH_SOURCE[0]}")"
+-mkdir -p rust
+-touch rust/COMMIT
+-
+-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
+- rm -rf rust
+- mkdir rust
+- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
+- | tar xz --directory rust --strip-components 1
+- echo "$REV" > rust/COMMIT
+-fi
+diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
+--- third_party/rust/syn/tests/common/eq.rs
++++ third_party/rust/syn/tests/common/eq.rs
+@@ -1,41 +1,40 @@
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
++extern crate rustc_span;
+ extern crate rustc_target;
+-extern crate syntax;
+-extern crate syntax_pos;
+
+ use std::mem;
+
+-use self::rustc_data_structures::sync::Lrc;
+-use self::rustc_data_structures::thin_vec::ThinVec;
+-use self::rustc_target::abi::FloatTy;
+-use self::rustc_target::spec::abi::Abi;
+-use self::syntax::ast::{
+- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
+- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
+- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
+- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
+- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
+- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
+- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
+- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
+- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
+- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
+- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
+- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
+- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
++use rustc_ast::ast::{
++ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
++ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
++ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
++ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
++ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
++ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
++ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
++ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
++ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
++ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
++ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
++ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
++ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
++ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
++ WhereRegionPredicate,
+ };
+-use self::syntax::parse::lexer::comments;
+-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::Spanned;
+-use self::syntax::symbol::{sym, Symbol};
+-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
+-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
++use rustc_ast::ptr::P;
++use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
++use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
++use rustc_data_structures::sync::Lrc;
++use rustc_data_structures::thin_vec::ThinVec;
++use rustc_span::source_map::Spanned;
++use rustc_span::symbol::Ident;
++use rustc_span::{Span, Symbol, SyntaxContext};
+
+ pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+ }
+
+ impl<T: SpanlessEq> SpanlessEq for P<T> {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&**self, &**other)
+@@ -81,24 +80,16 @@ impl<T: SpanlessEq> SpanlessEq for Spann
+ }
+
+ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ fn eq(&self, other: &Self) -> bool {
+ SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
+ }
+ }
+
+-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+- fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&self.0, &other.0)
+- && SpanlessEq::eq(&self.1, &other.1)
+- && SpanlessEq::eq(&self.2, &other.2)
+- }
+-}
+-
+ macro_rules! spanless_eq_true {
+ ($name:ident) => {
+ impl SpanlessEq for $name {
+ fn eq(&self, _other: &Self) -> bool {
+ true
+ }
+ }
+ };
+@@ -121,60 +112,62 @@ macro_rules! spanless_eq_partial_eq {
+ }
+
+ spanless_eq_partial_eq!(bool);
+ spanless_eq_partial_eq!(u8);
+ spanless_eq_partial_eq!(u16);
+ spanless_eq_partial_eq!(u128);
+ spanless_eq_partial_eq!(usize);
+ spanless_eq_partial_eq!(char);
++spanless_eq_partial_eq!(String);
+ spanless_eq_partial_eq!(Symbol);
+-spanless_eq_partial_eq!(Abi);
++spanless_eq_partial_eq!(CommentKind);
+ spanless_eq_partial_eq!(DelimToken);
++spanless_eq_partial_eq!(InlineAsmOptions);
+
+ macro_rules! spanless_eq_struct {
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ } => {
+- impl SpanlessEq for $name {
++ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $name { $($field,)* $($ignore: _,)* } = self;
+ let $name { $($field: $other,)* $($ignore: _,)* } = other;
+ $(SpanlessEq::eq($field, $other))&&*
+ }
+ }
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $next:ident
+ $($rest:ident)*
+ $(!$ignore:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ [$next other]
+ $($rest)*
+ $(!$ignore)*
+ }
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ !$next:ident
+ $(!$rest:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ $(![$ignore])*
+ ![$next]
+ $(!$rest)*
+ }
+ };
+ }
+
+@@ -258,129 +251,141 @@ macro_rules! spanless_eq_enum {
+ $name;
+ $([$variant $($fields)*])*
+ [$next]
+ $($rest)*
+ }
+ };
+ }
+
+-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
++spanless_eq_struct!(AngleBracketedArgs; span args);
+ spanless_eq_struct!(AnonConst; id value);
+-spanless_eq_struct!(Arg; attrs ty pat id span);
+-spanless_eq_struct!(Arm; attrs pats guard body span id);
++spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+ spanless_eq_struct!(AssocTyConstraint; id ident kind span);
+-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
+-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
++spanless_eq_struct!(AttrItem; path args);
++spanless_eq_struct!(Attribute; kind id style span);
++spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+ spanless_eq_struct!(Block; stmts id rules span);
+-spanless_eq_struct!(Crate; module attrs span);
++spanless_eq_struct!(Crate; module attrs span proc_macros);
+ spanless_eq_struct!(EnumDef; variants);
+-spanless_eq_struct!(Expr; id node span attrs);
+-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
+-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
+-spanless_eq_struct!(FnDecl; inputs output c_variadic);
+-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
+-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
++spanless_eq_struct!(Expr; id kind span attrs !tokens);
++spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
++spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
++spanless_eq_struct!(FnDecl; inputs output);
++spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
++spanless_eq_struct!(FnSig; header decl span);
+ spanless_eq_struct!(ForeignMod; abi items);
+-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
++spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+ spanless_eq_struct!(Generics; params where_clause span);
+ spanless_eq_struct!(GlobalAsm; asm);
+-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
+-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
+-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
+-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
++spanless_eq_struct!(InlineAsm; template operands options line_spans);
++spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+ spanless_eq_struct!(Label; ident);
+ spanless_eq_struct!(Lifetime; id ident);
+-spanless_eq_struct!(Lit; token node span);
++spanless_eq_struct!(Lit; token kind span);
++spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
++spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
+ spanless_eq_struct!(Local; pat ty init id span attrs);
+-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
+-spanless_eq_struct!(MacroDef; tokens legacy);
+-spanless_eq_struct!(MethodSig; header decl);
++spanless_eq_struct!(MacCall; path args prior_type_ascription);
++spanless_eq_struct!(MacCallStmt; mac style attrs);
++spanless_eq_struct!(MacroDef; body macro_rules);
+ spanless_eq_struct!(Mod; inner items inline);
+ spanless_eq_struct!(MutTy; ty mutbl);
++spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
+ spanless_eq_struct!(ParenthesizedArgs; span inputs output);
+-spanless_eq_struct!(Pat; id node span);
++spanless_eq_struct!(Pat; id kind span tokens);
+ spanless_eq_struct!(Path; span segments);
+ spanless_eq_struct!(PathSegment; ident id args);
+ spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+ spanless_eq_struct!(QSelf; ty path_span position);
+-spanless_eq_struct!(Stmt; id node span);
+-spanless_eq_struct!(StructField; span ident vis id ty attrs);
++spanless_eq_struct!(Stmt; id kind span);
++spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
++spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
+ spanless_eq_struct!(Token; kind span);
+-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
+ spanless_eq_struct!(TraitRef; path ref_id);
+-spanless_eq_struct!(Ty; id node span);
++spanless_eq_struct!(Ty; id kind span);
+ spanless_eq_struct!(UseTree; prefix kind span);
+-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
++spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
+ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+-spanless_eq_struct!(WhereClause; predicates span);
++spanless_eq_struct!(WhereClause; has_where_token predicates span);
+ spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+ spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+-spanless_eq_enum!(AsmDialect; Att Intel);
++spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
++spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
++spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
++spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
+ spanless_eq_enum!(AttrStyle; Outer Inner);
+ spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
+ spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
++spanless_eq_enum!(BorrowKind; Ref Raw);
+ spanless_eq_enum!(CaptureBy; Value Ref);
+-spanless_eq_enum!(Constness; Const NotConst);
++spanless_eq_enum!(Const; Yes(0) No);
+ spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
+-spanless_eq_enum!(Defaultness; Default Final);
++spanless_eq_enum!(Defaultness; Default(0) Final);
++spanless_eq_enum!(Extern; None Implicit Explicit(0));
+ spanless_eq_enum!(FloatTy; F32 F64);
+-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
+-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
++spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
++spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+ spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+ spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
+-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
+-spanless_eq_enum!(ImplPolarity; Positive Negative);
++spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
++spanless_eq_enum!(ImplPolarity; Positive Negative(0));
++spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
++spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+ spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
+ spanless_eq_enum!(IsAuto; Yes No);
++spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
++spanless_eq_enum!(LlvmAsmDialect; Att Intel);
++spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
+ spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
+ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+ spanless_eq_enum!(Movability; Static Movable);
+-spanless_eq_enum!(Mutability; Mutable Immutable);
++spanless_eq_enum!(Mutability; Mut Not);
+ spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+ spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
++spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+ spanless_eq_enum!(StrStyle; Cooked Raw(0));
+ spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+-spanless_eq_enum!(TraitBoundModifier; None Maybe);
+-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
++spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
+ spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+ spanless_eq_enum!(UnOp; Deref Not Neg);
++spanless_eq_enum!(Unsafe; Yes(0) No);
+ spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+-spanless_eq_enum!(Unsafety; Unsafe Normal);
+ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
+ spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
++spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
+ Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
+ While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
+- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
+- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
+- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
+- Try(0) Yield(0) Err);
+-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
+- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
+- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
+- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
++ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
++ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
++ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
++ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
++spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
++ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
++ Sym(expr));
++spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
++ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
++ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
++ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
++ MacCall(0) MacroDef(0));
+ spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
++ Float(0 1) Bool(0) Err(0));
+ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
+ Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+- Paren(0) Mac(0));
++ Paren(0) MacCall(0));
+ spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+ Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+- ImplicitSelf Mac(0) Err CVarArgs);
++ ImplicitSelf MacCall(0) Err CVarArgs);
+
+ impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+ self.as_str() == other.as_str()
+ }
+ }
+
+ // Give up on comparing literals inside of macros because there are so many
+@@ -409,49 +414,25 @@ impl SpanlessEq for TokenKind {
+ },
+ _ => self == other,
+ }
+ }
+ }
+
+ impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
++ let mut this = self.clone().into_trees();
++ let mut other = other.clone().into_trees();
++ loop {
++ let this = match this.next() {
++ None => return other.next().is_none(),
++ Some(val) => val,
++ };
++ let other = match other.next() {
++ None => return false,
++ Some(val) => val,
++ };
++ if !SpanlessEq::eq(&this, &other) {
++ return false;
++ }
++ }
+ }
+ }
+-
+-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
+- let mut tokens = Vec::new();
+- for tt in tts.clone().into_trees() {
+- let c = match tt {
+- TokenTree::Token(Token {
+- kind: TokenKind::DocComment(c),
+- ..
+- }) => c,
+- _ => {
+- tokens.push(tt);
+- continue;
+- }
+- };
+- let contents = comments::strip_doc_comment_decoration(&c.as_str());
+- let style = comments::doc_comment_style(&c.as_str());
+- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
+- if style == AttrStyle::Inner {
+- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
+- }
+- let lit = token::Lit {
+- kind: token::LitKind::Str,
+- symbol: Symbol::intern(&contents),
+- suffix: None,
+- };
+- let tts = vec![
+- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
+- TokenTree::token(TokenKind::Eq, DUMMY_SP),
+- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
+- ];
+- tokens.push(TokenTree::Delimited(
+- DelimSpan::dummy(),
+- DelimToken::Bracket,
+- tts.into_iter().collect::<TokenStream>().into(),
+- ));
+- }
+- tokens
+-}
+diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
+--- third_party/rust/syn/tests/common/mod.rs
++++ third_party/rust/syn/tests/common/mod.rs
+@@ -1,14 +1,27 @@
+ #![allow(dead_code)]
+
++use rayon::ThreadPoolBuilder;
+ use std::env;
+
+ pub mod eq;
+ pub mod parse;
+
+ /// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
+ pub fn abort_after() -> usize {
+ match env::var("ABORT_AFTER_FAILURE") {
+ Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
+ Err(_) => usize::max_value(),
+ }
+ }
++
++/// Configure Rayon threadpool.
++pub fn rayon_init() {
++ let stack_size = match env::var("RUST_MIN_STACK") {
++ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
++ Err(_) => 20 * 1024 * 1024,
++ };
++ ThreadPoolBuilder::new()
++ .stack_size(stack_size)
++ .build_global()
++ .unwrap();
++}
+diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
+--- third_party/rust/syn/tests/common/parse.rs
++++ third_party/rust/syn/tests/common/parse.rs
+@@ -1,25 +1,25 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
++extern crate rustc_ast;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+-use self::syntax::ast;
+-use self::syntax::parse::{self, ParseSess};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::FilePathMapping;
+-use self::syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+
+ use std::panic;
+
+-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
++pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- sess.span_diagnostic.set_continue_after_error(false);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+ input.to_string(),
+ )
+ .parse_expr();
+ match e {
+ Ok(expr) => Some(expr),
+@@ -27,17 +27,17 @@ pub fn libsyntax_expr(input: &str) -> Op
+ diagnostic.emit();
+ None
+ }
+ }
+ }) {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+- errorf!("libsyntax panicked\n");
++ errorf!("librustc panicked\n");
+ None
+ }
+ }
+ }
+
+ pub fn syn_expr(input: &str) -> Option<syn::Expr> {
+ match syn::parse_str(input) {
+ Ok(e) => Some(e),
+diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
+--- third_party/rust/syn/tests/debug/gen.rs
++++ third_party/rust/syn/tests/debug/gen.rs
+@@ -1,13 +1,13 @@
+ // This file is @generated by syn-internal-codegen.
+ // It is not intended for manual editing.
+
+ use super::{Lite, RefCast};
+-use std::fmt::{self, Debug};
++use std::fmt::{self, Debug, Display};
+ impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+ let mut formatter = formatter.debug_struct("Abi");
+ if let Some(val) = &_val.name {
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ struct Print(syn::LitStr);
+@@ -1034,19 +1034,19 @@ impl Debug for Lite<syn::Expr> {
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("block", Lite(&_val.block));
+ formatter.finish()
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+ let mut formatter = formatter.debug_struct("Expr::While");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ if let Some(val) = &_val.label {
+@@ -2111,19 +2111,19 @@ impl Debug for Lite<syn::ForeignItem> {
+ }
+ }
+ formatter.field("semi_token", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ impl Debug for Lite<syn::ForeignItemFn> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+@@ -2427,19 +2427,19 @@ impl Debug for Lite<syn::ImplItem> {
+ }
+ }
+ formatter.field("semi_token", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ impl Debug for Lite<syn::ImplItemConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+@@ -2935,19 +2935,19 @@ impl Debug for Lite<syn::Item> {
+ }
+ formatter.field("leading_colon", Print::ref_cast(val));
+ }
+ formatter.field("tree", Lite(&_val.tree));
+ formatter.finish()
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ impl Debug for Lite<syn::ItemConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+@@ -3432,19 +3432,19 @@ impl Debug for Lite<syn::Lit> {
+ syn::Lit::Float(_val) => write!(formatter, "{}", _val),
+ syn::Lit::Bool(_val) => {
+ let mut formatter = formatter.debug_struct("Lit::Bool");
+ formatter.field("value", Lite(&_val.value));
+ formatter.finish()
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ }
+ }
+ }
+ impl Debug for Lite<syn::LitBool> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+@@ -3873,19 +3873,19 @@ impl Debug for Lite<syn::Pat> {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.field("pat", Lite(&_val.pat));
+ formatter.field("ty", Lite(&_val.ty));
+ formatter.finish()
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+ let mut formatter = formatter.debug_struct("Pat::Wild");
+ if !_val.attrs.is_empty() {
+ formatter.field("attrs", Lite(&_val.attrs));
+ }
+ formatter.finish()
+@@ -4669,19 +4669,19 @@ impl Debug for Lite<syn::TraitItem> {
+ }
+ }
+ formatter.field("semi_token", Print::ref_cast(val));
+ }
+ formatter.finish()
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ impl Debug for Lite<syn::TraitItemConst> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+@@ -5035,19 +5035,19 @@ impl Debug for Lite<syn::Type> {
+ let mut formatter = formatter.debug_struct("Type::Tuple");
+ if !_val.elems.is_empty() {
+ formatter.field("elems", Lite(&_val.elems));
+ }
+ formatter.finish()
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+ impl Debug for Lite<syn::TypeArray> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
+--- third_party/rust/syn/tests/debug/mod.rs
++++ third_party/rust/syn/tests/debug/mod.rs
+@@ -1,15 +1,12 @@
+-extern crate proc_macro2;
+-extern crate ref_cast;
+-
+ mod gen;
+
+-use self::proc_macro2::{Ident, Literal, TokenStream};
+-use self::ref_cast::RefCast;
++use proc_macro2::{Ident, Literal, TokenStream};
++use ref_cast::RefCast;
+ use std::fmt::{self, Debug};
+ use std::ops::Deref;
+ use syn::punctuated::Punctuated;
+
+ #[derive(RefCast)]
+ #[repr(transparent)]
+ pub struct Lite<T: ?Sized> {
+ value: T,
+@@ -61,17 +58,25 @@ impl Debug for Lite<Ident> {
+ impl Debug for Lite<Literal> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ write!(formatter, "{}", self.value)
+ }
+ }
+
+ impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- write!(formatter, "`{}`", self.value)
++ let string = self.value.to_string();
++ if string.len() <= 80 {
++ write!(formatter, "TokenStream(`{}`)", self.value)
++ } else {
++ formatter
++ .debug_tuple("TokenStream")
++ .field(&format_args!("`{}`", string))
++ .finish()
++ }
+ }
+ }
+
+ impl<'a, T> Debug for Lite<&'a T>
+ where
+ Lite<T>: Debug,
+ {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
+deleted file mode 100644
+--- third_party/rust/syn/tests/features/error.rs
++++ /dev/null
+@@ -1,1 +0,0 @@
+-"Hello! You want: cargo test --release --all-features"
+diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
+deleted file mode 100644
+--- third_party/rust/syn/tests/features/mod.rs
++++ /dev/null
+@@ -1,22 +0,0 @@
+-#[allow(unused_macros)]
+-macro_rules! hide_from_rustfmt {
+- ($mod:item) => {
+- $mod
+- };
+-}
+-
+-#[cfg(not(all(
+- feature = "derive",
+- feature = "full",
+- feature = "parsing",
+- feature = "printing",
+- feature = "visit",
+- feature = "visit-mut",
+- feature = "fold",
+- feature = "clone-impls",
+- feature = "extra-traits",
+- feature = "proc-macro",
+-)))]
+-hide_from_rustfmt! {
+- mod error;
+-}
+diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
+--- third_party/rust/syn/tests/macros/mod.rs
++++ third_party/rust/syn/tests/macros/mod.rs
+@@ -1,10 +1,8 @@
+-extern crate proc_macro2;
+-
+ #[path = "../debug/mod.rs"]
+ pub mod debug;
+
+ use syn;
+ use syn::parse::{Parse, Result};
+
+ #[macro_export]
+ macro_rules! errorf {
+@@ -37,28 +35,28 @@ macro_rules! snapshot {
+ };
+ }
+
+ #[macro_export]
+ macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
+ let debug = crate::macros::debug::Lite(&$expr);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+ snapshot_impl!(($($expr)* $next) $($rest)*)
+ };
+ }
+
+ pub trait Tokens {
+diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
+--- third_party/rust/syn/tests/repo/mod.rs
++++ third_party/rust/syn/tests/repo/mod.rs
+@@ -1,60 +1,135 @@
+-extern crate walkdir;
++mod progress;
++
++use self::progress::Progress;
++use anyhow::Result;
++use flate2::read::GzDecoder;
++use std::fs;
++use std::path::Path;
++use tar::Archive;
++use walkdir::DirEntry;
++
++const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
++
++#[rustfmt::skip]
++static EXCLUDE: &[&str] = &[
++ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
++ "test/ui/const-generics/const-expression-parameter.rs",
+
+-use std::process::Command;
++ // Deprecated anonymous parameter syntax in traits
++ "test/ui/issues/issue-13105.rs",
++ "test/ui/issues/issue-13775.rs",
++ "test/ui/issues/issue-34074.rs",
++ "test/ui/proc-macro/trait-fn-args-2015.rs",
+
+-use self::walkdir::DirEntry;
++ // Not actually test cases
++ "test/rustdoc-ui/test-compile-fail2.rs",
++ "test/rustdoc-ui/test-compile-fail3.rs",
++ "test/ui/include-single-expr-helper.rs",
++ "test/ui/include-single-expr-helper-1.rs",
++ "test/ui/issues/auxiliary/issue-21146-inc.rs",
++ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
++ "test/ui/lint/expansion-time-include.rs",
++ "test/ui/macros/auxiliary/macro-comma-support.rs",
++ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
++];
+
+ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+ if path.is_dir() {
+ return true; // otherwise walkdir does not visit the files
+ }
+ if path.extension().map(|e| e != "rs").unwrap_or(true) {
+ return false;
+ }
+- let path_string = path.to_string_lossy();
+- let path_string = if cfg!(windows) {
+- path_string.replace('\\', "/").into()
++
++ let mut path_string = path.to_string_lossy();
++ if cfg!(windows) {
++ path_string = path_string.replace('\\', "/").into();
++ }
++ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
++ path
++ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
++ path
+ } else {
+- path_string
++ panic!("unexpected path in Rust dist: {}", path_string);
+ };
++
+ // TODO assert that parsing fails on the parse-fail cases
+- if path_string.starts_with("tests/rust/src/test/parse-fail")
+- || path_string.starts_with("tests/rust/src/test/compile-fail")
+- || path_string.starts_with("tests/rust/src/test/rustfix")
++ if path.starts_with("test/parse-fail")
++ || path.starts_with("test/compile-fail")
++ || path.starts_with("test/rustfix")
+ {
+ return false;
+ }
+
+- if path_string.starts_with("tests/rust/src/test/ui") {
+- let stderr_path = path.with_extension("stderr");
++ if path.starts_with("test/ui") {
++ let stderr_path = entry.path().with_extension("stderr");
+ if stderr_path.exists() {
+ // Expected to fail in some way
+ return false;
+ }
+ }
+
+- match path_string.as_ref() {
+- // Deprecated placement syntax
+- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
+- // Deprecated anonymous parameter syntax in traits
+- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
+- "tests/rust/src/test/ui/issues/issue-13105.rs" |
+- "tests/rust/src/test/ui/issues/issue-13775.rs" |
+- "tests/rust/src/test/ui/issues/issue-34074.rs" |
+- // Deprecated await macro syntax
+- "tests/rust/src/test/ui/async-await/await-macro.rs" |
+- // 2015-style dyn that libsyntax rejects
+- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
+- // not actually test cases
+- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
+- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
+- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
+- _ => true,
++ !EXCLUDE.contains(&path)
++}
++
++#[allow(dead_code)]
++pub fn edition(path: &Path) -> &'static str {
++ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
++ "2015"
++ } else {
++ "2018"
+ }
+ }
+
+ pub fn clone_rust() {
+- let result = Command::new("tests/clone.sh").status().unwrap();
+- assert!(result.success());
++ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
++ Err(_) => true,
++ Ok(contents) => contents.trim() != REVISION,
++ };
++ if needs_clone {
++ download_and_unpack().unwrap();
++ }
++ let mut missing = String::new();
++ let test_src = Path::new("tests/rust/src");
++ for exclude in EXCLUDE {
++ if !test_src.join(exclude).exists() {
++ missing += "\ntests/rust/src/";
++ missing += exclude;
++ }
++ }
++ if !missing.is_empty() {
++ panic!("excluded test file does not exist:{}\n", missing);
++ }
+ }
++
++fn download_and_unpack() -> Result<()> {
++ let url = format!(
++ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
++ REVISION
++ );
++ let response = reqwest::blocking::get(&url)?.error_for_status()?;
++ let progress = Progress::new(response);
++ let decoder = GzDecoder::new(progress);
++ let mut archive = Archive::new(decoder);
++ let prefix = format!("rust-{}", REVISION);
++
++ let tests_rust = Path::new("tests/rust");
++ if tests_rust.exists() {
++ fs::remove_dir_all(tests_rust)?;
++ }
++
++ for entry in archive.entries()? {
++ let mut entry = entry?;
++ let path = entry.path()?;
++ if path == Path::new("pax_global_header") {
++ continue;
++ }
++ let relative = path.strip_prefix(&prefix)?;
++ let out = tests_rust.join(relative);
++ entry.unpack(&out)?;
++ }
++
++ fs::write("tests/rust/COMMIT", REVISION)?;
++ Ok(())
++}
+diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/repo/progress.rs
+@@ -0,0 +1,37 @@
++use std::io::{Read, Result};
++use std::time::{Duration, Instant};
++
++pub struct Progress<R> {
++ bytes: usize,
++ tick: Instant,
++ stream: R,
++}
++
++impl<R> Progress<R> {
++ pub fn new(stream: R) -> Self {
++ Progress {
++ bytes: 0,
++ tick: Instant::now() + Duration::from_millis(2000),
++ stream,
++ }
++ }
++}
++
++impl<R: Read> Read for Progress<R> {
++ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
++ let num = self.stream.read(buf)?;
++ self.bytes += num;
++ let now = Instant::now();
++ if now > self.tick {
++ self.tick = now + Duration::from_millis(500);
++ errorf!("downloading... {} bytes\n", self.bytes);
++ }
++ Ok(num)
++ }
++}
++
++impl<R> Drop for Progress<R> {
++ fn drop(&mut self) {
++ errorf!("done ({} bytes)\n", self.bytes);
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
+--- third_party/rust/syn/tests/test_asyncness.rs
++++ third_party/rust/syn/tests/test_asyncness.rs
+@@ -1,41 +1,37 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use syn::{Expr, Item};
+
+ #[test]
+ fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+- ⋮Item::Fn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ asyncness: Some,
+- ⋮ ident: "process",
+- ⋮ generics: Generics,
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ asyncness: Some,
++ ident: "process",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+- ⋮Expr::Closure {
+- ⋮ asyncness: Some,
+- ⋮ output: Default,
+- ⋮ body: Expr::Block {
+- ⋮ block: Block,
+- ⋮ },
+- ⋮}
++ Expr::Closure {
++ asyncness: Some,
++ output: Default,
++ body: Expr::Block {
++ block: Block,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
+--- third_party/rust/syn/tests/test_attribute.rs
++++ third_party/rust/syn/tests/test_attribute.rs
+@@ -1,295 +1,333 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use syn::parse::Parser;
+ use syn::{Attribute, Meta};
+
+ #[test]
+ fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: false,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: false,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ ],
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }),
++ ],
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(Lit::Bool {
+- ⋮ value: true,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(Lit::Bool {
++ value: true,
++ }),
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_negative_lit() {
++ let meta = test("#[form(min = -1, max = 200)]");
++
++ snapshot!(meta, @r###"
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "form",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "min",
++ arguments: None,
++ },
++ ],
++ },
++ lit: -1,
++ }),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "max",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 200,
++ }),
++ ],
++ }
+ "###);
+ }
+
+ fn test(input: &str) -> Meta {
+ let attrs = Attribute::parse_outer.parse_str(input).unwrap();
+
+ assert_eq!(attrs.len(), 1);
+ let attr = attrs.into_iter().next().unwrap();
+diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
+--- third_party/rust/syn/tests/test_derive_input.rs
++++ third_party/rust/syn/tests/test_derive_input.rs
+@@ -1,201 +1,196 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use quote::quote;
+ use syn::{Data, DeriveInput};
+
+ #[test]
+ fn test_unit() {
+ let input = quote! {
+ struct Unit;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "Unit",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "Unit",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_struct() {
+ let input = quote! {
+ #[derive(Debug, Clone)]
+ pub struct Item {
+ pub ident: Ident,
+ pub attrs: Vec<Attribute>
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `( Debug , Clone )`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Item",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("ident"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Ident",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("attrs"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Vec",
+- ⋮ arguments: PathArguments::AngleBracketed {
+- ⋮ args: [
+- ⋮ Type(Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Attribute",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(Debug , Clone)`),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Item",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Visibility::Public,
++ ident: Some("ident"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Ident",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("attrs"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Vec",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Attribute",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Clone",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Clone",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_union() {
+ let input = quote! {
+ union MaybeUninit<T> {
+ uninit: (),
+ value: T
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "MaybeUninit",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Union {
+- ⋮ fields: FieldsNamed {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("uninit"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Tuple,
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("value"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "MaybeUninit",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Union {
++ fields: FieldsNamed {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("uninit"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ Field {
++ vis: Inherited,
++ ident: Some("value"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ #[cfg(feature = "full")]
+ fn test_enum() {
+ let input = quote! {
+ /// See the std::result module documentation for details.
+@@ -207,472 +202,472 @@ fn test_enum() {
+
+ // Smuggling data into a proc_macro_derive,
+ // in the style of https://github.com/dtolnay/proc-macro-hack
+ ProcMacroHack = (0, "data").0
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `= r" See the std::result module documentation for details."`,
+- ⋮ },
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Result",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ ident: "E",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Enum {
+- ⋮ variants: [
+- ⋮ Variant {
+- ⋮ ident: "Ok",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Err",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "E",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Surprise",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Lit {
+- ⋮ lit: 0isize,
+- ⋮ }),
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "ProcMacroHack",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Field {
+- ⋮ base: Expr::Tuple {
+- ⋮ elems: [
+- ⋮ Expr::Lit {
+- ⋮ lit: 0,
+- ⋮ },
+- ⋮ Expr::Lit {
+- ⋮ lit: "data",
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ member: Unnamed(Index {
+- ⋮ index: 0,
+- ⋮ }),
+- ⋮ }),
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
++ },
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Result",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ Type(TypeParam {
++ ident: "E",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Enum {
++ variants: [
++ Variant {
++ ident: "Ok",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Err",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "E",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Surprise",
++ fields: Unit,
++ discriminant: Some(Expr::Lit {
++ lit: 0isize,
++ }),
++ },
++ Variant {
++ ident: "ProcMacroHack",
++ fields: Unit,
++ discriminant: Some(Expr::Field {
++ base: Expr::Tuple {
++ elems: [
++ Expr::Lit {
++ lit: 0,
++ },
++ Expr::Lit {
++ lit: "data",
++ },
++ ],
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }),
++ },
++ ],
++ },
++ }
+ "###);
+
+ let meta_items: Vec<_> = input
+ .attrs
+ .into_iter()
+ .map(|attr| attr.parse_meta().unwrap())
+ .collect();
+
+ snapshot!(meta_items, @r###"
+- ⋮[
+- ⋮ Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: " See the std::result module documentation for details.",
+- ⋮ },
+- ⋮ Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮]
++ [
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ lit: " See the std::result module documentation for details.",
++ },
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ }),
++ ]
+ "###);
+ }
+
+ #[test]
+ fn test_attr_with_path() {
+ let input = quote! {
+ #[::attr_args::identity
+ fn main() { assert_eq!(foo(), "Hello, world!"); }]
+ struct Dummy;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ leading_colon: Some,
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "attr_args",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "identity",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "Dummy",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ leading_colon: Some,
++ segments: [
++ PathSegment {
++ ident: "attr_args",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "identity",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
++ },
++ ],
++ vis: Inherited,
++ ident: "Dummy",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+ }
+
+ #[test]
+ fn test_attr_with_non_mod_style_path() {
+ let input = quote! {
+ #[inert <T>]
+ struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "inert",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `< T >`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inert",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`< T >`),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+ }
+
+ #[test]
+ fn test_attr_with_mod_style_path_with_self() {
+ let input = quote! {
+ #[foo::self]
+ struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+ #[test]
+ fn test_pub_restricted() {
+ // Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
+ let input = quote! {
+ pub(in m) struct Z(pub(in m::n) u8);
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "Z",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "n",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "u8",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "Z",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "n",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "u8",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_vis_crate() {
+ let input = quote! {
+ crate struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Crate,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Crate,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_pub_restricted_crate() {
+ let input = quote! {
+ pub(crate) struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_pub_restricted_super() {
+ let input = quote! {
+ pub(super) struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_pub_restricted_in_super() {
+ let input = quote! {
+ pub(in super) struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+ #[test]
+ fn test_fields_on_unit_struct() {
+ let input = quote! {
+ struct S;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+ Data::Struct(data) => data,
+ _ => panic!("expected a struct"),
+ };
+
+ assert_eq!(0, data.fields.iter().count());
+@@ -683,215 +678,215 @@ fn test_fields_on_named_struct() {
+ let input = quote! {
+ struct S {
+ foo: i32,
+ pub bar: String,
+ }
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ let data = match input.data {
+ Data::Struct(data) => data,
+ _ => panic!("expected a struct"),
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+ #[test]
+ fn test_fields_on_tuple_struct() {
+ let input = quote! {
+ struct S(i32, pub String);
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+ Data::Struct(data) => data,
+ _ => panic!("expected a struct"),
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+ #[test]
+ fn test_ambiguous_crate() {
+ let input = quote! {
+ // The field type is `(crate::X)` not `crate (::X)`.
+ struct S(crate::X);
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "X",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "X",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
+--- third_party/rust/syn/tests/test_expr.rs
++++ third_party/rust/syn/tests/test_expr.rs
+@@ -1,40 +1,302 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+ #[macro_use]
+ mod macros;
+
+-use std::str::FromStr;
+-
+-use proc_macro2::TokenStream;
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
+ use syn::{Expr, ExprRange};
+
+ #[test]
+ fn test_expr_parse() {
+- let code = "..100u32";
+- let tt = TokenStream::from_str(code).unwrap();
+- let expr: Expr = syn::parse2(tt.clone()).unwrap();
+- let expr_range: ExprRange = syn::parse2(tt).unwrap();
+- assert_eq!(expr, Expr::Range(expr_range));
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as Expr, @r###"
++ Expr::Range {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
++
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as ExprRange, @r###"
++ ExprRange {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
+ }
+
+ #[test]
+ fn test_await() {
+ // Must not parse as Expr::Field.
+- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
++ let tokens = quote!(fut.await);
+
+- snapshot!(expr, @r###"
+- ⋮Expr::Await {
+- ⋮ base: Expr::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "fut",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ snapshot!(tokens as Expr, @r###"
++ Expr::Await {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "fut",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
++
++#[rustfmt::skip]
++#[test]
++fn test_tuple_multi_index() {
++ for &input in &[
++ "tuple.0.0",
++ "tuple .0.0",
++ "tuple. 0.0",
++ "tuple.0 .0",
++ "tuple.0. 0",
++ "tuple . 0 . 0",
++ ] {
++ snapshot!(input as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++
++ for tokens in vec![
++ quote!(tuple.0.0),
++ quote!(tuple .0.0),
++ quote!(tuple. 0.0),
++ quote!(tuple.0 .0),
++ quote!(tuple.0. 0),
++ quote!(tuple . 0 . 0),
++ ] {
++ snapshot!(tokens as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++}
++
++#[test]
++fn test_macro_variable_func() {
++ // mimics the token stream corresponding to `$fn()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ func: Expr::Group {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "outside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ func: Expr::Group {
++ expr: Expr::Path {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_macro() {
++ // mimics the token stream corresponding to `$macro!()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
++ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Macro {
++ mac: Macro {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ delimiter: Paren,
++ tokens: TokenStream(``),
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_struct() {
++ // mimics the token stream corresponding to `$struct {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Struct {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "S",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_match_arm() {
++ // mimics the token stream corresponding to `match v { _ => $expr }`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("match", Span::call_site())),
++ TokenTree::Ident(Ident::new("v", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
++ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Match {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "v",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ arms: [
++ Arm {
++ pat: Pat::Wild,
++ body: Expr::Group {
++ expr: Expr::Tuple {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "a",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ },
++ },
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
+--- third_party/rust/syn/tests/test_generics.rs
++++ third_party/rust/syn/tests/test_generics.rs
+@@ -1,110 +1,105 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use quote::quote;
+ use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
+
+ #[test]
+ fn test_split_for_impl() {
+ let input = quote! {
+ struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ }),
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "b",
+- ⋮ },
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "may_dangle",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ ident: "T",
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮ }),
+- ⋮ ],
+- ⋮ eq_token: Some,
+- ⋮ default: Some(Type::Tuple),
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "a",
++ },
++ }),
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "b",
++ },
++ colon_token: Some,
++ bounds: [
++ Lifetime {
++ ident: "a",
++ },
++ ],
++ }),
++ Type(TypeParam {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "may_dangle",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ ident: "T",
++ colon_token: Some,
++ bounds: [
++ Lifetime(Lifetime {
++ ident: "a",
++ }),
++ ],
++ eq_token: Some,
++ default: Some(Type::Tuple),
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let generics = input.generics;
+ let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+
+ let generated = quote! {
+ impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
+ };
+@@ -126,156 +121,156 @@ fn test_split_for_impl() {
+ };
+ assert_eq!(generated.to_string(), expected.to_string());
+ }
+
+ #[test]
+ fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "a",
++ })
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "_",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "_",
++ })
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: Maybe,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Sized",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: Maybe,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Sized",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+ }
+
+ #[test]
+ fn test_fn_precedence_in_where_clause() {
+ // This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
+ // `FnOnce() -> (i32 + Send)`.
+ let input = quote! {
+ fn f<G>()
+ where
+ G: FnOnce() -> i32 + Send,
+ {
+ }
+ };
+
+ snapshot!(input as ItemFn, @r###"
+- ⋮ItemFn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ ident: "f",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "G",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "G",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "FnOnce",
+- ⋮ arguments: PathArguments::Parenthesized {
+- ⋮ output: Type(
+- ⋮ Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ),
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Send",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ ItemFn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "G",
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "G",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "FnOnce",
++ arguments: PathArguments::Parenthesized {
++ output: Type(
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ),
++ },
++ },
++ ],
++ },
++ }),
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Send",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+ assert_eq!(where_clause.predicates.len(), 1);
+
+ let predicate = match &where_clause.predicates[0] {
+ WherePredicate::Type(pred) => pred,
+ _ => panic!("wrong predicate kind"),
+ };
+
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
++ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
+ }
+
+ #[test]
+ fn test_where_clause_at_end_of_input() {
+ let input = quote! {
+diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
+--- third_party/rust/syn/tests/test_grouping.rs
++++ third_party/rust/syn/tests/test_grouping.rs
+@@ -1,13 +1,8 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
+ use syn::Expr;
+
+ use std::iter::FromIterator;
+
+@@ -23,36 +18,36 @@ fn test_grouping() {
+ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+ TokenTree::Literal(Literal::i32_suffixed(3)),
+ ]),
+ )),
+ TokenTree::Punct(Punct::new('*', Spacing::Alone)),
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
++ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+- ⋮Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 1i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Binary {
+- ⋮ left: Expr::Group {
+- ⋮ expr: Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 2i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 3i32,
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ op: Mul,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 4i32,
+- ⋮ },
+- ⋮ },
+- ⋮}
++ Expr::Binary {
++ left: Expr::Lit {
++ lit: 1i32,
++ },
++ op: Add,
++ right: Expr::Binary {
++ left: Expr::Group {
++ expr: Expr::Binary {
++ left: Expr::Lit {
++ lit: 2i32,
++ },
++ op: Add,
++ right: Expr::Lit {
++ lit: 3i32,
++ },
++ },
++ },
++ op: Mul,
++ right: Expr::Lit {
++ lit: 4i32,
++ },
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
+--- third_party/rust/syn/tests/test_ident.rs
++++ third_party/rust/syn/tests/test_ident.rs
+@@ -1,13 +1,8 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ use proc_macro2::{Ident, Span, TokenStream};
+ use std::str::FromStr;
+ use syn::Result;
+
+ fn parse(s: &str) -> Result<Ident> {
+ syn::parse2(TokenStream::from_str(s).unwrap())
+ }
+
+diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_item.rs
+@@ -0,0 +1,45 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Item;
++
++#[test]
++fn test_macro_variable_attr() {
++ // mimics the token stream corresponding to `$attr fn f() {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
++ TokenTree::Ident(Ident::new("fn", Span::call_site())),
++ TokenTree::Ident(Ident::new("f", Span::call_site())),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Item, @r###"
++ Item::Fn {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "test",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
+--- third_party/rust/syn/tests/test_iterators.rs
++++ third_party/rust/syn/tests/test_iterators.rs
+@@ -1,15 +1,10 @@
+ use syn::punctuated::{Pair, Punctuated};
+-
+-extern crate quote;
+-#[macro_use]
+-extern crate syn;
+-
+-mod features;
++use syn::Token;
+
+ #[macro_use]
+ mod macros;
+
+ macro_rules! check_exact_size_iterator {
+ ($iter:expr) => {{
+ let iter = $iter;
+ let size_hint = iter.size_hint();
+diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
+--- third_party/rust/syn/tests/test_lit.rs
++++ third_party/rust/syn/tests/test_lit.rs
+@@ -1,18 +1,16 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
++#[macro_use]
++mod macros;
+
+-mod features;
+-
+-use proc_macro2::{TokenStream, TokenTree};
++use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+ use quote::ToTokens;
++use std::iter::FromIterator;
+ use std::str::FromStr;
+-use syn::Lit;
++use syn::{Lit, LitFloat, LitInt};
+
+ fn lit(s: &str) -> Lit {
+ match TokenStream::from_str(s)
+ .unwrap()
+ .into_iter()
+ .next()
+ .unwrap()
+ {
+@@ -45,16 +43,19 @@ fn strings() {
+ test_string("\"'\"", "'");
+ test_string("\"\"", "");
+ test_string("\"\\u{1F415}\"", "\u{1F415}");
+ test_string(
+ "\"contains\nnewlines\\\nescaped newlines\"",
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
++ test_string("\"...\"q", "...");
++ test_string("r\"...\"q", "...");
++ test_string("r##\"...\"##q", "...");
+ }
+
+ #[test]
+ fn byte_strings() {
+ fn test_byte_string(s: &str, value: &[u8]) {
+ match lit(s) {
+ Lit::ByteStr(lit) => {
+ assert_eq!(lit.value(), value);
+@@ -74,16 +75,19 @@ fn byte_strings() {
+ test_byte_string("b\"\\\"\"", b"\"");
+ test_byte_string("b\"'\"", b"'");
+ test_byte_string("b\"\"", b"");
+ test_byte_string(
+ "b\"contains\nnewlines\\\nescaped newlines\"",
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
++ test_byte_string("b\"...\"q", b"...");
++ test_byte_string("br\"...\"q", b"...");
++ test_byte_string("br##\"...\"##q", b"...");
+ }
+
+ #[test]
+ fn bytes() {
+ fn test_byte(s: &str, value: u8) {
+ match lit(s) {
+ Lit::Byte(lit) => {
+ assert_eq!(lit.value(), value);
+@@ -95,16 +99,17 @@ fn bytes() {
+ }
+
+ test_byte("b'a'", b'a');
+ test_byte("b'\\n'", b'\n');
+ test_byte("b'\\r'", b'\r');
+ test_byte("b'\\t'", b'\t');
+ test_byte("b'\\''", b'\'');
+ test_byte("b'\"'", b'"');
++ test_byte("b'a'q", b'a');
+ }
+
+ #[test]
+ fn chars() {
+ fn test_char(s: &str, value: char) {
+ match lit(s) {
+ Lit::Char(lit) => {
+ assert_eq!(lit.value(), value);
+@@ -120,16 +125,17 @@ fn chars() {
+ test_char("'a'", 'a');
+ test_char("'\\n'", '\n');
+ test_char("'\\r'", '\r');
+ test_char("'\\t'", '\t');
+ test_char("'🐕'", '🐕'); // NOTE: This is an emoji
+ test_char("'\\''", '\'');
+ test_char("'\"'", '"');
+ test_char("'\\u{1F415}'", '\u{1F415}');
++ test_char("'a'q", 'a');
+ }
+
+ #[test]
+ fn ints() {
+ fn test_int(s: &str, value: u64, suffix: &str) {
+ match lit(s) {
+ Lit::Int(lit) => {
+ assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
+@@ -180,9 +186,64 @@ fn floats() {
+ }
+ }
+
+ test_float("5.5", 5.5, "");
+ test_float("5.5E12", 5.5e12, "");
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
++ test_float("9e99e99", 9e99, "e99");
+ }
++
++#[test]
++fn negative() {
++ let span = Span::call_site();
++ assert_eq!("-1", LitInt::new("-1", span).to_string());
++ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
++ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
++ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
++ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
++ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
++ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
++ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
++}
++
++#[test]
++fn suffix() {
++ fn get_suffix(token: &str) -> String {
++ let lit = syn::parse_str::<Lit>(token).unwrap();
++ match lit {
++ Lit::Str(lit) => lit.suffix().to_owned(),
++ Lit::ByteStr(lit) => lit.suffix().to_owned(),
++ Lit::Byte(lit) => lit.suffix().to_owned(),
++ Lit::Char(lit) => lit.suffix().to_owned(),
++ Lit::Int(lit) => lit.suffix().to_owned(),
++ Lit::Float(lit) => lit.suffix().to_owned(),
++ _ => unimplemented!(),
++ }
++ }
++
++ assert_eq!(get_suffix("\"\"s"), "s");
++ assert_eq!(get_suffix("r\"\"r"), "r");
++ assert_eq!(get_suffix("b\"\"b"), "b");
++ assert_eq!(get_suffix("br\"\"br"), "br");
++ assert_eq!(get_suffix("r#\"\"#r"), "r");
++ assert_eq!(get_suffix("'c'c"), "c");
++ assert_eq!(get_suffix("b'b'b"), "b");
++ assert_eq!(get_suffix("1i32"), "i32");
++ assert_eq!(get_suffix("1_i32"), "i32");
++ assert_eq!(get_suffix("1.0f32"), "f32");
++ assert_eq!(get_suffix("1.0_f32"), "f32");
++}
++
++#[test]
++fn test_deep_group_empty() {
++ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
++ ))]),
++ ))]);
++
++ snapshot!(tokens as Lit, @r#""hi""# );
++}
+diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
+--- third_party/rust/syn/tests/test_meta.rs
++++ third_party/rust/syn/tests/test_meta.rs
+@@ -1,343 +1,339 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use syn::{Meta, MetaList, MetaNameValue, NestedMeta};
+
+ #[test]
+ fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "hello",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "hello",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+ #[test]
+ fn test_parse_meta_name_value() {
+ let input = "foo = 5";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+ }
+
+ #[test]
+ fn test_parse_meta_name_value_with_keyword() {
+ let input = "static = 5";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+ }
+
+ #[test]
+ fn test_parse_meta_name_value_with_bool() {
+ let input = "true = 5";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+ }
+
+ #[test]
+ fn test_parse_meta_item_list_lit() {
+ let input = "foo(5)";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+ }
+
+ #[test]
+ fn test_parse_meta_item_multiple() {
+ let input = "foo(word, name = 5, list(name2 = 6), word2)";
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+ }
+
+ #[test]
+ fn test_parse_nested_meta() {
+ let input = "5";
+ snapshot!(input as NestedMeta, @"Lit(5)");
+
+ let input = "list(name2 = 6)";
+ snapshot!(input as NestedMeta, @r###"
+- ⋮Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮})
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ })
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
+--- third_party/rust/syn/tests/test_parse_buffer.rs
++++ third_party/rust/syn/tests/test_parse_buffer.rs
+@@ -1,12 +1,12 @@
+-#[macro_use]
+-extern crate syn;
+-
++use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
++use std::iter::FromIterator;
+ use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
++use syn::{parenthesized, Token};
+
+ #[test]
+ #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
+ fn smuggled_speculative_cursor_between_sources() {
+ struct BreakRules;
+ impl Parse for BreakRules {
+ fn parse(input1: ParseStream) -> Result<Self> {
+ let nested = |input2: ParseStream| {
+@@ -48,8 +48,43 @@ fn smuggled_speculative_cursor_into_brac
+ parenthesized!(a in input);
+ input.advance_to(&a);
+ Ok(Self)
+ }
+ }
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+ }
++
++#[test]
++fn trailing_empty_none_group() {
++ fn parse(input: ParseStream) -> Result<()> {
++ input.parse::<Token![+]>()?;
++
++ let content;
++ parenthesized!(content in input);
++ content.parse::<Token![+]>()?;
++
++ Ok(())
++ }
++
++ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(
++ Delimiter::Parenthesis,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ ]),
++ )),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::new(),
++ ))]),
++ )),
++ ]);
++
++ parse.parse2(tokens).unwrap();
++}
+diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_parse_stream.rs
+@@ -0,0 +1,12 @@
++use syn::ext::IdentExt;
++use syn::parse::ParseStream;
++use syn::{Ident, Token};
++
++#[test]
++fn test_peek() {
++ let _ = |input: ParseStream| {
++ let _ = input.peek(Ident);
++ let _ = input.peek(Ident::peek_any);
++ let _ = input.peek(Token![::]);
++ };
++}
+diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
+--- third_party/rust/syn/tests/test_pat.rs
++++ third_party/rust/syn/tests/test_pat.rs
+@@ -1,23 +1,38 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ use quote::quote;
+-use syn::Pat;
++use syn::{Item, Pat, Stmt};
+
+ #[test]
+ fn test_pat_ident() {
+ match syn::parse2(quote!(self)).unwrap() {
+ Pat::Ident(_) => (),
+ value => panic!("expected PatIdent, got {:?}", value),
+ }
+ }
+
+ #[test]
+ fn test_pat_path() {
+ match syn::parse2(quote!(self::CONST)).unwrap() {
+ Pat::Path(_) => (),
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+ }
++
++#[test]
++fn test_leading_vert() {
++ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
++
++ syn::parse_str::<Item>("fn f() {}").unwrap();
++ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
++ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
++
++ syn::parse_str::<Stmt>("let | () = ();").unwrap();
++ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
++ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
++ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
++ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
++}
+diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_path.rs
+@@ -0,0 +1,52 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::{Expr, Type};
++
++#[test]
++fn parse_interpolated_leading_component() {
++ // mimics the token stream corresponding to `$mod::rest`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
++ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("rest", Span::call_site())),
++ ]);
++
++ snapshot!(tokens.clone() as Expr, @r###"
++ Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
+--- third_party/rust/syn/tests/test_precedence.rs
++++ third_party/rust/syn/tests/test_precedence.rs
+@@ -1,43 +1,34 @@
+ #![cfg(not(syn_disable_nightly_tests))]
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+ //! The tests in this module do the following:
+ //!
+-//! 1. Parse a given expression in both `syn` and `libsyntax`.
++//! 1. Parse a given expression in both `syn` and `librustc`.
+ //! 2. Fold over the expression adding brackets around each subexpression (with
+-//! some complications - see the `syn_brackets` and `libsyntax_brackets`
++//! some complications - see the `syn_brackets` and `librustc_brackets`
+ //! methods).
+ //! 3. Serialize the `syn` expression back into a string, and re-parse it with
+-//! `libsyntax`.
++//! `librustc`.
+ //! 4. Respan all of the expressions, replacing the spans with the default
+ //! spans.
+ //! 5. Compare the expressions with one another, if they are not equal fail.
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate regex;
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
+-extern crate smallvec;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+ use regex::Regex;
+-use smallvec::smallvec;
+-use syntax::ast;
+-use syntax::ptr::P;
+-use syntax_pos::edition::Edition;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_span::edition::Edition;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+ use std::io::Read;
+ use std::process;
+ use std::sync::atomic::{AtomicUsize, Ordering};
+
+ use common::eq::SpanlessEq;
+@@ -68,17 +59,17 @@ fn test_simple_precedence() {
+ for input in EXPRS {
+ let expr = if let Some(expr) = parse::syn_expr(input) {
+ expr
+ } else {
+ failed += 1;
+ continue;
+ };
+
+- let pf = match test_expressions(vec![expr]) {
++ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
+ (1, 0) => "passed",
+ (0, 1) => {
+ failed += 1;
+ "failed"
+ }
+ _ => unreachable!(),
+ };
+ errorf!("=== {}: {}\n", input, pf);
+@@ -86,18 +77,18 @@ fn test_simple_precedence() {
+
+ if failed > 0 {
+ panic!("Failed {} tests", failed);
+ }
+ }
+
+ /// Test expressions from rustc, like in `test_round_trip`.
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_rustc_precedence() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+ panic!("Skipping all precedence tests");
+ }
+
+ let passed = AtomicUsize::new(0);
+ let failed = AtomicUsize::new(0);
+@@ -113,34 +104,26 @@ fn test_rustc_precedence() {
+ .unwrap()
+ .into_par_iter()
+ .for_each(|entry| {
+ let path = entry.path();
+ if path.is_dir() {
+ return;
+ }
+
+- // Our version of `libsyntax` can't parse this tests
+- if path
+- .to_str()
+- .unwrap()
+- .ends_with("optional_comma_in_match_arm.rs")
+- {
+- return;
+- }
+-
+ let mut file = File::open(path).unwrap();
+ let mut content = String::new();
+ file.read_to_string(&mut content).unwrap();
+ let content = edition_regex.replace_all(&content, "_$0");
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
++ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+- test_expressions(exprs)
++ test_expressions(edition, exprs)
+ }
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+ (0, 1)
+ }
+ };
+
+ errorf!(
+@@ -164,147 +147,182 @@ fn test_rustc_precedence() {
+ errorf!("\n===== Precedence Test Results =====\n");
+ errorf!("{} passed | {} failed\n", passed, failed);
+
+ if failed > 0 {
+ panic!("{} failures", failed);
+ }
+ }
+
+-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
++fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ for expr in exprs {
+ let raw = quote!(#expr).to_string();
+
+- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
++ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse raw\n");
++ errorf!("\nFAIL - librustc failed to parse raw\n");
+ continue;
+ };
+
+ let syn_expr = syn_brackets(expr);
+- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
++ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
++ errorf!("\nFAIL - librustc failed to parse bracketed\n");
+ continue;
+ };
+
+- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
++ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ passed += 1;
+ } else {
+ failed += 1;
+- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
++ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
+ }
+ }
+ });
+
+ (passed, failed)
+ }
+
+-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
++fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
++ parse::librustc_expr(input).and_then(librustc_brackets)
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precedence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ ///
+-/// This method operates on libsyntax objects.
+-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++/// This method operates on librustc objects.
++fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++ use rustc_ast::ast::{
++ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
++ };
++ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
++ use rustc_data_structures::map_in_place::MapInPlace;
+ use rustc_data_structures::thin_vec::ThinVec;
+- use smallvec::SmallVec;
++ use rustc_span::DUMMY_SP;
+ use std::mem;
+- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
+- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
+- use syntax_pos::DUMMY_SP;
+
+ struct BracketsVisitor {
+ failed: bool,
+ };
+
++ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
++ if f.is_shorthand {
++ noop_visit_expr(&mut f.expr, vis);
++ } else {
++ vis.visit_expr(&mut f.expr);
++ }
++ vec![f]
++ }
++
++ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
++ let kind = match stmt.kind {
++ // Don't wrap toplevel expressions in statements.
++ StmtKind::Expr(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Expr(e)
++ }
++ StmtKind::Semi(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Semi(e)
++ }
++ s => s,
++ };
++
++ vec![Stmt { kind, ..stmt }]
++ }
++
++ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
++ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
++ match &mut e.kind {
++ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
++ ExprKind::Struct(path, fields, expr) => {
++ vis.visit_path(path);
++ fields.flat_map_in_place(|field| flat_map_field(field, vis));
++ visit_opt(expr, |expr| vis.visit_expr(expr));
++ vis.visit_id(&mut e.id);
++ vis.visit_span(&mut e.span);
++ visit_thin_attrs(&mut e.attrs, vis);
++ }
++ _ => noop_visit_expr(e, vis),
++ }
++ }
++
+ impl MutVisitor for BracketsVisitor {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+- match e.node {
++ match e.kind {
+ ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+- node: ExprKind::Err,
++ kind: ExprKind::Err,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
++ tokens: None,
+ }),
+ );
+- e.node = ExprKind::Paren(inner);
++ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
+- if f.is_shorthand {
+- noop_visit_expr(&mut f.expr, self);
+- } else {
+- self.visit_expr(&mut f.expr);
++ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
++ _ => noop_visit_generic_arg(arg, self),
+ }
+- SmallVec::from([f])
++ }
++
++ fn visit_block(&mut self, block: &mut P<Block>) {
++ self.visit_id(&mut block.id);
++ block
++ .stmts
++ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
++ self.visit_span(&mut block.span);
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+ // types yet. We'll look into comparing those in the future. For now
+ // focus on expressions appearing in other places.
+ fn visit_pat(&mut self, pat: &mut P<Pat>) {
+ let _ = pat;
+ }
+
+ fn visit_ty(&mut self, ty: &mut P<Ty>) {
+ let _ = ty;
+ }
+
+- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
+- let node = match stmt.node {
+- // Don't wrap toplevel expressions in statements.
+- StmtKind::Expr(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Expr(e)
+- }
+- StmtKind::Semi(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Semi(e)
+- }
+- s => s,
+- };
+-
+- smallvec![Stmt { node, ..stmt }]
+- }
+-
+- fn visit_mac(&mut self, mac: &mut Mac) {
+- // By default when folding over macros, libsyntax panics. This is
++ fn visit_mac(&mut self, mac: &mut MacCall) {
++ // By default when folding over macros, librustc panics. This is
+ // because it's usually not what you want, you want to run after
+ // macro expansion. We do want to do that (syn doesn't do macro
+ // expansion), so we implement visit_mac to just return the macro
+ // unchanged.
+ let _ = mac;
+ }
+ }
+
+ let mut folder = BracketsVisitor { failed: false };
+- folder.visit_expr(&mut libsyntax_expr);
++ folder.visit_expr(&mut librustc_expr);
+ if folder.failed {
+ None
+ } else {
+- Some(libsyntax_expr)
++ Some(librustc_expr)
+ }
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precedence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
+ use syn::fold::*;
+@@ -313,24 +331,43 @@ fn syn_brackets(syn_expr: syn::Expr) ->
+ struct ParenthesizeEveryExpr;
+ impl Fold for ParenthesizeEveryExpr {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+ match expr {
+ Expr::Group(_) => unreachable!(),
+ Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
+ fold_expr(self, expr)
+ }
+- node => Expr::Paren(ExprParen {
++ _ => Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+- expr: Box::new(fold_expr(self, node)),
++ expr: Box::new(fold_expr(self, expr)),
+ paren_token: token::Paren::default(),
+ }),
+ }
+ }
+
++ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_argument(self, arg),
++ }
++ }
++
++ fn fold_generic_method_argument(
++ &mut self,
++ arg: GenericMethodArgument,
++ ) -> GenericMethodArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_method_argument(self, arg),
++ }
++ }
++
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+ Stmt::Expr(e) => Stmt::Expr(fold_expr(self, e)),
+ Stmt::Semi(e, semi) => Stmt::Semi(fold_expr(self, e), semi),
+ s => s,
+ }
+ }
+@@ -355,17 +392,20 @@ fn syn_brackets(syn_expr: syn::Expr) ->
+ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ use syn::fold::*;
+ use syn::punctuated::Punctuated;
+ use syn::*;
+
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+- self.0.push(expr);
++ match expr {
++ Expr::Verbatim(tokens) if tokens.is_empty() => {}
++ _ => self.0.push(expr),
++ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+ elems: Punctuated::new(),
+ paren_token: token::Paren::default(),
+ })
+ }
+ }
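The precedence test above works by wrapping every sub-expression in explicit parentheses and then comparing the reparsed result against librustc's reading of the same source. A minimal, self-contained sketch of that bracketing idea using syn's public fold API (assuming syn 1.0 with the "full" and "fold" features plus the quote crate; this is not the test's actual code):

use quote::quote;
use syn::fold::{fold_expr, Fold};
use syn::{parse_quote, token, Expr, ExprParen};

struct Parenthesize;

impl Fold for Parenthesize {
    fn fold_expr(&mut self, expr: Expr) -> Expr {
        // Recurse into the children first, then wrap the folded expression
        // in explicit parentheses.
        Expr::Paren(ExprParen {
            attrs: Vec::new(),
            paren_token: token::Paren::default(),
            expr: Box::new(fold_expr(self, expr)),
        })
    }
}

fn main() {
    let expr: Expr = parse_quote!(1 + 2 * 3);
    let mut folder = Parenthesize;
    let bracketed = folder.fold_expr(expr);
    // Printing the folded tree makes the parsed precedence explicit,
    // roughly: ((1) + ((2) * (3)))
    println!("{}", quote!(#bracketed));
}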
+diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_receiver.rs
+@@ -0,0 +1,127 @@
++use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
++
++#[test]
++fn test_by_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_value(self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_mut_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_mut(mut self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_ref() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_ref(self: &Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_box() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_box(self: Box<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_pin() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_pin(self: Pin<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_explicit_type() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn explicit_type(self: Pin<MyType>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn value_shorthand(self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_mut_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn mut_value_shorthand(mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_shorthand(&self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_mut_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_mut_shorthand(&mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
++ }
++}
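The receiver tests above exercise Signature::receiver(): shorthand receivers (self, &self, &mut self) come back as FnArg::Receiver, while explicitly typed ones (self: Box<Self>, self: Pin<Self>) come back as FnArg::Typed. A small usage sketch (assuming syn 1.0 with the "full" feature):

use syn::{parse_quote, FnArg, TraitItemMethod};

fn main() {
    let method: TraitItemMethod = parse_quote! {
        fn area(&self) -> f64;
    };
    // receiver() returns the `self` argument of the signature, if any.
    match method.sig.receiver() {
        Some(FnArg::Receiver(_)) => println!("shorthand self receiver"),
        Some(FnArg::Typed(_)) => println!("explicitly typed self receiver"),
        None => println!("no self argument"),
    }
}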
+diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
+--- third_party/rust/syn/tests/test_round_trip.rs
++++ third_party/rust/syn/tests/test_round_trip.rs
+@@ -1,28 +1,26 @@
+ #![cfg(not(syn_disable_nightly_tests))]
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_ast;
++extern crate rustc_errors;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+-use syntax::ast;
+-use syntax::parse::{self, PResult, ParseSess};
+-use syntax::source_map::FilePathMapping;
+-use syntax_pos::edition::Edition;
+-use syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_errors::PResult;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+ use std::io::Read;
+ use std::panic;
+ use std::process;
+ use std::sync::atomic::{AtomicUsize, Ordering};
+ use std::time::Instant;
+@@ -33,18 +31,18 @@ mod macros;
+ #[allow(dead_code)]
+ mod common;
+
+ mod repo;
+
+ use common::eq::SpanlessEq;
+
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_round_trip() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+ panic!("Skipping all round_trip tests");
+ }
+
+ let failed = AtomicUsize::new(0);
+
+@@ -73,43 +71,44 @@ fn test_round_trip() {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+ if prev_failed + 1 >= abort_after {
+ process::exit(1);
+ }
+ return;
+ }
+ };
+ let back = quote!(#krate).to_string();
++ let edition = repo::edition(path).parse().unwrap();
+
+ let equal = panic::catch_unwind(|| {
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- let before = match libsyntax_parse(content, &sess) {
++ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(mut diagnostic) => {
+ diagnostic.cancel();
+ if diagnostic
+ .message()
+ .starts_with("file not found for module")
+ {
+ errorf!("=== {}: ignore\n", path.display());
+ } else {
+ errorf!(
+- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
++ "=== {}: ignore - librustc failed to parse original content: {}\n",
+ path.display(),
+ diagnostic.message()
+ );
+ }
+ return true;
+ }
+ };
+- let after = match libsyntax_parse(back, &sess) {
++ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(mut diagnostic) => {
+- errorf!("=== {}: libsyntax failed to parse", path.display());
++ errorf!("=== {}: librustc failed to parse", path.display());
+ diagnostic.emit();
+ return false;
+ }
+ };
+
+ if SpanlessEq::eq(&before, &after) {
+ errorf!(
+ "=== {}: pass in {}ms\n",
+@@ -125,29 +124,29 @@ fn test_round_trip() {
+ before,
+ after,
+ );
+ false
+ }
+ })
+ });
+ match equal {
+- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
++ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
+ Ok(true) => {}
+ Ok(false) => {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+ if prev_failed + 1 >= abort_after {
+ process::exit(1);
+ }
+ }
+ }
+ });
+
+ let failed = failed.load(Ordering::SeqCst);
+ if failed > 0 {
+ panic!("{} failures", failed);
+ }
+ }
+
+-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
++fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+ let name = FileName::Custom("test_round_trip".to_string());
+ parse::parse_crate_from_source_str(name, content, sess)
+ }
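test_round_trip.rs checks that printing a syn-parsed crate through quote! and reparsing it yields an equivalent AST according to rustc's own parser. A much smaller, syn-only sketch of the same round-trip idea (assuming syn 1.0 with the "full" and "extra-traits" features, so that File implements PartialEq and comparisons ignore spans):

use quote::quote;
use syn::File;

// Parse the source, print it back through quote!, reparse the printed form,
// and compare the two syntax trees structurally (spans are not compared).
fn round_trips(source: &str) -> bool {
    let before: File = match syn::parse_file(source) {
        Ok(file) => file,
        Err(_) => return true, // skip sources syn cannot parse at all
    };
    let printed = quote!(#before).to_string();
    match syn::parse_file(&printed) {
        Ok(after) => before == after,
        Err(_) => false,
    }
}

fn main() {
    assert!(round_trips("fn main() { let x = 1 + 2 * 3; }"));
}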
+diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_shebang.rs
+@@ -0,0 +1,59 @@
++#[macro_use]
++mod macros;
++
++#[test]
++fn test_basic() {
++ let content = "#!/usr/bin/env rustx\nfn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ shebang: Some("#!/usr/bin/env rustx"),
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_comment() {
++ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ attrs: [
++ Attribute {
++ style: Inner,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "allow",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(dead_code)`),
++ },
++ ],
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
+--- third_party/rust/syn/tests/test_should_parse.rs
++++ third_party/rust/syn/tests/test_should_parse.rs
+@@ -1,12 +1,8 @@
+-extern crate syn;
+-
+-mod features;
+-
+ macro_rules! should_parse {
+ ($name:ident, { $($in:tt)* }) => {
+ #[test]
+ fn $name() {
+ // Make sure we can parse the file!
+ syn::parse_file(stringify!($($in)*)).unwrap();
+ }
+ }
+diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
+--- third_party/rust/syn/tests/test_size.rs
++++ third_party/rust/syn/tests/test_size.rs
+@@ -1,12 +1,10 @@
+ #![cfg(target_pointer_width = "64")]
+
+-mod features;
+-
+ use std::mem;
+ use syn::*;
+
+ #[test]
+ fn test_expr_size() {
+ assert_eq!(mem::size_of::<Expr>(), 280);
+ }
+
+diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_stmt.rs
+@@ -0,0 +1,44 @@
++#[macro_use]
++mod macros;
++
++use syn::Stmt;
++
++#[test]
++fn test_raw_operator() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Verbatim(`& raw const x`)),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_variable() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Expr::Reference {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "raw",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_invalid() {
++ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
++}
+diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
+--- third_party/rust/syn/tests/test_token_trees.rs
++++ third_party/rust/syn/tests/test_token_trees.rs
+@@ -1,14 +1,8 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+ use proc_macro2::TokenStream;
+ use quote::quote;
+ use syn::Lit;
+
+ #[test]
+@@ -16,17 +10,21 @@ fn test_struct() {
+ let input = "
+ #[derive(Debug, Clone)]
+ pub struct Item {
+ pub ident: Ident,
+ pub attrs: Vec<Attribute>,
+ }
+ ";
+
+- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
++ snapshot!(input as TokenStream, @r###"
++ TokenStream(
++ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
++ )
++ "###);
+ }
+
+ #[test]
+ fn test_literal_mangling() {
+ let code = "0_4";
+ let parsed: Lit = syn::parse_str(code).unwrap();
+ assert_eq!(code, quote!(#parsed).to_string());
+ }
+diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_ty.rs
+@@ -0,0 +1,53 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Type;
++
++#[test]
++fn test_mut_self() {
++ syn::parse_str::<Type>("fn(mut self)").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
++}
++
++#[test]
++fn test_macro_variable_type() {
++ // mimics the token stream corresponding to `$ty<T>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
++ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("T", Span::call_site())),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ ]);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "ty",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
+new file mode 100644
+--- /dev/null
++++ third_party/rust/syn/tests/test_visibility.rs
+@@ -0,0 +1,145 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use std::iter::FromIterator;
++use syn::parse::{Parse, ParseStream};
++use syn::{DeriveInput, Result, Visibility};
++
++#[derive(Debug)]
++struct VisRest {
++ vis: Visibility,
++ rest: TokenStream,
++}
++
++impl Parse for VisRest {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Ok(VisRest {
++ vis: input.parse()?,
++ rest: input.parse()?,
++ })
++ }
++}
++
++macro_rules! assert_vis_parse {
++ ($input:expr, Ok($p:pat)) => {
++ assert_vis_parse!($input, Ok($p) + "");
++ };
++
++ ($input:expr, Ok($p:pat) + $rest:expr) => {
++ let expected = $rest.parse::<TokenStream>().unwrap();
++ let parse: VisRest = syn::parse_str($input).unwrap();
++
++ match parse.vis {
++ $p => {}
++ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
++ }
++
++ // NOTE: Round-trips through `to_string` to avoid potential whitespace
++ // diffs.
++ assert_eq!(parse.rest.to_string(), expected.to_string());
++ };
++
++ ($input:expr, Err) => {
++ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
++ };
++}
++
++#[test]
++fn test_pub() {
++ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
++}
++
++#[test]
++fn test_crate() {
++ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
++}
++
++#[test]
++fn test_inherited() {
++ assert_vis_parse!("", Ok(Visibility::Inherited));
++}
++
++#[test]
++fn test_in() {
++ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_crate() {
++ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_self() {
++ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_super() {
++ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_missing_in() {
++ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
++}
++
++#[test]
++fn test_missing_in_path() {
++ assert_vis_parse!("pub(in)", Err);
++}
++
++#[test]
++fn test_crate_path() {
++ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
++}
++
++#[test]
++fn test_junk_after_in() {
++ assert_vis_parse!("pub(in some::path @@garbage)", Err);
++}
++
++#[test]
++fn test_empty_group_vis() {
++ // mimics `struct S { $vis $field: () }` where $vis is empty
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("struct", Span::call_site())),
++ TokenTree::Ident(Ident::new("S", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
++ "f",
++ Span::call_site(),
++ ))]),
++ )),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as DeriveInput, @r###"
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("f"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
+--- third_party/rust/syn/tests/zzz_stable.rs
++++ third_party/rust/syn/tests/zzz_stable.rs
+@@ -1,21 +1,19 @@
+ #![cfg(syn_disable_nightly_tests)]
+
+-extern crate termcolor;
+-
+ use std::io::{self, Write};
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+ const MSG: &str = "\
+ ‖
+ ‖ WARNING:
+ ‖ This is not a nightly compiler so not all tests were able to
+ ‖ run. Syn includes tests that compare Syn's parser against the
+-‖ compiler's parser, which requires access to unstable libsyntax
++‖ compiler's parser, which requires access to unstable librustc
+ ‖ data structures and a nightly compiler.
+ ‖
+ ";
+
+ #[test]
+ fn notice() -> io::Result<()> {
+ let header = "WARNING";
+ let index_of_header = MSG.find(header).unwrap();
+
Property changes on: head/www/firefox/files/patch-bug1663715
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/www/firefox-esr/files/patch-bug1663715
===================================================================
--- head/www/firefox-esr/files/patch-bug1663715 (nonexistent)
+++ head/www/firefox-esr/files/patch-bug1663715 (revision 552221)
@@ -0,0 +1,31087 @@
+From 63678ae69e03325d65255d29f1af4a6ea3dd354a Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:07:49 +0200
+Subject: [PATCH 36/38] bmo#1643201: Cherry-pick some servo changes to
+ derive_common
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ servo/components/derive_common/cg.rs | 6 +++++-
+ 1 file changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/servo/components/derive_common/cg.rs b/servo/components/derive_common/cg.rs
+index 55a75398c7..c51c0d7750 100644
+--- servo/components/derive_common/cg.rs
++++ servo/components/derive_common/cg.rs
+@@ -7,7 +7,7 @@ use proc_macro2::{Span, TokenStream};
+ use quote::TokenStreamExt;
+ use syn::{self, AngleBracketedGenericArguments, Binding, DeriveInput, Field};
+ use syn::{GenericArgument, GenericParam, Ident, Path};
+-use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray};
++use syn::{PathArguments, PathSegment, QSelf, Type, TypeArray, TypeGroup};
+ use syn::{TypeParam, TypeParen, TypePath, TypeSlice, TypeTuple};
+ use syn::{Variant, WherePredicate};
+ use synstructure::{self, BindStyle, BindingInfo, VariantAst, VariantInfo};
+@@ -208,6 +208,10 @@ where
+ elem: Box::new(map_type_params(&inner.elem, params, f)),
+ ..inner.clone()
+ }),
++ Type::Group(ref inner) => Type::from(TypeGroup {
++ elem: Box::new(map_type_params(&inner.elem, params, f)),
++ ..inner.clone()
++ }),
+ ref ty => panic!("type {:?} cannot be mapped yet", ty),
+ }
+ }
+--
+2.28.0
+
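The cherry-pick above adds a Type::Group arm to cg::map_type_params: with newer proc-macro2/syn, types produced by macro interpolation are wrapped in None-delimited groups and surface as Type::Group, so a type-mapping pass has to descend through them like any other wrapper. A minimal sketch of that idea (a hypothetical helper, not Servo's actual code; assuming syn 1.0 plus the quote crate):

use syn::{parse_quote, Type, TypeGroup};

// Replace the type parameter `T` with `U`, descending through Type::Group
// wrappers so that macro-interpolated types are handled too. Other type
// kinds are left untouched in this sketch.
fn replace_t(ty: &Type) -> Type {
    match ty {
        Type::Path(p) if p.qself.is_none() && p.path.is_ident("T") => parse_quote!(U),
        Type::Group(inner) => Type::Group(TypeGroup {
            elem: Box::new(replace_t(&inner.elem)),
            ..inner.clone()
        }),
        other => other.clone(),
    }
}

fn main() {
    let ty: Type = parse_quote!(T);
    let mapped = replace_t(&ty);
    println!("{}", quote::quote!(#mapped)); // prints: U
}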
+From 23f22e9de6cc2236d58cc03997a1040e62c532e1 Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:09:04 +0200
+Subject: [PATCH 37/38] bmo#1653339: Teach style_derive's map_type_params about
+ mapping self correctly
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ servo/components/derive_common/cg.rs | 30 +++++++++++--------
+ .../style_derive/to_computed_value.rs | 4 ++-
+ 2 files changed, 21 insertions(+), 13 deletions(-)
+
+diff --git a/servo/components/derive_common/cg.rs b/servo/components/derive_common/cg.rs
+index c51c0d7750..8abfd87149 100644
+--- servo/components/derive_common/cg.rs
++++ servo/components/derive_common/cg.rs
+@@ -154,19 +154,19 @@ pub fn fmap_trait_output(input: &DeriveInput, trait_path: &Path, trait_output: &
+ segment.into()
+ }
+
+-pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], f: &mut F) -> Type
++pub fn map_type_params<F>(ty: &Type, params: &[&TypeParam], self_type: &Path, f: &mut F) -> Type
+ where
+ F: FnMut(&Ident) -> Type,
+ {
+ match *ty {
+ Type::Slice(ref inner) => Type::from(TypeSlice {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ Type::Array(ref inner) => {
+ //ref ty, ref expr) => {
+ Type::from(TypeArray {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ })
+ },
+@@ -175,7 +175,7 @@ where
+ elems: inner
+ .elems
+ .iter()
+- .map(|ty| map_type_params(&ty, params, f))
++ .map(|ty| map_type_params(&ty, params, self_type, f))
+ .collect(),
+ ..inner.clone()
+ }),
+@@ -187,10 +187,16 @@ where
+ if params.iter().any(|ref param| &param.ident == ident) {
+ return f(ident);
+ }
++ if ident == "Self" {
++ return Type::from(TypePath {
++ qself: None,
++ path: self_type.clone(),
++ });
++ }
+ }
+ Type::from(TypePath {
+ qself: None,
+- path: map_type_params_in_path(path, params, f),
++ path: map_type_params_in_path(path, params, self_type, f),
+ })
+ },
+ Type::Path(TypePath {
+@@ -198,25 +204,25 @@ where
+ ref path,
+ }) => Type::from(TypePath {
+ qself: qself.as_ref().map(|qself| QSelf {
+- ty: Box::new(map_type_params(&qself.ty, params, f)),
++ ty: Box::new(map_type_params(&qself.ty, params, self_type, f)),
+ position: qself.position,
+ ..qself.clone()
+ }),
+- path: map_type_params_in_path(path, params, f),
++ path: map_type_params_in_path(path, params, self_type, f),
+ }),
+ Type::Paren(ref inner) => Type::from(TypeParen {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ Type::Group(ref inner) => Type::from(TypeGroup {
+- elem: Box::new(map_type_params(&inner.elem, params, f)),
++ elem: Box::new(map_type_params(&inner.elem, params, self_type, f)),
+ ..inner.clone()
+ }),
+ ref ty => panic!("type {:?} cannot be mapped yet", ty),
+ }
+ }
+
+-fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], f: &mut F) -> Path
++fn map_type_params_in_path<F>(path: &Path, params: &[&TypeParam], self_type: &Path, f: &mut F) -> Path
+ where
+ F: FnMut(&Ident) -> Type,
+ {
+@@ -236,11 +242,11 @@ where
+ .map(|arg| match arg {
+ ty @ &GenericArgument::Lifetime(_) => ty.clone(),
+ &GenericArgument::Type(ref data) => {
+- GenericArgument::Type(map_type_params(data, params, f))
++ GenericArgument::Type(map_type_params(data, params, self_type, f))
+ },
+ &GenericArgument::Binding(ref data) => {
+ GenericArgument::Binding(Binding {
+- ty: map_type_params(&data.ty, params, f),
++ ty: map_type_params(&data.ty, params, self_type, f),
+ ..data.clone()
+ })
+ },
+diff --git a/servo/components/style_derive/to_computed_value.rs b/servo/components/style_derive/to_computed_value.rs
+index fe6bddb7ed..1dc422e2dd 100644
+--- servo/components/style_derive/to_computed_value.rs
++++ servo/components/style_derive/to_computed_value.rs
+@@ -47,12 +47,15 @@ pub fn derive_to_value(
+ cg::add_predicate(&mut where_clause, parse_quote!(#param: #trait_path));
+ }
+
++ let computed_value_type = cg::fmap_trait_output(&input, &trait_path, &output_type_name);
++
+ let mut add_field_bound = |binding: &BindingInfo| {
+ let ty = &binding.ast().ty;
+
+ let output_type = cg::map_type_params(
+ ty,
+ &params,
++ &computed_value_type,
+ &mut |ident| parse_quote!(<#ident as #trait_path>::#output_type_name),
+ );
+
+@@ -142,7 +145,6 @@ pub fn derive_to_value(
+
+ input.generics.where_clause = where_clause;
+ let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+- let computed_value_type = cg::fmap_trait_output(&input, &trait_path, &output_type_name);
+
+ let impl_ = trait_impl(from_body, to_body);
+
+--
+2.28.0
+
+From 300e01e71c9dc536d499d80563968c5fc7f7e34a Mon Sep 17 00:00:00 2001
+From: Thomas Deutschmann <whissi@gentoo.org>
+Date: Sat, 10 Oct 2020 16:10:20 +0200
+Subject: [PATCH 38/38] bmo#1663715: Update syn and proc-macro2 so that Firefox
+ can build on Rust nightly again
+
+Link: https://bugzilla.mozilla.org/show_bug.cgi?id=1663715#c7
+Signed-off-by: Thomas Deutschmann <whissi@gentoo.org>
+---
+ Cargo.lock | 8 +-
+ .../rust/lucet-wasi/.cargo-checksum.json | 2 +-
+ .../rust/packed_simd/.cargo-checksum.json | 2 +-
+ .../rust/proc-macro2/.cargo-checksum.json | 2 +-
+ third_party/rust/proc-macro2/Cargo.toml | 15 +-
+ third_party/rust/proc-macro2/README.md | 2 +-
+ third_party/rust/proc-macro2/build.rs | 20 +
+ third_party/rust/proc-macro2/src/detection.rs | 67 +
+ third_party/rust/proc-macro2/src/fallback.rs | 1010 ++----
+ third_party/rust/proc-macro2/src/lib.rs | 225 +-
+ third_party/rust/proc-macro2/src/marker.rs | 18 +
+ third_party/rust/proc-macro2/src/parse.rs | 849 +++++
+ third_party/rust/proc-macro2/src/strnom.rs | 391 ---
+ third_party/rust/proc-macro2/src/wrapper.rs | 258 +-
+ .../rust/proc-macro2/tests/comments.rs | 103 +
+ third_party/rust/proc-macro2/tests/marker.rs | 33 +
+ third_party/rust/proc-macro2/tests/test.rs | 240 +-
+ .../rust/proc-macro2/tests/test_fmt.rs | 26 +
+ .../spirv-cross-internal/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/.cargo-checksum.json | 2 +-
+ third_party/rust/syn/Cargo.toml | 35 +-
+ third_party/rust/syn/README.md | 16 +-
+ third_party/rust/syn/benches/file.rs | 7 +
+ third_party/rust/syn/benches/rust.rs | 45 +-
+ third_party/rust/syn/build.rs | 38 +-
+ third_party/rust/syn/src/attr.rs | 126 +-
+ third_party/rust/syn/src/buffer.rs | 56 +-
+ third_party/rust/syn/src/custom_keyword.rs | 12 +-
+ .../rust/syn/src/custom_punctuation.rs | 50 +-
+ third_party/rust/syn/src/data.rs | 96 +-
+ third_party/rust/syn/src/derive.rs | 10 +-
+ third_party/rust/syn/src/discouraged.rs | 27 +-
+ third_party/rust/syn/src/error.rs | 33 +-
+ third_party/rust/syn/src/expr.rs | 826 +++--
+ third_party/rust/syn/src/ext.rs | 12 +-
+ third_party/rust/syn/src/file.rs | 4 +-
+ third_party/rust/syn/src/gen/clone.rs | 2051 ++++++++++++
+ third_party/rust/syn/src/gen/debug.rs | 2857 +++++++++++++++++
+ third_party/rust/syn/src/gen/eq.rs | 1930 +++++++++++
+ third_party/rust/syn/src/gen/fold.rs | 287 +-
+ third_party/rust/syn/src/gen/hash.rs | 2691 ++++++++++++++++
+ third_party/rust/syn/src/gen/visit.rs | 19 +-
+ third_party/rust/syn/src/gen/visit_mut.rs | 19 +-
+ third_party/rust/syn/src/generics.rs | 255 +-
+ third_party/rust/syn/src/item.rs | 1515 +++++----
+ third_party/rust/syn/src/keyword.rs | 0
+ third_party/rust/syn/src/lib.rs | 109 +-
+ third_party/rust/syn/src/lifetime.rs | 13 +-
+ third_party/rust/syn/src/lit.rs | 581 ++--
+ third_party/rust/syn/src/mac.rs | 55 +-
+ third_party/rust/syn/src/macros.rs | 61 +-
+ third_party/rust/syn/src/op.rs | 6 +-
+ third_party/rust/syn/src/parse.rs | 211 +-
+ third_party/rust/syn/src/parse_macro_input.rs | 32 +-
+ third_party/rust/syn/src/parse_quote.rs | 15 +-
+ third_party/rust/syn/src/pat.rs | 313 +-
+ third_party/rust/syn/src/path.rs | 33 +-
+ third_party/rust/syn/src/punctuated.rs | 123 +-
+ third_party/rust/syn/src/reserved.rs | 42 +
+ third_party/rust/syn/src/spanned.rs | 4 +-
+ third_party/rust/syn/src/stmt.rs | 141 +-
+ third_party/rust/syn/src/token.rs | 99 +-
+ third_party/rust/syn/src/tt.rs | 6 +-
+ third_party/rust/syn/src/ty.rs | 364 ++-
+ third_party/rust/syn/src/verbatim.rs | 15 +
+ third_party/rust/syn/src/whitespace.rs | 65 +
+ third_party/rust/syn/tests/clone.sh | 16 -
+ third_party/rust/syn/tests/common/eq.rs | 247 +-
+ third_party/rust/syn/tests/common/mod.rs | 13 +
+ third_party/rust/syn/tests/common/parse.rs | 24 +-
+ third_party/rust/syn/tests/debug/gen.rs | 50 +-
+ third_party/rust/syn/tests/debug/mod.rs | 17 +-
+ third_party/rust/syn/tests/features/error.rs | 1 -
+ third_party/rust/syn/tests/features/mod.rs | 22 -
+ third_party/rust/syn/tests/macros/mod.rs | 8 +-
+ third_party/rust/syn/tests/repo/mod.rs | 137 +-
+ third_party/rust/syn/tests/repo/progress.rs | 37 +
+ third_party/rust/syn/tests/test_asyncness.rs | 38 +-
+ third_party/rust/syn/tests/test_attribute.rs | 452 +--
+ .../rust/syn/tests/test_derive_input.rs | 1321 ++++----
+ third_party/rust/syn/tests/test_expr.rs | 314 +-
+ third_party/rust/syn/tests/test_generics.rs | 371 ++-
+ third_party/rust/syn/tests/test_grouping.rs | 53 +-
+ third_party/rust/syn/tests/test_ident.rs | 5 -
+ third_party/rust/syn/tests/test_item.rs | 45 +
+ third_party/rust/syn/tests/test_iterators.rs | 7 +-
+ third_party/rust/syn/tests/test_lit.rs | 75 +-
+ third_party/rust/syn/tests/test_meta.rs | 498 ++-
+ .../rust/syn/tests/test_parse_buffer.rs | 41 +-
+ .../rust/syn/tests/test_parse_stream.rs | 12 +
+ third_party/rust/syn/tests/test_pat.rs | 27 +-
+ third_party/rust/syn/tests/test_path.rs | 52 +
+ third_party/rust/syn/tests/test_precedence.rs | 196 +-
+ third_party/rust/syn/tests/test_receiver.rs | 127 +
+ third_party/rust/syn/tests/test_round_trip.rs | 41 +-
+ third_party/rust/syn/tests/test_shebang.rs | 59 +
+ .../rust/syn/tests/test_should_parse.rs | 4 -
+ third_party/rust/syn/tests/test_size.rs | 2 -
+ third_party/rust/syn/tests/test_stmt.rs | 44 +
+ .../rust/syn/tests/test_token_trees.rs | 12 +-
+ third_party/rust/syn/tests/test_ty.rs | 53 +
+ third_party/rust/syn/tests/test_visibility.rs | 145 +
+ third_party/rust/syn/tests/zzz_stable.rs | 4 +-
+ 103 files changed, 17319 insertions(+), 5831 deletions(-)
+ create mode 100644 third_party/rust/proc-macro2/src/detection.rs
+ create mode 100644 third_party/rust/proc-macro2/src/marker.rs
+ create mode 100644 third_party/rust/proc-macro2/src/parse.rs
+ delete mode 100644 third_party/rust/proc-macro2/src/strnom.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/comments.rs
+ create mode 100644 third_party/rust/proc-macro2/tests/test_fmt.rs
+ create mode 100644 third_party/rust/syn/src/gen/clone.rs
+ create mode 100644 third_party/rust/syn/src/gen/debug.rs
+ create mode 100644 third_party/rust/syn/src/gen/eq.rs
+ create mode 100644 third_party/rust/syn/src/gen/hash.rs
+ delete mode 100644 third_party/rust/syn/src/keyword.rs
+ create mode 100644 third_party/rust/syn/src/reserved.rs
+ create mode 100644 third_party/rust/syn/src/verbatim.rs
+ create mode 100644 third_party/rust/syn/src/whitespace.rs
+ delete mode 100755 third_party/rust/syn/tests/clone.sh
+ delete mode 100644 third_party/rust/syn/tests/features/error.rs
+ delete mode 100644 third_party/rust/syn/tests/features/mod.rs
+ create mode 100644 third_party/rust/syn/tests/repo/progress.rs
+ create mode 100644 third_party/rust/syn/tests/test_item.rs
+ create mode 100644 third_party/rust/syn/tests/test_parse_stream.rs
+ create mode 100644 third_party/rust/syn/tests/test_path.rs
+ create mode 100644 third_party/rust/syn/tests/test_receiver.rs
+ create mode 100644 third_party/rust/syn/tests/test_shebang.rs
+ create mode 100644 third_party/rust/syn/tests/test_stmt.rs
+ create mode 100644 third_party/rust/syn/tests/test_ty.rs
+ create mode 100644 third_party/rust/syn/tests/test_visibility.rs
+
+diff --git a/Cargo.lock b/Cargo.lock
+index 19117e8368..d5fe0f6457 100644
+--- Cargo.lock
++++ Cargo.lock
+@@ -3717,9 +3717,9 @@ dependencies = [
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
++version = "1.0.24"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
+ dependencies = [
+ "unicode-xid",
+ ]
+@@ -4647,9 +4647,9 @@ dependencies = [
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+diff --git a/third_party/rust/lucet-wasi/.cargo-checksum.json b/third_party/rust/lucet-wasi/.cargo-checksum.json
+index 229fc9978c..2c8c0a3c22 100644
+--- third_party/rust/lucet-wasi/.cargo-checksum.json
++++ third_party/rust/lucet-wasi/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/.gitignore":"44575cf5b28512d75644bf54a517dcef304ff809fd511747621b4d64f19aac66","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429e
c799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"fea1408a1c1b1e84b06044a0b12cb26c8fd3253ca124debb6cd3e4faab48fcbd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","LICENSE.cloudabi-utils":"86a34251f0aab76b7dc3daf8d252afbdf481ea94aa5b46d020205178b7e2eac1","LICENSE.wasmtime":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","bindings.json":"fad8362f845e6f7a2af1d7547cee5730894e7b96804f338181fc070ffdcfae1e","build.rs":"593173ad03963afcbef43f1843be6f04cde1df3eae0298ca20bf881019dd350a","examples/Makefile":"d2d2ceeb1bc4435189ea9a2710b6f5f5331ce6aa73ae8a4f4edcca215058a9b4","examples/README.md":"f2a5be6cc88d511c9f4d3bfefdd42dcb2ace813bc23f6a4430b6b543f7373341","examples/hello.c":"9cbc0d3173e02309e15372835fa849d196b2a202d81806fea60378e1878d0c53","examples/pseudoquine.c":"8fd696f8e1b8fb86188564a05f4776875ead31d785a12e3aa4af9d9c1b46d5b5","include/lucet_wasi.h":"497f712c64f753ebdf73ab05b0b340d50094f9c59bb8637ccbf99d895cd20966","src/bindings.rs":"edbeb51d1a93fd31039ee1f1dc7c1b6c0bded2cf5dad10039e8b7da81a4d4a12","src/c_api.rs":"a9c73070a88a461882a28e3e2adfd773f569b964c7ffabde39a3cef907782f71","src/ctx.rs":"578f87c35cce12eaebec95d03e31954c3e6cd0afa214a0fec068f03814eb0cc7","src/fdentry.rs":"94a8480fa587e5586327dfd6b66d8a6a3ef1f8091ba8deb335bf45642f4f98e6","src/host.rs":"6f05f8fea2afed827abfc3c008a5854a8023d91d066580ecfb49e5c8036ef3a3","src/hostcalls/fs.rs":"4726e6f695f7d1d4e371ec52c57f4e36b0ba0d2302fc008b21a301f5fd7a5c97","src/hostcalls/fs_helpers.rs":"474bce0a1f15fa23b0b0b8aa83143d993dd2cbd7cdfc38c118d452d04e80caea","src/hostcalls/misc.rs":"83d087891d92af08cfa2d2e0c5f41cc47cb8219460f6dbcc8666b418dfef206e","src/hostcalls/mod.rs":"4c5d3f65c69503e11e647770879026c37c0e5e01a99b7116c8fb9411b4797187","src/hostcalls/timers.rs":"e65d6a491256b5d6051b6816f6c5049ba3cdc6142651bac81f34d659c1c2a104","src/lib.rs":"5554e1a3f0cd3756173ece6435a0d01b2f520b3401cd5fc33180a04fb9f69bbe","src/memory.rs":"0a09026b15d27f99d74e560cd94795f645cba414a8491bc961987fab9d9da69b","src/wasi_host.rs":"cacbdac28304a837b11e5ad400ae9de3ee79c0284be335e64606ecdfe426ad6e","src/wasm32.rs":"13a5dc6e59784662f1e55eccb457cbbae241a96f70cfa72c41d55858ca05b980","tests/guests/cant_dotdot.c":"609b8cece9443e375a0b38a7e43651b179f66ee9c686edba6696fe1bcd45b111","tests/guests/clock_getres.c":"f5e41c0a2b05a8d7cdb5b4da6c8b6778b858004c1e9d115503c45a1d976be33b","tests/guests/duplicate_import.wat":"4bd8d7a5c1d1597dbe7648300e94e3fab84d7ab068d56cfb656aa1a208026cee","tests/guests/exitcode.c":"b7c5cec3ead0ed82326c568287a1f3398e71ae7e447ce49a3c4c7114c82495af","tests/guests/follow_symlink.c":"de3143ad2bbbfe834c0c32b54c9fcf144ca4eba5cdcf7588929e5f47225ab616","tests/guests/fs.c":"0dca5232ff5da1b7745e3b44bca39333c01a20ba4eae1a6a0a1c492c71ca1efa","tests/guests/getentropy.c":"5d80bcc68dcf3ba91576969055099d61635ae713c057b3cb36afb122a5f26347","tests/guests/getrusage.c":"8114c103b85eb564d9ab43684958bc1939de3794d314b7c121762f3a2f0434a6","tests/guests/gettimeofday.c":"4a57f376b06f4228017b82695448a0bd213fb91455f5301d689cd87fcff01f06","tests/guests/notdir.c":"bd8f8b24360b7cf8d5dced9d9ba4c15843fcbbae89fecc13e3a457c33a275e28","tests/guests/poll.c":"aefaa9b58ce9906dc379e0bd25fa68dfbf8cdffb48cd5ecde1d67708b83b366d","tests/guests/preopen_populates.c":"f186e4eb4aab6a1d9ec7bc5c49eaea6d9d162e0159dfe8f953bb48ade9b58d43","tests/guests/read_file.c":"1aab9393f005f05b69592826d7c4d384a115d5bca42c66f10a901811b4b1dcac","tests/guests/read_file_twice.c":"04a3dad7a43b93e36efd4e2c822c11b3f129429ec799af304d82b358686c578a","tests/guests/stat.c":"02756933ea7d4337b4fa04344b32968851b02f9d
0bd5ea1cb0e2f022e8c65ab0","tests/guests/stdin.c":"66efc4b54f68d1138046f1afefae15f7d4555b2904b4a988818e61e67fe8fefb","tests/guests/symlink_escape.c":"686e047b5c986e29c854bcd93996d027dcdc8721219fa9fa532efc98d2798f5c","tests/guests/symlink_loop.c":"2bbddf3a5edfc6e5f3c0fa82cee4ac92b18804810509e263abd17f5240cd37e5","tests/guests/write_file.c":"9e9b14552c2445cfa6d0aa26b334081a59e6e3428dbb17ceca005a9ba59d3220","tests/test_helpers/mod.rs":"bc18194317611fe1be5c439a7a9e0de75399555c3b6de4275af149fb180456c8","tests/tests.rs":"173a7e0f086f6ed46474686cc3413ee68bbd2ff67004f7790e963a1392c7c46e"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/packed_simd/.cargo-checksum.json b/third_party/rust/packed_simd/.cargo-checksum.json
+index 01afcc1efd..c727a10006 100644
+--- third_party/rust/packed_simd/.cargo-checksum.json
++++ third_party/rust/packed_simd/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/.gitignore":"fe82c7da551079d832cf74200b0b359b4df9828cb4a0416fa7384f07a2ae6a13","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e
62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cbfe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b
6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef189314f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/a
pi/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/shuffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e",
"src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
++{"files":{".appveyor.yml":"f1ed01850e0d725f9498f52a1a63ddf40702ad6e0bf5b2d7c4c04d76e96794a3",".travis.yml":"e9258d9a54fdaf4cbc12405fe5993ac4497eb2b29021691dbc91b19cb9b52227","Cargo.toml":"089941ba3c89ea111cbea3cc3abdcdcf2b9d0ae0db268d7269ee38226db950e5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","bors.toml":"dee881dc69b9b7834e4eba5d95c3ed5a416d4628815a167d6a22d4cb4fb064b8","build.rs":"f3baefc5e5bb9b250e762a1466371b922fd7ee4243c217b2d014307603c2f57a","ci/all.sh":"a23d14e10cb26a0eb719e389c30eb955fa53cddcd436890646df09af640bd2eb","ci/android-install-ndk.sh":"0f1746108cc30bf9b9ba45bcde7b19fc1a8bdf5b0258035b4eb8dc69b75efac4","ci/android-install-sdk.sh":"3490432022c5c8f5a115c084f7a9aca1626f96c0c87ffb62019228c4346b47e4","ci/android-sysimage.sh":"ebf4e5daa1f0fe1b2092b79f0f3f161c4c4275cb744e52352c4d81ab451e4c5a","ci/benchmark.sh":"b61d19ef6b90deba8fb79dee74c8b062d94844676293da346da87bb78a9a49a4","ci/deploy_and_run_on_ios_simulator.rs":"ec8ecf82d92072676aa47f0d1a3d021b60a7ae3531153ef12d2ff4541fc294dc","ci/docker/aarch64-linux-android/Dockerfile":"ace2e7d33c87bc0f6d3962a4a3408c04557646f7f51ab99cfbf574906796b016","ci/docker/aarch64-unknown-linux-gnu/Dockerfile":"1ecdac757101d951794fb2ab0deaa278199cf25f2e08a15c7d40ff31a8556184","ci/docker/arm-linux-androideabi/Dockerfile":"370e55d3330a413a3ccf677b3afb3e0ef9018a5fab263faa97ae8ac017fc2286","ci/docker/arm-unknown-linux-gnueabi/Dockerfile":"e25d88f6c0c94aada3d2e3f08243f755feb7e869dc5dc505b3799719cb1af591","ci/docker/arm-unknown-linux-gnueabihf/Dockerfile":"f126f4c7bae8c11ab8b16df06ad997863f0838825a9c08c9899a3eedb6d570bd","ci/docker/armv7-unknown-linux-gnueabihf/Dockerfile":"b647545c158ee480a4c581dbdc1f57833aef056c8d498acc04b573e842bf803c","ci/docker/i586-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/i686-unknown-linux-gnu/Dockerfile":"0d492759017307ccf74dc2aa4a8cf6623daf3dc728c708dc2b18fa7940800cba","ci/docker/mips-unknown-linux-gnu/Dockerfile":"323776469bb7b160385f3621d66e3ee14c75242f8180f916e65af048a29d4ea0","ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile":"c647f6948a9a43b0be695cbed4eac752120d0faf28e5e69c718cb10406921dab","ci/docker/mips64el-unknown-linux-gnuabi64/Dockerfile":"77bfd00cc8639509be381b394f077e39b45a00158ad61b4e1656714c714665d1","ci/docker/mipsel-unknown-linux-musl/Dockerfile":"ec5bea6c98a3b626731fdb95f9ff2d1182639c76e8fb16d3271d0fc884901524","ci/docker/powerpc-unknown-linux-gnu/Dockerfile":"4f2b662de66e83d1354f650b7077692309637f786c2ea5516c31b5c2ee10af2d","ci/docker/powerpc64-unknown-linux-gnu/Dockerfile":"a9595402b772bc365982e22a0096a8988825d90b09b5faa97ab192e76072f71d","ci/docker/powerpc64le-unknown-linux-gnu/Dockerfile":"df3c381c157439695ae8cd10ab71664702c061e3b4ab22906a5ad6c2680acfed","ci/docker/s390x-unknown-linux-gnu/Dockerfile":"93fb44df3d7fd31ead158570667c97b5076a05c3d968af4a84bc13819a8f2db8","ci/docker/sparc64-unknown-linux-gnu/Dockerfile":"da1c39a3ff1fe22e41395fa7c8934e90b4c1788e551b9aec6e38bfd94effc437","ci/docker/thumbv7neon-linux-androideabi/Dockerfile":"c2decd5591bd7a09378901bef629cd944acf052eb55e4f35b79eb9cb4d62246a","ci/docker/thumbv7neon-unknown-linux-gnueabihf/Dockerfile":"75c0c56161c7382b439de74c00de1c0e3dc9d59560cd6720976a751034b78714","ci/docker/wasm32-unknown-unknown/Dockerfile":"3e5f294bc1e004aa599086c2af49d6f3e7459fa250f5fbdd60cf67d53db78758","ci/docker/x86_64-linux-android/Dockerfile":"685040273cf350d5509e580ac451555efa19790c8723c
a2af066adadc6880ad2","ci/docker/x86_64-unknown-linux-gnu-emulated/Dockerfile":"44b6203d9290bfdc53d81219f0937e1110847a23dd982ec8c4de388354f01536","ci/docker/x86_64-unknown-linux-gnu/Dockerfile":"d253c86803b22da428fa9cc671a05f18d3318eca7733b8dccb4f7be1ddf524c5","ci/dox.sh":"5b61711be47a4e3dde0ddd15ba73d256ea95fd75af3897732c24db1dc7e66366","ci/linux-s390x.sh":"d6b732d7795b4ba131326aff893bca6228a7d2eb0e9402f135705413dbbe0dce","ci/linux-sparc64.sh":"c92966838b1ab7ad3b7a344833ee726aba6b647cf5952e56f0ad1ba420b13325","ci/lld-shim.rs":"3d7f71ec23a49e2b67f694a0168786f9a954dda15f5a138815d966643fd3fcc3","ci/max_line_width.sh":"0a1518bba4c9ecaa55694cb2e9930d0e19c265baabf73143f17f9cf285aaa5bb","ci/run-docker.sh":"92e036390ad9b0d16f109579df1b5ced2e72e9afea40c7d011400ebd3a2a90de","ci/run.sh":"63259e22a96ba539f53c06b1b39f53e3a78a71171652e7afc170836110ccd913","ci/run_examples.sh":"d1a23c6c35374a0678ba5114b9b8fefd8be0a79e774872a8bf0898d1baca18d0","ci/runtest-android.rs":"145a8e9799a5223975061fe7e586ade5669ee4877a7d7a4cf6b4ab48e8e36c7c","ci/setup_benchmarks.sh":"73fb981a8fdb1dcd54409d3c0fbbfb8f77a3ceabf8626a6b9bf9d21d6bc8ce72","ci/test-runner-linux":"c8aa6025cff5306f4f31d0c61dc5f9d4dd5a1d189ab613ef8d4c367c694d9ccd","contributing.md":"2cc8c9c560ae17867e69b06d09b758dbf7bc39eb774ada50a743724b10acc0a2","perf-guide/book.toml":"115a98284126c6b180178b44713314cc494f08a71662ee2ce15cf67f17a51064","perf-guide/src/SUMMARY.md":"3e03bffc991fdc2050f3d51842d72d9d21ea6abab56a3baf3b2d5973a78b89e1","perf-guide/src/ascii.css":"29afb08833b2fe2250f0412e1fa1161a2432a0820a14953c87124407417c741a","perf-guide/src/bound_checks.md":"5e4991ff58a183ef0cd9fdc1feb4cd12d083b44bdf87393bbb0927808ef3ce7d","perf-guide/src/float-math/approx.md":"8c09032fa2d795a0c5db1775826c850d28eb2627846d0965c60ee72de63735ad","perf-guide/src/float-math/fma.md":"311076ba4b741d604a82e74b83a8d7e8c318fcbd7f64c4392d1cf5af95c60243","perf-guide/src/float-math/fp.md":"04153e775ab6e4f0d7837bcc515230d327b04edfa34c84ce9c9e10ebaeef2be8","perf-guide/src/float-math/svml.md":"0798873b8eedaeda5fed62dc91645b57c20775a02d3cd74d8bd06958f1516506","perf-guide/src/introduction.md":"9f5a19e9e6751f25d2daad39891a0cc600974527ec4c8305843f9618910671bd","perf-guide/src/prof/linux.md":"447731eb5de7d69166728fdbc5ecb0c0c9db678ea493b45a592d67dd002184c0","perf-guide/src/prof/mca.md":"f56d54f3d20e7aa4d32052186e8237b03d65971eb5d112802b442570ff11d344","perf-guide/src/prof/profiling.md":"8a650c0fd6ede0964789bb6577557eeef1d8226a896788602ce61528e260e43c","perf-guide/src/target-feature/attribute.md":"615f88dca0a707b6c416fa605435dd6e1fb5361cc639429cbf68cd87624bd78b","perf-guide/src/target-feature/features.md":"17077760ff24c006b606dd21889c53d87228f4311f3ba3a574f9afdeacd86165","perf-guide/src/target-feature/inlining.md":"7ed1d7068d8173a00d84c16cfe5871cd68b9f04f8d0cca2d01ebc84957ebf2f6","perf-guide/src/target-feature/practice.md":"c4b371842e0086df178488fec97f20def8f0c62ee588bcd25fd948b9b1fa227e","perf-guide/src/target-feature/runtime.md":"835425f5ee597fb3e51d36e725a81ebee29f4561231d19563cd4da81dbb1cfcb","perf-guide/src/target-feature/rustflags.md":"ab49712e9293a65d74d540ba4784fcb57ff1119ec05a575d895c071f1a620f64","perf-guide/src/vert-hor-ops.md":"c6211c0ee91e60552ec592d89d9d957eedc21dee3cbd89e1ad6765ea06a27471","readme.md":"585a8f0e16877fb9abb00cd17a175fcb9d7857840c6c61209f1827ffab095070","rustfmt.toml":"de6101d0670bad65fb3b337d56957d2a024e017e5ab146ec784d77312daaf8ff","src/api.rs":"331a3a4abb19cee2df5f2df4ad7c3e88b45e62cf23fdacfc9bbaa633dc5cf788","src/api/bit_manip.rs":"e68290ee679cc5abc9c73afbe635c1035f8cb
fe849e5c751a1680e459244c39e","src/api/cast.rs":"03b94a3d316ac7b7be7068810044911e965e889a0ace7bae762749ca74a92747","src/api/cast/macros.rs":"b0a14d0c83ad2ebb7a275180f6d9e3f2bc312ba57a7d3d6c39fad4e0f20f9408","src/api/cast/v128.rs":"63e28c6a3edf1a7a635f51b8d3c6adbb1d46f884d92a196b3d4a6e743d809416","src/api/cast/v16.rs":"2a584eeb57fd47baad6f3533764301b04aaaac23702b7a8db12598ac02899262","src/api/cast/v256.rs":"b91c15ed8d1536ecd97b4eb79ff9d5aba0552cd9b6f0ea6435b05f2273e23b3a","src/api/cast/v32.rs":"62ec89fcce7fa7f28497ee5770adc8f81d2d3a6b2925b02f7dc06504c40e8f38","src/api/cast/v512.rs":"d855cb943ae7106e9599ef38e30a3afb1c6bd5433178baca54cb128fd9a7d143","src/api/cast/v64.rs":"fe0f7dfaf4fc0c0c1a78c96fcfcdfdc2a1e2845843b11aa797a0c6fb52a8f774","src/api/cmp.rs":"357c3a2a09c6d4611c32dd7fa95be2fae933d513e229026ec9b44451a77b884e","src/api/cmp/eq.rs":"60f70f355bae4cb5b17db53204cacc3890f70670611c17df638d4c04f7cc8075","src/api/cmp/ord.rs":"589f7234761c294fa5df8f525bc4acd5a47cdb602207d524a0d4e19804cd9695","src/api/cmp/partial_eq.rs":"3ed23d2a930b0f9750c3a5309da766b03dc4f9c4d375b42ad3c50fe732693d15","src/api/cmp/partial_ord.rs":"e16b11805c94048acd058c93994b5bc74bb187f8d7e3b86a87df60e1601467f9","src/api/cmp/vertical.rs":"de3d62f38eba817299aa16f1e1939954c9a447e316509397465c2830852ba053","src/api/default.rs":"b61f92fc0e33a2633b3375eb405beba480da071cde03df4d437d8a6058afcd97","src/api/fmt.rs":"67fb804bb86b6cd77cf8cd492b5733ce437071b66fe3297278b8a6552c325dda","src/api/fmt/binary.rs":"35cb5c266197d6224d598fb3d286e5fe48ef0c01ed356c2ff6fe9ba946f96a92","src/api/fmt/debug.rs":"aa18eea443bf353fea3db8b1a025132bbcaf91e747ecfa43b8d9fce9af395a0c","src/api/fmt/lower_hex.rs":"69d5be366631af309f214e8031c8c20267fcc27a695eac6f45c6bc1df72a67e6","src/api/fmt/octal.rs":"9eb11ba3d990213f3c7f1ec25edba7ce997cb1320e16d308c83498ba6b9bfbd9","src/api/fmt/upper_hex.rs":"a4637d085b7bb20e759ce58e08435b510a563ba3dd468af2b03560fdc5511562","src/api/from.rs":"2e599d8329cb05eaf06224cc441355c4b7b51254fc19256619333be8c149d444","src/api/from/from_array.rs":"4151593c7bba7455821fffa5b59867005a77c95d32f1f0cc3fd87294000157d9","src/api/from/from_vector.rs":"9764371aa9e6005aace74dea14f59e5611a095b7cf42707940924749282c52f0","src/api/hash.rs":"562cfa3f1d8eb9a733c035a3665a599c2f1e341ee820d8fbdd102a4398a441bc","src/api/into_bits.rs":"82297f0697d67b5a015e904e7e6e7b2a7066ba825bc54b94b4ff3e22d7a1eefb","src/api/into_bits/arch_specific.rs":"1f925390b0ce7132587d95f2419c6e2ad3e1a9d17eb1d9c120a1c1c4bdf4277e","src/api/into_bits/macros.rs":"d762406de25aedff88d460dec7a80dc8e825a2a419d53218ce007efa6a1d3e04","src/api/into_bits/v128.rs":"ecdc5893664c71d7ab1ff3697c3fbe490d20d8748b9b76881d05e7625e40d74c","src/api/into_bits/v16.rs":"5459ec7dad1ad7bd30dc7e48374580b993abf23701d9c3cb22203fa0a9aabb6d","src/api/into_bits/v256.rs":"90ea351da0380ead1bf0f63b620afd40d01d638d09f7e7be31840bd2c1d9c663","src/api/into_bits/v32.rs":"ee1dc5a430050e16f51154b5fe85b1536f5feddf2ea23dd1d3859b67c4afc6fc","src/api/into_bits/v512.rs":"f72098ed1c9a23944f3d01abaf5e0f2d0e81d35a06fdadd2183e896d41b59867","src/api/into_bits/v64.rs":"6394462facdfe7827349c742b7801f1291e75a720dfb8c0b52100df46f371c98","src/api/math.rs":"8b2a2fc651917a850539f993aa0b9e5bf4da67b11685285b8de8cdca311719ec","src/api/math/float.rs":"61d2794d68262a1090ae473bd30793b5f65cf732f32a6694a3af2ce5d9225616","src/api/math/float/abs.rs":"5b6b2701e2e11135b7ce58a05052ea8120e10e4702c95d046b9d21b827b26bf8","src/api/math/float/consts.rs":"78acba000d3fa527111300b6327c1932de9c4c1e02d4174e1a5615c01463d38c","src/api/math/float/cos.rs":"4c2dd7173728ef18931
4f1576c9486e03be21b7da98843b2f9011282a7979e31","src/api/math/float/exp.rs":"7c6d5f1e304f498a01cfa23b92380c815d7da0ad94eae3483783bc377d287eef","src/api/math/float/ln.rs":"54c7583f3df793b39ff57534fade27b41bb992439e5dc178252f5ca3190a3e54","src/api/math/float/mul_add.rs":"62cac77660d20159276d4c9ef066eb90c81cbddb808e8e157182c607625ad2eb","src/api/math/float/mul_adde.rs":"bae056ee9f3a70df39ec3c3b2f6437c65303888a7b843ef1a5bcf1f5aca0e602","src/api/math/float/powf.rs":"9ddb938984b36d39d82a82f862f80df8f7fb013f1d222d45698d41d88472f568","src/api/math/float/recpre.rs":"589225794ff1dbf31158dff660e6d4509ecc8befbb57c633900dea5ac0b840d6","src/api/math/float/rsqrte.rs":"a32abdcc318d7ccc8448231f54d75b884b7cbeb03a7d595713ab6243036f4dbf","src/api/math/float/sin.rs":"cbd3622b7df74f19691743001c8cf747a201f8977ad90542fee915f37dcd1e49","src/api/math/float/sqrt.rs":"0c66d5d63fb08e4d99c6b82a8828e41173aff1ac9fa1a2764a11fac217ccf2ac","src/api/math/float/sqrte.rs":"731e1c9f321b662accdd27dacb3aac2e8043b7aecb2f2161dde733bd9f025362","src/api/minimal.rs":"1f22bcc528555444e76de569ec0ae2029b9ae9d04805efeafa93369c8098036b","src/api/minimal/iuf.rs":"c501a6696950cf5e521765f178de548af64fdfb6e10d026616d09fab93ca2d17","src/api/minimal/mask.rs":"42e415f536c5193d0218f5a754b34b87fd7c971bff068009f958712166ff056d","src/api/minimal/ptr.rs":"a9ee482d1dd1c956fb8f3f179e6e620b1de4e9d713961461d4c6923a4ef2e67c","src/api/ops.rs":"3e273b277a0f3019d42c3c59ca94a5afd4885d5ae6d2182e5089bbeec9de42ee","src/api/ops/scalar_arithmetic.rs":"d2d5ad897a59dd0787544f927e0e7ca4072c3e58b0f4a2324083312b0d5a21d7","src/api/ops/scalar_bitwise.rs":"482204e459ca6be79568e1c9f70adbe2d2151412ddf122fb2161be8ebb51c40c","src/api/ops/scalar_mask_bitwise.rs":"c250f52042e37b22d57256c80d4604104cfd2fbe2a2e127c676267270ca5d350","src/api/ops/scalar_shifts.rs":"987f8fdebeedc16e3d77c1b732e7826ef70633c541d16dfa290845d5c6289150","src/api/ops/vector_arithmetic.rs":"ddca15d09ddeef502c2ed66117a62300ca65d87e959e8b622d767bdf1c307910","src/api/ops/vector_bitwise.rs":"b3968f7005b649edcc22a54e2379b14d5ee19045f2e784029805781ae043b5ee","src/api/ops/vector_float_min_max.rs":"f5155dce75219f4ba11275b1f295d2fdcddd49d174a6f1fb2ace7ea42813ce41","src/api/ops/vector_int_min_max.rs":"a378789c6ff9b32a51fbd0a97ffd36ed102cd1fe6a067d2b02017c1df342def6","src/api/ops/vector_mask_bitwise.rs":"5052d18517d765415d40327e6e8e55a312daaca0a5e2aec959bfa54b1675f9c8","src/api/ops/vector_neg.rs":"5c62f6b0221983cdbd23cd0a3af3672e6ba1255f0dfe8b19aae6fbd6503e231b","src/api/ops/vector_rotates.rs":"03cbe8a400fd7c688e4ee771a990a6754f2031b1a59b19ae81158b21471167e5","src/api/ops/vector_shifts.rs":"9bf69d0087268f61009e39aea52e03a90f378910206b6a28e8393178b6a5d0e0","src/api/ptr.rs":"8a793251bed6130dcfb2f1519ceaa18b751bbb15875928d0fb6deb5a5e07523a","src/api/ptr/gather_scatter.rs":"9ddd960365e050674b25b2fd3116e24d94669b4375d74e71c03e3f1469576066","src/api/reductions.rs":"ae5baca81352ecd44526d6c30c0a1feeda475ec73ddd3c3ec6b14e944e5448ee","src/api/reductions/bitwise.rs":"8bf910ae226188bd15fc7e125f058cd2566b6186fcd0cd8fd020f352c39ce139","src/api/reductions/float_arithmetic.rs":"e58c8c87806a95df2b2b5b48ac5991036df024096d9d7c171a480fe9282896a4","src/api/reductions/integer_arithmetic.rs":"47471da1c5f859489680bb5d34ced3d3aa20081c16053a3af121a4496fcb57bf","src/api/reductions/mask.rs":"db83327a950e33a317f37fd33ca4e20c347fb415975ec024f3e23da8509425af","src/api/reductions/min_max.rs":"f27be3aa28e1c1f46de7890198db6e12f00c207085e89ef2de7e57ee443cdb98","src/api/select.rs":"a98e2ccf9fc6bdeed32d337c8675bc96c2fbe2cc34fbf149ad6047fb8e749774","src/api/s
huffle.rs":"da58200790868c09659819322a489929a5b6e56c596ed07e6a44293ea02e7d09","src/api/shuffle1_dyn.rs":"bfea5a91905b31444e9ef7ca6eddb7a9606b7e22d3f71bb842eb2795a0346620","src/api/slice.rs":"ee87484e8af329547b9a5d4f2a69e8bed6ea10bbd96270d706083843d4eea2ac","src/api/slice/from_slice.rs":"4d4fe8a329c885fcb4fbcbedf99efb15a95296fe6b3f595056cc37037450d5ac","src/api/slice/write_to_slice.rs":"f5b23b2c4b91cfb26b713a9013a6c0da7f45eaefb79ba06dcbc27f3f23bda679","src/api/swap_bytes.rs":"4a6792a2e49a77475e1b237592b4b2804dbddb79c474331acd0dd71b36934259","src/codegen.rs":"c6eebc3d3665420aa6a2f317977e3c41a4f43e0550ac630cdbe8e4bbed5e2031","src/codegen/bit_manip.rs":"5559e095105a80003e0de35af1d19b0c65c9ab04eb743c7e01c5442d882eb34e","src/codegen/llvm.rs":"d1299c189abb17a6133f047574cffc7a6db4c1be37cb7d4785491cb5e8f8cf54","src/codegen/math.rs":"35f96e37a78fcf0cdb02146b7f27a45108fe06a37fc2a54d8851ce131a326178","src/codegen/math/float.rs":"dd86c0449e576c83b719700962ac017c332987fac08d91f2b7a2b1b883598170","src/codegen/math/float/abs.rs":"f56e2b4b8055ea861c1f5cbc6b6e1d8e7e5af163b62c13574ddee4e09513bfbc","src/codegen/math/float/cos.rs":"ef3b511a24d23045b310315e80348a9b7fedb576fc2de52d74290616a0abeb2a","src/codegen/math/float/cos_pi.rs":"4e7631a5d73dac21531e09ef1802d1180f8997509c2c8fa9f67f322194263a97","src/codegen/math/float/exp.rs":"61b691598c41b5622f24e4320c1bdd08701e612a516438bdddcc728fc3405c8c","src/codegen/math/float/ln.rs":"46b718b1ba8c9d99e1ad40f53d20dfde08a3063ca7bd2a9fdd6698e060da687e","src/codegen/math/float/macros.rs":"dd42135fff13f9aca4fd3a1a4e14c7e6c31aadc6d817d63b0d2fb9e62e062744","src/codegen/math/float/mul_add.rs":"a37bf764345d4b1714f97e83897b7cf0855fc2811704bcbc0012db91825339e1","src/codegen/math/float/mul_adde.rs":"c75702bfcb361de45964a93caf959a695ef2376bd069227600b8c6872665c755","src/codegen/math/float/powf.rs":"642346e982bc4c39203de0864d2149c4179cd7b21cf67a2951687932b4675872","src/codegen/math/float/sin.rs":"9d68164c90cdca6a85155040cdac42e27342ebe0b925273ef1593df721af4258","src/codegen/math/float/sin_cos_pi.rs":"9be02ad48585a1e8d99129382fbffbaed47852f15459256a708850b6b7a75405","src/codegen/math/float/sin_pi.rs":"9890347905b4d4a3c7341c3eb06406e46e60582bcf6960688bd727e5dadc6c57","src/codegen/math/float/sqrt.rs":"e3c60dcfb0c6d2fc62adabcc931b2d4040b83cab294dea36443fb4b89eb79e34","src/codegen/math/float/sqrte.rs":"f0f4ef9eb475ae41bcc7ec6a95ad744ba6b36925faa8b2c2814004396d196b63","src/codegen/pointer_sized_int.rs":"a70697169c28218b56fd2e8d5353f2e00671d1150d0c8cef77d613bdfacd84cb","src/codegen/reductions.rs":"645e2514746d01387ddd07f0aa4ffd8430cc9ab428d4fb13773ea319fa25dd95","src/codegen/reductions/mask.rs":"8f1afe6aabf096a3278e1fc3a30f736e04aa8b9ce96373cee22162d18cfe2702","src/codegen/reductions/mask/aarch64.rs":"cba6e17603d39795dcfe8339b6b7d8714c3e162a1f0a635979f037aa24fe4206","src/codegen/reductions/mask/arm.rs":"9447904818aa2c7c25d0963eead452a639a11ca7dbd6d21eedbfcaade07a0f33","src/codegen/reductions/mask/fallback.rs":"7a0ef9f7fd03ae318b495b95e121350cd61caffc5cc6ee17fabf130d5d933453","src/codegen/reductions/mask/fallback_impl.rs":"76547f396e55ef403327c77c314cf8db8c7a5c9b9819bfb925abeacf130249e5","src/codegen/reductions/mask/x86.rs":"14bd2c482071f2355beebcf7b7ecf950ff2dfcdb08c3ca50993092434a9de717","src/codegen/reductions/mask/x86/avx.rs":"b4913d87844c522903641cbbf10db4551addb1ce5e9e78278e21612fa65c733b","src/codegen/reductions/mask/x86/avx2.rs":"677aed3f056285285daa3adff8bc65e739630b4424defa6d9665e160f027507e","src/codegen/reductions/mask/x86/sse.rs":"226610b4ff88c676d5187114dd57b4a8800de6ce40884675e
9198445b1ed0306","src/codegen/reductions/mask/x86/sse2.rs":"bc38e6c31cb4b3d62147eba6cac264e519e2a48e0f7ce9010cfa9ef0cf0ec9fd","src/codegen/shuffle.rs":"0abca97e92cdce49a58a39cc447eb09dc7d7715ef256c8dbd2181a186e61bb64","src/codegen/shuffle1_dyn.rs":"04523e9338133bdedb012dd076c2c564b79ce5593b0fc56d0fb6910e04190a81","src/codegen/swap_bytes.rs":"1d6cdc716eadddc92b4fd506b2445a821caa8dc00860447de09d7ebd69c2087f","src/codegen/v128.rs":"94226b31ec403d18d9d2fe06713f147c9c79e9b5f9105089088266313f843185","src/codegen/v16.rs":"ddec4ffb66b6f7aaffb9a1780c5ddba82557abd74f45073d335047e04cf74924","src/codegen/v256.rs":"6b63917f0444118d6b1595bff2045e59b97c4d24012bd575f69f1f0efc5a0241","src/codegen/v32.rs":"3477b3c5540aed86e61e2f5807dd31db947413cec9181c587d93ed6ec74f0eba","src/codegen/v512.rs":"5854f99d3aabc4cd42b28a20d9ce447756dc2ba024a409a69b6a8ae1f1842fc5","src/codegen/v64.rs":"e9e89caebfe63d10c0cbca61e4dfdba3b7e02ee0989170f80beed23237ddd950","src/codegen/vPtr.rs":"96d609a9eece4dcbbcc01ba0b8744d7f5958be12774176a2945bc676f4e6b5cb","src/codegen/vSize.rs":"eeee9858749aa82142b27bc120d1989bb74a6b82e1e4efbbeaccc9634dc9acfc","src/lib.rs":"1b5d419ff05ee0370d671810423ccc254708cc8d415c1dbac2a7a36be4bf63a8","src/masks.rs":"870f429967b2d7d5133f4d28d6c753fc5cef0570b27b29d4e966a066d22d2d0e","src/sealed.rs":"ff7f0324276408ae8249941cfa32c90b8835a54d750896b683efea857af19db2","src/testing.rs":"1d3a7862ef625e235a5734ad7204e68d350f902c0695182b1f08a0552432416e","src/testing/macros.rs":"6378856d7a40ba5ec5c7c0dad6327d79f0c77266921c24296d10aed6c68e9b98","src/testing/utils.rs":"d6fd5a5017f1f85d9d99585754f8f6ad06fc3d683b34083543e67a7cc6c1772c","src/v128.rs":"18fe263c4aa28cd06461c7070b0269f69f4a2e75749b8f142a83dfdfe4d22bf5","src/v16.rs":"e5c663c9fb3547eaeac78a5f7db9969f4d8b5ec96112bf2954602fff11f0aebd","src/v256.rs":"68732cd688ad12a56d8b4f8ddf279f77bdfe1be2943c7dc0c1b4f1a76798aa0f","src/v32.rs":"785b22a1ccb4a41bb53dfeb0670f624c0ce42e6cdf62d1747e3283777a1c70bd","src/v512.rs":"d1337bfe07f06a8f37f8e8fa7d4315b9307476ee435ad80dd5269eaed564fbfa","src/v64.rs":"3077468d65125b8f085e9454c8b2463a4d5225697464ba6a1300f8799528fd4b","src/vPtr.rs":"c9a53f41f466e17b6648a4ce390fd8f4d3a848d440eb8a9a803a11608d76eb05","src/vSize.rs":"5c46d3e8c3ee5863d9b6e37e681f871386e0efc254d6d84ba711edb529ce7b3c","tests/endianness.rs":"541a144be017e3dd7da7c8ea49d907dc02538245e8c5f3deb5bd43da92c929e1"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+index eeef4120af..e7849f2896 100644
+--- third_party/rust/proc-macro2/.cargo-checksum.json
++++ third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"302d447d62c8d091d6241cf62bdad607c0d4ed8ff9f43d9b254c9d99c253ee8e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"a71283fbc495095eebbbf46753df3fe2c19505c745b508dea157f65796b64dd7","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"b114e013695260f6066395c8712cea112ec2a386010397a80f15a60f8b986444","src/lib.rs":"7f528764a958587f007f0c2a330a6a414bae2c8e73d5ed9fb64ff1b42b1805b1","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"1d2253eacbd40eb3a2a933be2adcee356af922bdb48cc89ff266252a41fd98a1","src/wrapper.rs":"f52646ce1705c1f6265516f30d4c43297b5f529dd31fb91f4c806be89d5a4122","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"5f30a704eeb2b9198b57f416d622da72d25cb9bf8d8b12e6d0e90aa2cb0e43fc","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+index 95d653633d..22150c516a 100644
+--- third_party/rust/proc-macro2/Cargo.toml
++++ third_party/rust/proc-macro2/Cargo.toml
+@@ -13,21 +13,22 @@
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.24"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+@@ -39,5 +40,3 @@ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+index 19b0c3b5f8..3d05e871a7 100644
+--- third_party/rust/proc-macro2/README.md
++++ third_party/rust/proc-macro2/README.md
+@@ -1,6 +1,6 @@
+ # proc-macro2
+
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+index deb9b92719..b247d874f6 100644
+--- third_party/rust/proc-macro2/build.rs
++++ third_party/rust/proc-macro2/build.rs
+@@ -14,6 +14,10 @@
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+@@ -57,6 +61,22 @@ fn main() {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 32 {
++ println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
++ }
++
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 44 {
++ println!("cargo:rustc-cfg=lexerror_display");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+index 0000000000..c597bc99c6
+--- /dev/null
++++ third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+index fe582b3b5f..8900c5ff0f 100644
+--- third_party/rust/proc-macro2/src/fallback.rs
++++ third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,27 +1,41 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+@@ -31,6 +45,72 @@ impl TokenStream {
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
+@@ -59,20 +139,22 @@ impl FromStr for TokenStream {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ f.write_str("cannot parse string into token stream")
++ }
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+@@ -80,37 +162,22 @@ impl fmt::Display for TokenStream {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
+- }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+@@ -139,28 +206,26 @@ impl From<TokenStream> for proc_macro::TokenStream {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+@@ -168,31 +233,30 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+@@ -208,7 +272,7 @@ impl SourceFile {
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+@@ -218,7 +282,7 @@ impl fmt::Debug for SourceFile {
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+@@ -228,23 +292,11 @@ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
+@@ -282,16 +334,21 @@ impl FileInfo {
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
+ #[cfg(span_locations)]
+@@ -310,23 +367,22 @@ impl SourceMap {
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Should we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+@@ -343,11 +399,11 @@ impl SourceMap {
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+@@ -361,12 +417,16 @@ impl Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+@@ -374,7 +434,6 @@ impl Span {
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+@@ -427,26 +486,59 @@ impl Span {
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+@@ -474,11 +566,11 @@ impl Group {
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+@@ -486,36 +578,45 @@ impl Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
+@@ -549,16 +650,14 @@ impl Ident {
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+@@ -615,18 +714,18 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+@@ -637,17 +736,17 @@ impl fmt::Debug for Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+@@ -669,7 +768,7 @@ macro_rules! unsuffixed_numbers {
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+@@ -711,7 +810,7 @@ impl Literal {
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -719,7 +818,7 @@ impl Literal {
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+@@ -730,10 +829,10 @@ impl Literal {
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+@@ -744,10 +843,10 @@ impl Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
+@@ -756,6 +855,7 @@ impl Literal {
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+@@ -784,651 +884,17 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Literal");
+ debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
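The rewritten fallback.rs above replaces the recursive destruction of nested token streams with the iterative Drop impl shown earlier in this hunk: groups are popped off and their tokens spliced back into the parent vector, so dropping a deeply nested stream never recurses. What follows is a minimal standalone sketch of that idea; it is not part of the vendored patch, and the Stream/Tree names are illustrative only.

struct Stream { inner: Vec<Tree> }
enum Tree { Leaf(u32), Group(Stream) }

impl Drop for Stream {
    fn drop(&mut self) {
        // Pop tokens one by one; a nested group's contents are moved into the
        // parent vector instead of being dropped recursively, so stack depth
        // stays constant no matter how deeply the groups are nested.
        while let Some(tree) = self.inner.pop() {
            if let Tree::Group(mut group) = tree {
                self.inner.append(&mut group.inner);
            }
        }
    }
}

fn main() {
    // Build a stream nested 100_000 groups deep; a naive recursive drop would
    // risk overflowing the stack, while the iterative Drop above handles it.
    let mut s = Stream { inner: vec![Tree::Leaf(0)] };
    for _ in 0..100_000 {
        s = Stream { inner: vec![Tree::Group(s)] };
    }
    drop(s);
}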
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+index a08be3e815..c20fb50d4a 100644
+--- third_party/rust/proc-macro2/src/lib.rs
++++ third_party/rust/proc-macro2/src/lib.rs
+@@ -78,27 +78,24 @@
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.24")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+-use std::cmp::Ordering;
+-use std::fmt;
+-use std::hash::{Hash, Hasher};
+-use std::iter::FromIterator;
+-use std::marker;
+-use std::ops::RangeBounds;
+-#[cfg(procmacro2_semver_exempt)]
+-use std::path::PathBuf;
+-use std::rc::Rc;
+-use std::str::FromStr;
++mod marker;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+@@ -106,6 +103,17 @@ use crate::fallback as imp;
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
++use crate::marker::Marker;
++use std::cmp::Ordering;
++use std::error::Error;
++use std::fmt::{self, Debug, Display};
++use std::hash::{Hash, Hasher};
++use std::iter::FromIterator;
++use std::ops::RangeBounds;
++#[cfg(procmacro2_semver_exempt)]
++use std::path::PathBuf;
++use std::str::FromStr;
++
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+ ///
+ /// This type provides interfaces for iterating over token trees and for
+@@ -116,27 +124,27 @@ mod imp;
+ #[derive(Clone)]
+ pub struct TokenStream {
+ inner: imp::TokenStream,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ /// Error returned from `TokenStream::from_str`.
+ pub struct LexError {
+ inner: imp::LexError,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> TokenStream {
+ TokenStream {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -173,7 +181,7 @@ impl FromStr for TokenStream {
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+@@ -228,25 +236,33 @@ impl FromIterator<TokenStream> for TokenStream {
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ Debug::fmt(&self.inner, f)
++ }
++}
++
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
++impl Error for LexError {}
++
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+@@ -254,7 +270,7 @@ impl fmt::Debug for LexError {
+ #[derive(Clone, PartialEq, Eq)]
+ pub struct SourceFile {
+ inner: imp::SourceFile,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+@@ -262,7 +278,7 @@ impl SourceFile {
+ fn _new(inner: imp::SourceFile) -> Self {
+ SourceFile {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -291,9 +307,9 @@ impl SourceFile {
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -311,25 +327,41 @@ pub struct LineColumn {
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Span {
+ fn _new(inner: imp::Span) -> Span {
+ Span {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Span) -> Span {
+ Span {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -342,6 +374,16 @@ impl Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+@@ -352,18 +394,12 @@ impl Span {
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+@@ -439,9 +475,9 @@ impl Span {
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -462,11 +498,11 @@ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+@@ -476,11 +512,11 @@ impl TokenTree {
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+@@ -513,32 +549,32 @@ impl From<Literal> for TokenTree {
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+@@ -651,15 +687,15 @@ impl Group {
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+@@ -669,7 +705,7 @@ impl fmt::Debug for Group {
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+ pub struct Punct {
+- op: char,
++ ch: char,
+ spacing: Spacing,
+ span: Span,
+ }
+@@ -695,9 +731,9 @@ impl Punct {
+ ///
+ /// The returned `Punct` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+- pub fn new(op: char, spacing: Spacing) -> Punct {
++ pub fn new(ch: char, spacing: Spacing) -> Punct {
+ Punct {
+- op,
++ ch,
+ spacing,
+ span: Span::call_site(),
+ }
+@@ -705,7 +741,7 @@ impl Punct {
+
+ /// Returns the value of this punctuation character as `char`.
+ pub fn as_char(&self) -> char {
+- self.op
++ self.ch
+ }
+
+ /// Returns the spacing of this punctuation character, indicating whether
+@@ -730,16 +766,16 @@ impl Punct {
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.ch, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+- debug.field("op", &self.op);
++ debug.field("char", &self.ch);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+@@ -813,14 +849,14 @@ impl fmt::Debug for Punct {
+ #[derive(Clone)]
+ pub struct Ident {
+ inner: imp::Ident,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Ident {
+ fn _new(inner: imp::Ident) -> Ident {
+ Ident {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -920,15 +956,15 @@ impl Hash for Ident {
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -941,7 +977,7 @@ impl fmt::Debug for Ident {
+ #[derive(Clone)]
+ pub struct Literal {
+ inner: imp::Literal,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ macro_rules! suffixed_int_literals {
+@@ -988,14 +1024,14 @@ impl Literal {
+ fn _new(inner: imp::Literal) -> Literal {
+ Literal {
+ inner,
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+ fn _new_stable(inner: fallback::Literal) -> Literal {
+ Literal {
+ inner: inner.into(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+
+@@ -1140,26 +1176,25 @@ impl Literal {
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
+- use std::marker;
+- use std::rc::Rc;
++ use crate::marker::Marker;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+@@ -1168,7 +1203,7 @@ pub mod token_stream {
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+- _marker: marker::PhantomData<Rc<()>>,
++ _marker: Marker,
+ }
+
+ impl Iterator for IntoIter {
+@@ -1179,9 +1214,9 @@ pub mod token_stream {
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+@@ -1192,7 +1227,7 @@ pub mod token_stream {
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+- _marker: marker::PhantomData,
++ _marker: Marker,
+ }
+ }
+ }
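The lib.rs hunk above also adds Ord and PartialOrd for LineColumn under cfg(span_locations), ordering positions by line first and using the column only as a tie-break. A small usage sketch of that comparison, outside the patch and without the cfg gating, assuming nothing beyond the two public fields:

use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct LineColumn { line: usize, column: usize }

impl Ord for LineColumn {
    fn cmp(&self, other: &Self) -> Ordering {
        // Lines dominate; columns only break ties on the same line.
        self.line.cmp(&other.line).then(self.column.cmp(&other.column))
    }
}

impl PartialOrd for LineColumn {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let a = LineColumn { line: 3, column: 9 };
    let b = LineColumn { line: 4, column: 1 };
    let c = LineColumn { line: 4, column: 7 };
    assert!(a < b && b < c); // earlier line wins, then earlier column
}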
+diff --git a/third_party/rust/proc-macro2/src/marker.rs b/third_party/rust/proc-macro2/src/marker.rs
+new file mode 100644
+index 0000000000..58729baf4a
+--- /dev/null
++++ third_party/rust/proc-macro2/src/marker.rs
+@@ -0,0 +1,18 @@
++use std::marker::PhantomData;
++use std::panic::{RefUnwindSafe, UnwindSafe};
++use std::rc::Rc;
++
++// Zero sized marker with the correct set of autotrait impls we want all proc
++// macro types to have.
++pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
++
++pub(crate) use self::value::*;
++
++mod value {
++ pub(crate) use std::marker::PhantomData as Marker;
++}
++
++pub(crate) struct ProcMacroAutoTraits(Rc<()>);
++
++impl UnwindSafe for ProcMacroAutoTraits {}
++impl RefUnwindSafe for ProcMacroAutoTraits {}
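The new marker.rs above centralises the auto-trait profile of the public wrapper types: each wrapper carries a PhantomData of a type containing Rc<()>, which keeps the wrappers !Send and !Sync, while the explicit UnwindSafe/RefUnwindSafe impls spell out that panic safety is retained. A self-contained sketch of the same pattern, with illustrative names not taken from the patch:

use std::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;

// Never instantiated; it only appears as the PhantomData parameter, which is
// zero-sized. The Rc<()> field makes the type neither Send nor Sync, and any
// type embedding the marker inherits that.
struct AutoTraits(Rc<()>);

// Opt in to unwind safety explicitly, mirroring the impls in marker.rs.
impl UnwindSafe for AutoTraits {}
impl RefUnwindSafe for AutoTraits {}

type Marker = PhantomData<AutoTraits>;

// A wrapper carrying the marker is unwind safe but confined to one thread,
// matching the behavior of a real proc_macro handle.
struct Handle { _marker: Marker }

fn main() {
    fn assert_unwind_safe<T: UnwindSafe>() {}
    assert_unwind_safe::<Handle>();
    // A `T: Send` bound on Handle would fail to compile here.
}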
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+index 0000000000..365fe0484d
+--- /dev/null
++++ third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,849 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::char;
++use std::str::{Bytes, CharIndices, Chars};
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if is_ident_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = punct(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
++ .iter()
++ .any(|prefix| input.starts_with(prefix))
++ {
++ Err(LexError)
++ } else {
++ ident_any(input)
++ }
++}
++
++fn ident_any(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
++ let mut last = ch;
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.peek() {
++ Some((_, ch)) if ch.is_whitespace() => {
++ last = *ch;
++ chars.next();
++ }
++ _ => break,
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => match bytes.next() {
++ Some((_, b'\n')) => {}
++ _ => break,
++ },
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
++ let mut last = b as char;
++ let rest = input.advance(newline + 1);
++ let mut chars = rest.char_indices();
++ loop {
++ if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
++ return Err(LexError);
++ }
++ match chars.next() {
++ Some((_, ch)) if ch.is_whitespace() => last = ch,
++ Some((offset, _)) => {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ break;
++ }
++ None => return Err(LexError),
++ }
++ }
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => match chars.next() {
++ Some((_, '\n')) => {}
++ _ => break,
++ },
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ let mut value = 0;
++ let mut len = 0;
++ while let Some((_, ch)) = chars.next() {
++ let digit = match ch {
++ '0'..='9' => ch as u8 - b'0',
++ 'a'..='f' => 10 + ch as u8 - b'a',
++ 'A'..='F' => 10 + ch as u8 - b'A',
++ '_' if len > 0 => continue,
++ '}' if len > 0 => return char::from_u32(value).is_some(),
++ _ => return false,
++ };
++ if len == 6 {
++ return false;
++ }
++ value *= 0x10;
++ value += u32::from(digit);
++ len += 1;
++ }
++ false
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ if !(has_dot || has_exp) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let token_before_exp = if has_dot {
++ Ok(input.advance(len - 1))
++ } else {
++ Err(LexError)
++ };
++ let mut has_sign = false;
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ if has_sign {
++ return token_before_exp;
++ }
++ chars.next();
++ len += 1;
++ has_sign = true;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return token_before_exp;
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ match b {
++ b'0'..=b'9' => {
++ let digit = (b - b'0') as u64;
++ if digit >= base {
++ return Err(LexError);
++ }
++ }
++ b'a'..=b'f' => {
++ let digit = 10 + (b - b'a') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'A'..=b'F' => {
++ let digit = 10 + (b - b'A') as u64;
++ if digit >= base {
++ break;
++ }
++ }
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn punct(input: Cursor) -> PResult<Punct> {
++ match punct_char(input) {
++ Ok((rest, '\'')) => {
++ if ident_any(rest)?.0.starts_with("'") {
++ Err(LexError)
++ } else {
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ }
++ Ok((rest, ch)) => {
++ let kind = match punct_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn punct_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as a punct.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
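As an aside for readers of this vendored patch (not part of the diff itself): the doc-comment lexing added above is observable through proc-macro2's public API, and the comments.rs test added later in this patch exercises exactly this behavior. A minimal standalone sketch, assuming proc-macro2 is used as an ordinary (non-proc-macro) dependency:

    // Illustrative only; not part of the patch. A `///` comment lexes into the
    // equivalent attribute tokens: a `#` punct followed by a bracketed group
    // containing `doc = " hello"` (see tests/comments.rs in this patch).
    use proc_macro2::{TokenStream, TokenTree};

    fn main() {
        let tokens: TokenStream = "/// hello\nstruct S;".parse().unwrap();
        for tree in tokens {
            match tree {
                TokenTree::Punct(p) => println!("punct {}", p.as_char()),
                TokenTree::Group(g) => println!("group {}", g),
                other => println!("other {}", other),
            }
        }
    }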
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+index eb7d0b8a8e..0000000000
+--- third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
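The nom-style macro combinators deleted above are superseded in this release by plain recursive-descent functions over a cursor, in the style of the lexer additions earlier in this patch. A minimal, self-contained sketch of that shape (all names here are hypothetical and not taken from the crate):

    // Illustrative only; not part of the patch. Each parser is an ordinary
    // function taking a cursor and returning the remaining input on success.
    #[derive(Copy, Clone)]
    struct MiniCursor<'a> {
        rest: &'a str,
    }

    struct MiniLexError;

    type MiniResult<'a, T> = Result<(MiniCursor<'a>, T), MiniLexError>;

    fn tag<'a>(input: MiniCursor<'a>, token: &str) -> MiniResult<'a, ()> {
        if input.rest.starts_with(token) {
            Ok((MiniCursor { rest: &input.rest[token.len()..] }, ()))
        } else {
            Err(MiniLexError)
        }
    }

    fn main() {
        let input = MiniCursor { rest: "fn main" };
        assert!(tag(input, "fn").is_ok());
    }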
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+index 552b9381cf..3df044af17 100644
+--- third_party/rust/proc-macro2/src/wrapper.rs
++++ third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,15 +1,15 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+@@ -19,73 +19,16 @@ pub enum TokenStream {
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+@@ -103,7 +46,12 @@ impl DeferredTokenStream {
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+@@ -114,7 +62,7 @@ impl DeferredTokenStream {
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+@@ -147,9 +95,9 @@ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+@@ -157,11 +105,17 @@ impl FromStr for TokenStream {
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+@@ -187,7 +141,7 @@ impl From<fallback::TokenStream> for TokenStream {
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+@@ -196,9 +150,9 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+- let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+- op.set_span(tt.span().inner.unwrap_nightly());
+- op.into()
++ let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
++ punct.set_span(tt.span().inner.unwrap_nightly());
++ punct.into()
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+@@ -207,7 +161,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+@@ -215,9 +169,9 @@ impl From<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+@@ -227,7 +181,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+@@ -252,14 +206,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
+@@ -270,20 +225,20 @@ impl Extend<TokenStream> for TokenStream {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+@@ -300,17 +255,29 @@ impl From<fallback::LexError> for LexError {
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
++ }
++ }
++}
++
++impl Display for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ #[cfg(lexerror_display)]
++ LexError::Compiler(e) => Display::fmt(e, f),
++ #[cfg(not(lexerror_display))]
++ LexError::Compiler(_e) => Display::fmt(&fallback::LexError, f),
++ LexError::Fallback(e) => Display::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+@@ -361,7 +328,7 @@ impl Iterator for TokenTreeIter {
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+@@ -369,7 +336,7 @@ impl fmt::Debug for TokenTreeIter {
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+@@ -397,58 +364,77 @@ impl SourceFile {
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+@@ -542,16 +528,16 @@ impl From<fallback::Span> for Span {
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+@@ -561,7 +547,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+@@ -652,26 +638,26 @@ impl From<fallback::Group> for Group {
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+@@ -747,26 +733,26 @@ where
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+@@ -774,7 +760,7 @@ pub enum Literal {
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -786,7 +772,7 @@ macro_rules! suffixed_numbers {
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+@@ -830,7 +816,7 @@ impl Literal {
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+@@ -838,7 +824,7 @@ impl Literal {
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+@@ -846,7 +832,7 @@ impl Literal {
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+@@ -854,7 +840,7 @@ impl Literal {
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+@@ -862,7 +848,7 @@ impl Literal {
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+@@ -908,20 +894,20 @@ impl From<fallback::Literal> for Literal {
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
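For context (not part of the diff): the Compiler/Fallback split above is what lets the same API work outside an actual macro expansion, e.g. in unit tests or build scripts, where inside_proc_macro() is false and everything routes to the pure-Rust fallback. A small usage sketch under that assumption:

    // Illustrative only; not part of the patch. Outside a real procedural
    // macro invocation, proc-macro2 uses its fallback implementation, so the
    // same token types are usable from ordinary code such as tests.
    use proc_macro2::{Ident, Span, TokenStream, TokenTree};

    #[test]
    fn usable_outside_macro_expansion() {
        // "a + 1" lexes to three token trees: an ident, a punct, a literal.
        let parsed: TokenStream = "a + 1".parse().unwrap();
        assert_eq!(parsed.into_iter().count(), 3);

        let ident = Ident::new("example", Span::call_site());
        let stream = TokenStream::from(TokenTree::Ident(ident));
        assert_eq!(stream.to_string(), "example");
    }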
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+index 0000000000..708cccb880
+--- /dev/null
++++ third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
+diff --git a/third_party/rust/proc-macro2/tests/marker.rs b/third_party/rust/proc-macro2/tests/marker.rs
+index 7af2539c1a..70e57677cd 100644
+--- third_party/rust/proc-macro2/tests/marker.rs
++++ third_party/rust/proc-macro2/tests/marker.rs
+@@ -57,3 +57,36 @@ mod semver_exempt {
+
+ assert_impl!(SourceFile is not Send or Sync);
+ }
++
++#[cfg(not(no_libprocmacro_unwind_safe))]
++mod unwind_safe {
++ use super::*;
++ use std::panic::{RefUnwindSafe, UnwindSafe};
++
++ macro_rules! assert_unwind_safe {
++ ($($types:ident)*) => {
++ $(
++ assert_impl!($types is UnwindSafe and RefUnwindSafe);
++ )*
++ };
++ }
++
++ assert_unwind_safe! {
++ Delimiter
++ Group
++ Ident
++ LexError
++ Literal
++ Punct
++ Spacing
++ Span
++ TokenStream
++ TokenTree
++ }
++
++ #[cfg(procmacro2_semver_exempt)]
++ assert_unwind_safe! {
++ LineColumn
++ SourceFile
++ }
++}
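The new marker test above asserts UnwindSafe and RefUnwindSafe at compile time. Independent of the crate's assert_impl! helper (which is not shown in this hunk), the general pattern is a generic function whose trait bounds fail to compile if an impl is lost; a minimal sketch:

    // Illustrative only; not part of the patch. The call sites below stop
    // compiling if the named types lose the marker trait impls asserted by
    // the test above.
    use std::panic::{RefUnwindSafe, UnwindSafe};

    fn assert_unwind_safe<T: UnwindSafe + RefUnwindSafe>() {}

    fn main() {
        assert_unwind_safe::<proc_macro2::TokenStream>();
        assert_unwind_safe::<proc_macro2::Span>();
    }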
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+index 7528388138..1e9f633944 100644
+--- third_party/rust/proc-macro2/tests/test.rs
++++ third_party/rust/proc-macro2/tests/test.rs
+@@ -1,7 +1,6 @@
++use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+@@ -84,6 +83,11 @@ fn literal_string() {
+ assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
+ }
+
++#[test]
++fn literal_raw_string() {
++ "r\"\r\n\"".parse::<TokenStream>().unwrap();
++}
++
+ #[test]
+ fn literal_character() {
+ assert_eq!(Literal::character('x').to_string(), "'x'");
+@@ -110,6 +114,37 @@ fn literal_suffix() {
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++ assert_eq!(token_count("0E"), 1);
++ assert_eq!(token_count("0o0A"), 1);
++ assert_eq!(token_count("0E--0"), 4);
++ assert_eq!(token_count("0.0ECMA"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+@@ -161,41 +196,21 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
++ fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
++ fail("\"\\u{999999}\""); // outside of valid range of char
++ fail("\"\\u{_0}\""); // leading underscore
++ fail("\"\\u{}\""); // empty
++ fail("b\"\r\""); // bare carriage return in byte string
++ fail("r\"\r\""); // bare carriage return in raw string
++ fail("\"\\\r \""); // backslash carriage return
++ fail("'aa'aa");
++ fail("br##\"\"#");
++ fail("\"\\\n\u{85}\r\"");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+@@ -274,53 +289,11 @@ fn span_join() {
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
+-}
+-
+-#[test]
+-fn op_before_comment() {
++fn punct_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+@@ -331,6 +304,22 @@ fn op_before_comment() {
+ }
+ }
+
++#[test]
++fn joint_last_token() {
++ // This test verifies that we match the behavior of libproc_macro *not* in
++ // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
++ // behavior was temporarily broken.
++ // See https://github.com/rust-lang/rust/issues/76399
++
++ let joint_punct = Punct::new(':', Spacing::Joint);
++ let stream = TokenStream::from(TokenTree::Punct(joint_punct));
++ let punct = match stream.into_iter().next().unwrap() {
++ TokenTree::Punct(punct) => punct,
++ _ => unreachable!(),
++ };
++ assert_eq!(punct.spacing(), Spacing::Joint);
++}
++
+ #[test]
+ fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+@@ -345,11 +334,11 @@ fn raw_identifier() {
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+@@ -358,7 +347,7 @@ fn test_debug_ident() {
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -368,7 +357,7 @@ TokenStream [
+ sym: a,
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ },
+ Literal {
+@@ -379,7 +368,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -389,7 +378,7 @@ TokenStream [
+ sym: a
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone
+ },
+ Literal {
+@@ -400,7 +389,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+@@ -411,7 +400,7 @@ TokenStream [
+ span: bytes(2..3),
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5),
+ },
+@@ -425,7 +414,7 @@ TokenStream [
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+@@ -436,7 +425,7 @@ TokenStream [
+ span: bytes(2..3)
+ },
+ Punct {
+- op: '+',
++ char: '+',
+ spacing: Alone,
+ span: bytes(4..5)
+ },
+@@ -464,3 +453,80 @@ fn default_tokenstream_is_empty() {
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+index 0000000000..99a0aee5c8
+--- /dev/null
++++ third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
+diff --git a/third_party/rust/spirv-cross-internal/.cargo-checksum.json b/third_party/rust/spirv-cross-internal/.cargo-checksum.json
+index 3c732d6d0e..014aa640e1 100644
+--- third_party/rust/spirv-cross-internal/.cargo-checksum.json
++++ third_party/rust/spirv-cross-internal/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.gitignore":"7f23cc92ddb5e1f584447e98d3e8ab6543fc182f1543f0f6ec29856f9250cdd6","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148c
fc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b196567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44e
df18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
++{"files":{"Cargo.toml":"32644850d6dd8a8496fcf2dd0327dfc18a41f87bbd5f0922e174a905babde20d","build.rs":"68d1e7d99b4d890e21606aa7e7904d4f9b8caed6c0951bb8f5028745a7103d1e","src/bindings_native.rs":"6aa4e3a3bd73a638b3282405c0eaf2565ff8f364919c509f94f79e326679b03e","src/bindings_wasm.rs":"331d19691566beb163fc459d630e9830167cc86ec8cacde6be53fb0a7ca2b1be","src/bindings_wasm_functions.rs":"2515dd91e98e769fe282e7dc4f60820f4be7365c784a9f3bd17310d18a136da8","src/compiler.rs":"a53c7e0136ea3deddbfaf158295daca36623a0e244426b7c04d2856922d91c73","src/emscripten.rs":"3169890001970610013026468739910afca0d85e00d7e34beadfdd31bbcbeeb7","src/glsl.rs":"7ca1f6a11db8e7926a813fb91298dac1c0e2f90fe5426cc985cde7d8f36c05c9","src/hlsl.rs":"3734ad89a4f51647ebeb761753cb324a48ebca724970201e7598a3763d094d73","src/lib.rs":"cc41cbbe48f3e96791ba5338c66fa1fe0e533eaed6bbdced3f008d5e9fe6c6ce","src/msl.rs":"5505d1d626d8e7903ffa9d137e7392a1670fa7b574eacc4bbd33673937b36ea7","src/ptr_util.rs":"280404beede469b2c9ae40536323515a9213dac5d30014fac870a23b37672442","src/spirv.rs":"8a2cbe6b554bb6312f2aede0469338ace21e79720b5128c653e7a68c0e161277","src/vendor/SPIRV-Cross/.clang-format":"9ec4314e20afecad827a2dbd4832256be8464e88aab4a53fab45173ed129b2ed","src/vendor/SPIRV-Cross/.travis.yml":"abcc1b2f622b65feafd3e37a2b5e867fce3cf7211cae9fb2bf92a6de79100be4","src/vendor/SPIRV-Cross/CMakeLists.txt":"0972ab271e1eecdfe41ae0a85bf14ce7fe142b7609f8d2965b2c94f92c63b6aa","src/vendor/SPIRV-Cross/CODE_OF_CONDUCT.md":"a25e58cd66a9543e1500db9a5c3c027e874893ad1a264530bf26eb10918b5d80","src/vendor/SPIRV-Cross/GLSL.std.450.h":"20f32378793c5f416bc0704f44345c2a14c99cba3f411e3beaf1bcea372d58ba","src/vendor/SPIRV-Cross/LICENSE":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","src/vendor/SPIRV-Cross/Makefile":"e2255d32e47d447b100ce3cd0753d0758dc26460e0440d14cc157d4caf9b62f1","src/vendor/SPIRV-Cross/README.md":"b944fc5b1de709089fc3cae0dba09186ce2e43b64de6dcae1423ba1c139ee395","src/vendor/SPIRV-Cross/appveyor.yml":"0f18b8ae5fadc027a20f69307fc32e56a97edfbdcb55ac392852308f88545a04","src/vendor/SPIRV-Cross/include/spirv_cross/barrier.hpp":"bb796625e89f75e239e92f9a61597d421ffe5fb1902d200691ebe95cf856a1f8","src/vendor/SPIRV-Cross/include/spirv_cross/external_interface.h":"cdceda962d87133e44989510edc944e99052d713869b406a8b6b2d54e3d02dd7","src/vendor/SPIRV-Cross/include/spirv_cross/image.hpp":"681d0964b144c5009424196a8bc832cb81cfe5df5b91c2f3e1bfb625765a0c50","src/vendor/SPIRV-Cross/include/spirv_cross/internal_interface.hpp":"ab8851e5708b944a9bf340ce17297d94bef4876d30c833ea83d44b16f60726f6","src/vendor/SPIRV-Cross/include/spirv_cross/sampler.hpp":"b0b1bd4796b4315e826985c224ea5fa7d5bc823fcd6091f7527a5e18d9ae42fb","src/vendor/SPIRV-Cross/include/spirv_cross/thread_group.hpp":"70d9e0400f62de71d3775972eadc196ddb218254fa8155e8e33daf8d99957cc0","src/vendor/SPIRV-Cross/main.cpp":"ab2835f6dff9299f562a89755893f98a4bd946d4460abca885ba2172ebce851c","src/vendor/SPIRV-Cross/pkg-config/spirv-cross-c-shared.pc.in":"75a27e069ee07362bc6b7412b2c4e1270778dd639a8b96b8afbb113d21de79e7","src/vendor/SPIRV-Cross/spirv.h":"2c19cd1d262a3757e92276fbaa8e1a706b2328a92f47bda88f849b5cebebb630","src/vendor/SPIRV-Cross/spirv.hpp":"eb7c39a017f0a33e9d9bae7556c678c2840af4cc0beb98b2aeff9d651e370128","src/vendor/SPIRV-Cross/spirv_cfg.cpp":"09b756068d95a87a65a186831484fe21a973d3eb19675225b23864b9d37fe3d0","src/vendor/SPIRV-Cross/spirv_cfg.hpp":"ad98c4e82d6da3ed450dfc7939814115ae6b29ba2a25b0148cfc8c88774055b5","src/vendor/SPIRV-Cross/spirv_common.hpp":"5d0658a6fe9ac2f628754d2439b4d26ec8df7e50e4b1
96567cd4c7e7b8ce7574","src/vendor/SPIRV-Cross/spirv_cpp.cpp":"d067269ec6fc56860b4ed39edddf0a127f5e2d091512a85a79f02d5f3beccd19","src/vendor/SPIRV-Cross/spirv_cpp.hpp":"751ce82c70a399781f18aca54ea7fbb98083b98706f2a6be1cd1aa90627ef597","src/vendor/SPIRV-Cross/spirv_cross.cpp":"225b4cf4c7f8868019680d0a75a3da7983db428783ee1a3dcbb8c2765a71e79a","src/vendor/SPIRV-Cross/spirv_cross.hpp":"7eab2d7ae4b25a3c4fe85631e664727e036ae23a776aeb8c17db23f08020258a","src/vendor/SPIRV-Cross/spirv_cross_c.cpp":"80f438f4700f0b0b02547771f82be0610ba5e24ba3edb56d4331d59a206a4c2e","src/vendor/SPIRV-Cross/spirv_cross_c.h":"8b0ad63ed9fe52bc62f305afa27b5e33d9bd689a8b3f9935353742faf88a1e91","src/vendor/SPIRV-Cross/spirv_cross_containers.hpp":"409f60a5cb1dba1d0c96ef61761d8643402fc5ab67cecce12f6af3d135cfa338","src/vendor/SPIRV-Cross/spirv_cross_error_handling.hpp":"45868796346eef895cc05825582e60d26c34ec35d1a2b97f5e172f05fc565b19","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.cpp":"7c7e08db4dfc1084fd9cd266a6f829d35a6b2856198b16307a44f4d1c0743106","src/vendor/SPIRV-Cross/spirv_cross_parsed_ir.hpp":"d15446565eaa430bf2b14b596b3e579a72095a386b1eb256afc977cdf42ec5b9","src/vendor/SPIRV-Cross/spirv_cross_util.cpp":"f2426dccad2fff44053f6b1b9558719471a1ba0445c317f2c0311bc0d8b4636d","src/vendor/SPIRV-Cross/spirv_cross_util.hpp":"63ca5b2d6c80b5ad39473d6841676e48d5db3d2cbcb252fe6a6db0ef2077f0f5","src/vendor/SPIRV-Cross/spirv_glsl.cpp":"c1704189895442d593819472d3f852979107ac1798375d5a44d8f97be17fa3b5","src/vendor/SPIRV-Cross/spirv_glsl.hpp":"fb5e00ff589e3c5a25c5a15e78cf4809b753ebe04c84a008dc977330b8cbbc27","src/vendor/SPIRV-Cross/spirv_hlsl.cpp":"549ff42d0773a3f9e555d1aebd4c0084709e11528082f47c5f2b8b07de029f7c","src/vendor/SPIRV-Cross/spirv_hlsl.hpp":"18f303e7a384acf0e14677b579b100aee838813f54735330f0d1387f2493b363","src/vendor/SPIRV-Cross/spirv_msl.cpp":"7266afeb827e50782cb35362788d27fa9bd641992380008604ea1a4e48ec9c37","src/vendor/SPIRV-Cross/spirv_msl.hpp":"76b609e0b8d2f421c51e98d6357fcf44666b0d510935e083e4e02f20a80051e1","src/vendor/SPIRV-Cross/spirv_parser.cpp":"4def4db8c77967dcd2d6d93330343dc6ebb309e8cd696f52ee568a7c3ee97036","src/vendor/SPIRV-Cross/spirv_parser.hpp":"fc5f92184d1b4986f78088bee8ed7ddb9b342a34f9a3e55a4a594187292b66b8","src/vendor/SPIRV-Cross/spirv_reflect.cpp":"995a4e4315f67d4a2084e2b5d982de9baf72c7e265dde71d03ae0b8a29a24b92","src/vendor/SPIRV-Cross/spirv_reflect.hpp":"a874b40be0b210c69718e87a90e9add24653957656a995d83656f0918cfd10e4","src/wrapper.cpp":"7800cec5564dc1c7c13059cb1b1cac8d9be9248bbc887d5176600980c0d18142","src/wrapper.hpp":"b9a4f846bf57ee7a10fbb81f8f49df96ad11b1e5749c3c510925bb9b029ac08a","tests/common/mod.rs":"2843bf104c7938d93065f7b5688c9f063ad9e5720c407c737aedc5f2dee5a80f","tests/glsl_tests.rs":"ddd85107370dbfcde9d004d6717e229a7278d29ecba375828d8cb79e86625f66","tests/hlsl_tests.rs":"2058cb590c5ab85a636c93c18d0f2a79eb5ea5e647b7f10a2c83fbf394964dd6","tests/msl_tests.rs":"211d3b9cb43455a4c55bd619c05acdd21953358580c50ae75cac3f06eb26c5dd","tests/shaders/array.vert":"d0dab7ddea131e069961054f40a164602448aa78720b7ff480e141d1f7b0b2d6","tests/shaders/array.vert.spv":"8e44421590ade1716be66ad39f60fb1ce58eedeab8f0806335a7369687b308b1","tests/shaders/multiple_entry_points.cl":"2abbe57f2387f07f7f5f4cd375e47900be9c13bdc79aa0ed98a43a798cb0df81","tests/shaders/multiple_entry_points.cl.spv":"bdd34ce6765dbeab637631c3cbdf251532870d9fd6cd6c54883c0c872058ab3b","tests/shaders/rasterize_disabled.vert":"da6de172549830216933c44edf18b13113d7ca87462e3d09ad50dfc9c9836821","tests/shaders/rasterize_disabled.vert.spv":"2ba809eb500ed6e5
a067389ccc056551e796e7019517593d110fb62c9dca2056","tests/shaders/sampler.frag":"4c568e65176afe596dd8ef279485e992607e94d612786214ae1c6702d0322e1f","tests/shaders/sampler.frag.spv":"bd7bd1973a82dcfdf5755361fa4dd420fdf1c32c5de0a6f0896a8d5971f98684","tests/shaders/simple.vert":"ea143c97dff5ef03728b96b2dd893bdc59d56651581ecf9fe50f10807b0efdd0","tests/shaders/simple.vert.spv":"a2b5094ffd76288e0d08c37ce0351e28f20bb6d80ddd73fc44a71c1c7cbbf7db","tests/shaders/specialization.comp":"ce32fa1615737209f2e465ea347d79030ddcb33a88c38447e7cde7dffc920163","tests/shaders/specialization.comp.spv":"848604e37b870b8999692b266677be2ce0df6ce38093a0d81e6bc43d0bdf8a3f","tests/shaders/struct.frag":"d8840bb1961d6f14609b00ee54406c1e3ea31cecd8231b77cfb73d28b71910c0","tests/shaders/struct.frag.spv":"774aa886374eb95abf9bb7d0045ee77d97e26529e9ec96b90991a515fdbca4be","tests/shaders/struct.vert":"9299cda83ddb5b5c3d95ab0d057e4df2af137dfd92d6c4d3e96295b7d42e29a1","tests/shaders/struct.vert.spv":"4a82bdee72616ac058bc60d4255efa3e78199a2b8597570c013bebbee7107fb7","tests/shaders/two_ubo.vert":"be109b2c65e5e9e1bb0dab968d7f651232f6b1c46935a3928f980bf7a40f2d62","tests/shaders/two_ubo.vert.spv":"efd14e2d0a782d61dd944711f30b9e7fcb14af17593c1fe4e11cf2b7e232bcc2","tests/shaders/workgroup.comp":"478044b5392e0d1fb042253d71ea6bf7b8a014af4a6ee35d8db4c5343ac69739","tests/shaders/workgroup.comp.spv":"72f636fe3d1d6d0c5963f71bf4349c7e40d544331b33b6b64fb5b65784e6abee","tests/spirv_tests.rs":"6de5c893f631d550fc86853537bddb824ffb534a0a7f130406be3b9ed166f198"},"package":null}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+index 77939d8fc6..704f2ed200 100644
+--- third_party/rust/syn/.cargo-checksum.json
++++ third_party/rust/syn/.cargo-checksum.json
+@@ -1 +1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffab5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835
f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97da41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f23da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5
fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465bea613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae3068309998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d
01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
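The two long lines in the hunk above are the old and new `.cargo-checksum.json` for the vendored `syn` crate: a single JSON object mapping each file in the crate to its SHA-256 digest, plus a `package` field holding the checksum of the published crate tarball (here `null`, since the sources are vendored rather than fetched). As a rough sketch of that layout, reading such a file could look like the following; the crate choice (`serde`, `serde_json`) and the struct name are illustrative assumptions, not part of the patch.

```rust
use std::collections::BTreeMap;

use serde::Deserialize;

// Shape of a .cargo-checksum.json as seen above: per-file digests plus an
// optional checksum for the crate tarball itself.
#[derive(Deserialize)]
struct CargoChecksum {
    files: BTreeMap<String, String>,
    package: Option<String>,
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4"},"package":null}"#;
    let checksum: CargoChecksum = serde_json::from_str(raw)?;
    println!("{} file(s), package checksum: {:?}", checksum.files.len(), checksum.package);
    Ok(())
}
```

(Assumes `serde` with the `derive` feature and `serde_json` as dependencies.)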
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+index 7a5c962f06..20277fc461 100644
+--- third_party/rust/syn/Cargo.toml
++++ third_party/rust/syn/Cargo.toml
+@@ -13,7 +13,7 @@
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+@@ -24,25 +24,21 @@ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+@@ -52,18 +48,34 @@ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+@@ -80,7 +92,6 @@ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+index 29a7f32a46..12b5f45b3d 100644
+--- third_party/rust/syn/README.md
++++ third_party/rust/syn/README.md
+@@ -1,10 +1,10 @@
+ Parser for Rust source code
+ ===========================
+
+-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
+-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
+-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
+-[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+@@ -46,10 +46,6 @@ contains some APIs that may be useful more generally.
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+@@ -88,8 +84,6 @@ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+@@ -271,7 +265,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+index 08ecd90960..58ab8df297 100644
+--- third_party/rust/syn/benches/file.rs
++++ third_party/rust/syn/benches/file.rs
+@@ -1,9 +1,16 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+index e3d9cd29ba..50e1a7f601 100644
+--- third_party/rust/syn/benches/rust.rs
++++ third_party/rust/syn/benches/rust.rs
+@@ -4,7 +4,14 @@
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+@@ -28,31 +35,35 @@ mod syn_parse {
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+@@ -104,11 +115,11 @@ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+@@ -128,12 +139,12 @@ fn main() {
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+index c0f9ed3406..cf7681c3f9 100644
+--- third_party/rust/syn/build.rs
++++ third_party/rust/syn/build.rs
+@@ -1,6 +1,6 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+@@ -26,38 +26,14 @@ struct Compiler {
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
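The `build.rs` hunk above replaces the hand-rolled `match`-and-return chains in `rustc_version()` with the `?` operator, which short-circuits on `None` for `Option` just as it does on `Err` for `Result`. The new function body, reproduced as a standalone sketch with a small `main` harness added here purely for illustration:

```rust
use std::env;
use std::process::Command;
use std::str;

struct Compiler {
    minor: u32,
    nightly: bool,
}

// Every fallible step returns Option and bails out with `?`,
// replacing the explicit `match ... { None => return None }` chains.
fn rustc_version() -> Option<Compiler> {
    let rustc = env::var_os("RUSTC")?;
    let output = Command::new(rustc).arg("--version").output().ok()?;
    let version = str::from_utf8(&output.stdout).ok()?;
    let mut pieces = version.split('.');
    if pieces.next() != Some("rustc 1") {
        return None;
    }
    let minor = pieces.next()?.parse().ok()?;
    let nightly = version.contains("nightly");
    Some(Compiler { minor, nightly })
}

fn main() {
    match rustc_version() {
        Some(c) => println!("rustc 1.{} (nightly: {})", c.minor, c.nightly),
        None => println!("could not determine rustc version"),
    }
}
```

Running the harness requires the `RUSTC` environment variable to point at a rustc binary, as it does inside a Cargo build.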
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+index 34009deabc..fa4f1cb2a3 100644
+--- third_party/rust/syn/src/attr.rs
++++ third_party/rust/syn/src/attr.rs
+@@ -9,15 +9,11 @@ use proc_macro2::TokenStream;
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -111,7 +107,46 @@ ast_struct! {
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+@@ -120,39 +155,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+@@ -199,7 +206,7 @@ impl Attribute {
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+@@ -208,7 +215,7 @@ impl Attribute {
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+@@ -221,7 +228,7 @@ impl Attribute {
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+@@ -234,7 +241,7 @@ impl Attribute {
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+@@ -247,7 +254,7 @@ impl Attribute {
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+@@ -261,19 +268,23 @@ fn error_expected_args(attr: &Attribute) -> Error {
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
+@@ -298,7 +309,7 @@ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+@@ -312,7 +323,6 @@ ast_enum! {
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+@@ -322,7 +332,7 @@ ast_enum! {
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+@@ -360,7 +370,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+@@ -372,7 +382,7 @@ ast_struct! {
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+@@ -398,7 +408,7 @@ impl Meta {
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+@@ -429,8 +439,8 @@ ast_enum_of_structs! {
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -464,7 +474,7 @@ where
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+@@ -474,7 +484,7 @@ where
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
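The final hunks above replace the `_` catch-alls in `is_outer` and `is_inner` with explicit `AttrStyle::Inner(_)` and `AttrStyle::Outer` arms, so a hypothetical future attribute style would be a compile error rather than being silently misclassified. The same exhaustive style works from user code against syn's public `Attribute` and `AttrStyle` types; the helper below is invented for illustration and assumes syn 1.x with default features.

```rust
use syn::{parse_quote, AttrStyle, Attribute};

// Hypothetical helper (not part of the patch): keep only outer attributes,
// matching both AttrStyle variants explicitly, as the patched code does.
fn outer_attrs(attrs: &[Attribute]) -> Vec<&Attribute> {
    attrs
        .iter()
        .filter(|attr| match attr.style {
            AttrStyle::Outer => true,
            AttrStyle::Inner(_) => false,
        })
        .collect()
}

fn main() {
    let attrs: Vec<Attribute> = vec![
        parse_quote!(#[inline]),            // outer attribute
        parse_quote!(#![allow(dead_code)]), // inner attribute
    ];
    assert_eq!(outer_attrs(&attrs).len(), 1);
}
```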
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+index 551a5ac816..a461cc49ea 100644
+--- third_party/rust/syn/src/buffer.rs
++++ third_party/rust/syn/src/buffer.rs
+@@ -1,7 +1,7 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+@@ -36,7 +36,7 @@ enum Entry {
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+@@ -98,7 +98,7 @@ impl TokenBuffer {
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -133,8 +133,7 @@ impl TokenBuffer {
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+@@ -201,13 +200,13 @@ impl<'a> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+@@ -215,13 +214,14 @@ impl<'a> Cursor<'a> {
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+@@ -342,6 +342,44 @@ impl<'a> Cursor<'a> {
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
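The behavioural change in this file is in `ignore_none`: the old `if let` unwrapped at most one `None`-delimited group, while the new `while let` keeps descending until the cursor no longer points at such a group (nested invisible groups can arise from nested macro expansions). A self-contained sketch of that if-versus-while difference follows; the `Entry` type here is invented and far simpler than syn's real buffer entries.

```rust
// Stand-in for a token that may be wrapped in any number of invisible groups.
enum Entry {
    Group(Box<Entry>),
    Ident(&'static str),
}

// Old behaviour (`if let`): peels at most one wrapper.
fn peel_once(mut e: &Entry) -> &Entry {
    if let Entry::Group(inner) = e {
        e = &**inner;
    }
    e
}

// New behaviour (`while let`): keeps peeling until a real token is reached.
fn peel_all(mut e: &Entry) -> &Entry {
    while let Entry::Group(inner) = e {
        e = &**inner;
    }
    e
}

fn main() {
    let nested = Entry::Group(Box::new(Entry::Group(Box::new(Entry::Ident("x")))));
    // With two levels of wrapping, the single-step version stops too early...
    assert!(matches!(peel_once(&nested), Entry::Group(_)));
    // ...while the looping version reaches the identifier.
    if let Entry::Ident(name) = peel_all(&nested) {
        println!("reached `{}`", name);
    }
}
```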
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+index 200e8478ef..a33044a564 100644
+--- third_party/rust/syn/src/custom_keyword.rs
++++ third_party/rust/syn/src/custom_keyword.rs
+@@ -86,7 +86,7 @@
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+@@ -95,7 +95,7 @@ macro_rules! custom_keyword {
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+@@ -112,10 +112,10 @@ macro_rules! custom_keyword {
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
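The `custom_keyword!` hunks above swap `#[macro_export(local_inner_macros)]` for a plain `#[macro_export]` with `$crate::`-qualified helper macros and add `dead_code` to the allow list on the generated constructor; invocations of the macro are unaffected. A typical use, following the pattern in syn's documentation (the `cache` keyword and the `CacheSize` parser are invented; assumes syn 1.x with default features):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::LitInt;

// Generates a `kw::cache` token type that can be parsed and peeked
// like a built-in keyword.
mod kw {
    syn::custom_keyword!(cache);
}

// A made-up attribute argument of the form `cache = 64`.
struct CacheSize {
    _kw: kw::cache,
    _eq: syn::Token![=],
    value: LitInt,
}

impl Parse for CacheSize {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(CacheSize {
            _kw: input.parse()?,
            _eq: input.parse()?,
            value: input.parse()?,
        })
    }
}

fn main() -> Result<()> {
    let parsed: CacheSize = syn::parse_str("cache = 64")?;
    println!("cache size = {}", parsed.value.base10_parse::<u64>()?);
    Ok(())
}
```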
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+index 29fa448bd8..70dff42851 100644
+--- third_party/rust/syn/src/custom_punctuation.rs
++++ third_party/rust/syn/src/custom_punctuation.rs
+@@ -74,19 +74,19 @@
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+@@ -98,33 +98,33 @@ macro_rules! custom_punctuation {
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+@@ -142,12 +142,12 @@ macro_rules! impl_parse_for_custom_punctuation {
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+@@ -221,16 +221,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+@@ -279,7 +279,7 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+@@ -297,13 +297,3 @@ macro_rules! stringify_punct {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
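`custom_punctuation!` gets the same `local_inner_macros` to `$crate::` cleanup, which also makes the `custom_punctuation_concat!` workaround removable. Usage stays the same; a sketch in the style of syn's documented example (the `PathSep` token and `SlashPath` parser are invented; assumes syn 1.x with default features):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::Ident;

// Defines a custom `/` separator token for paths like `a/b/c`.
syn::custom_punctuation!(PathSep, /);

struct SlashPath {
    segments: Punctuated<Ident, PathSep>,
}

impl Parse for SlashPath {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(SlashPath {
            segments: Punctuated::parse_separated_nonempty(input)?,
        })
    }
}

fn main() -> Result<()> {
    let path: SlashPath = syn::parse_str("a/b/c")?;
    println!("{} segments", path.segments.len());
    Ok(())
}
```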
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+index be43679874..b217b8ca6f 100644
+--- third_party/rust/syn/src/data.rs
++++ third_party/rust/syn/src/data.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+@@ -24,7 +24,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -52,7 +52,7 @@ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+@@ -63,7 +63,7 @@ ast_struct! {
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+@@ -93,6 +93,24 @@ impl Fields {
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+@@ -129,7 +147,7 @@ impl<'a> IntoIterator for &'a mut Fields {
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+@@ -154,7 +172,7 @@ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -184,7 +202,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+@@ -194,7 +212,7 @@ ast_struct! {
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+@@ -205,7 +223,7 @@ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+@@ -220,12 +238,15 @@ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+@@ -295,6 +316,17 @@ pub mod parsing {
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+@@ -310,27 +342,39 @@ pub mod parsing {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
+ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+@@ -347,6 +391,14 @@ pub mod parsing {
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
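[Reviewer note, not part of the patch above.] The data.rs hunks add `Fields::len`/`Fields::is_empty` and tighten `Visibility` parsing so a tuple field such as `pub (crate::A, crate::B)` is no longer mistaken for a restricted visibility (the Issue #720 case mentioned in the patch comment). A minimal sketch of the resulting behavior, assuming syn ~1.0 with its default features; the struct name `Pair` is invented for illustration:

```rust
// Sketch exercising the data.rs changes above (syn ~1.0, default features).
use syn::{Data, DeriveInput, Fields};

fn main() -> syn::Result<()> {
    // Per the patch comment, `pub (crate::A, crate::B)` is a `pub` field of
    // tuple type, not a `pub(restricted)` visibility.
    let input: DeriveInput =
        syn::parse_str("pub struct Pair(pub (crate::A, crate::B), pub u8);")?;

    if let Data::Struct(data) = &input.data {
        // New helpers added by this patch.
        assert_eq!(data.fields.len(), 2);
        assert!(!data.fields.is_empty());
        if let Fields::Unnamed(fields) = &data.fields {
            assert_eq!(fields.unnamed.len(), 2);
        }
    }
    Ok(())
}
```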
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+index 8cb9cf7b6d..3fa9d89a93 100644
+--- third_party/rust/syn/src/derive.rs
++++ third_party/rust/syn/src/derive.rs
+@@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+@@ -26,7 +26,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -53,7 +53,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+@@ -65,7 +65,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+@@ -77,7 +77,7 @@ ast_struct! {
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+index 4d9ff93728..76c9fce6f8 100644
+--- third_party/rust/syn/src/discouraged.rs
++++ third_party/rust/syn/src/discouraged.rs
+@@ -16,7 +16,7 @@ pub trait Speculative {
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+@@ -72,7 +72,6 @@ pub trait Speculative {
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+@@ -164,6 +163,30 @@ impl<'a> Speculative for ParseBuffer<'a> {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
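[Reviewer note, not part of the patch above.] The discouraged.rs hunk refines how `advance_to` reconciles "unexpected token" state between a fork and its parent stream. For readers unfamiliar with the API being patched, a sketch of the fork/advance_to pattern it supports, assuming syn ~1.0 with the `parsing` feature; the `MaybeTyped` type is invented for illustration:

```rust
// Sketch of the Speculative::advance_to pattern (syn ~1.0, "parsing" feature).
use syn::parse::discouraged::Speculative;
use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, Token, Type};

// Hypothetical item: an identifier with an optional `: Type` annotation.
struct MaybeTyped {
    name: Ident,
    ty: Option<Type>,
}

impl Parse for MaybeTyped {
    fn parse(input: ParseStream) -> Result<Self> {
        let name: Ident = input.parse()?;
        // Try the annotation on a fork and only commit with advance_to on
        // success, so a failed attempt leaves `input` untouched.
        let ahead = input.fork();
        let ty = match ahead.parse::<Token![:]>().and(ahead.parse::<Type>()) {
            Ok(ty) => {
                input.advance_to(&ahead);
                Some(ty)
            }
            Err(_) => None,
        };
        Ok(MaybeTyped { name, ty })
    }
}

fn main() {
    let parsed: MaybeTyped = syn::parse_str("x: u32").unwrap();
    assert!(parsed.ty.is_some());
}
```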
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+index 146d652299..dba34f9254 100644
+--- third_party/rust/syn/src/error.rs
++++ third_party/rust/syn/src/error.rs
+@@ -1,4 +1,3 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+@@ -32,8 +31,8 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+@@ -82,7 +81,6 @@ pub type Result<T> = std::result::Result<T, Error>;
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+@@ -250,6 +248,17 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+@@ -278,6 +287,14 @@ impl Display for Error {
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+@@ -355,3 +372,11 @@ impl<'a> Iterator for Iter<'a> {
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
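[Reviewer note, not part of the patch above.] The error.rs hunks replace the derived `Clone` with a manual impl and add `Extend<Error>`, which makes it easy to fold several diagnostics into one error whose messages keep their own spans. A sketch, assuming syn ~1.0 and proc-macro2 ~1.0:

```rust
// Sketch of combining syn::Error values via the Extend impl added above.
use proc_macro2::Span;
use syn::Error;

fn main() {
    let problems = vec![
        Error::new(Span::call_site(), "first problem"),
        Error::new(Span::call_site(), "second problem"),
    ];

    // Fold everything into one Error; each message keeps its own span, so
    // to_compile_error() would emit one compile_error! per message.
    let mut iter = problems.into_iter();
    let mut combined = iter.next().unwrap();
    combined.extend(iter); // uses the new Extend<Error> impl

    assert_eq!(combined.into_iter().count(), 2);
}
```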
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+index 2874a463aa..2fe0e0b5d8 100644
+--- third_party/rust/syn/src/expr.rs
++++ third_party/rust/syn/src/expr.rs
+@@ -1,18 +1,21 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+@@ -83,7 +86,7 @@ ast_enum_of_structs! {
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+@@ -228,7 +231,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -239,7 +242,7 @@ ast_struct! {
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -251,7 +254,7 @@ ast_struct! {
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+@@ -263,7 +266,7 @@ ast_struct! {
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+@@ -275,7 +278,7 @@ ast_struct! {
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -287,7 +290,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+@@ -300,7 +303,7 @@ ast_struct! {
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -311,7 +314,7 @@ ast_struct! {
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -323,7 +326,7 @@ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+@@ -335,7 +338,7 @@ ast_struct! {
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+@@ -348,7 +351,7 @@ ast_struct! {
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+@@ -361,7 +364,7 @@ ast_struct! {
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+@@ -378,7 +381,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+@@ -390,7 +393,7 @@ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+@@ -402,7 +405,7 @@ ast_struct! {
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -421,7 +424,7 @@ ast_struct! {
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+@@ -436,7 +439,7 @@ ast_struct! {
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+@@ -449,7 +452,7 @@ ast_struct! {
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+@@ -462,7 +465,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -475,7 +478,7 @@ ast_struct! {
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+@@ -486,7 +489,7 @@ ast_struct! {
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -498,7 +501,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -508,7 +511,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+@@ -521,7 +524,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+@@ -536,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -550,7 +553,7 @@ ast_struct! {
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+@@ -562,7 +565,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+@@ -574,7 +577,7 @@ ast_struct! {
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -587,7 +590,7 @@ ast_struct! {
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -600,7 +603,7 @@ ast_struct! {
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+@@ -614,7 +617,7 @@ ast_struct! {
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -628,7 +631,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -639,7 +642,7 @@ ast_struct! {
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+@@ -650,7 +653,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -661,7 +664,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -673,7 +676,7 @@ ast_struct! {
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+@@ -685,7 +688,7 @@ ast_struct! {
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+@@ -696,7 +699,7 @@ ast_struct! {
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+@@ -709,7 +712,7 @@ ast_struct! {
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+@@ -717,232 +720,6 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+@@ -996,7 +773,7 @@ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+@@ -1006,12 +783,50 @@ ast_enum! {
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+@@ -1027,28 +842,28 @@ impl From<usize> for Index {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
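[Reviewer note, not part of the patch above.] The `IdentFragment` impls added for `Member` and `Index` in the hunks above are what lets these types be interpolated into identifiers by quote's `format_ident!`. A sketch, assuming syn ~1.0 and quote ~1.0:

```rust
// Sketch of why the IdentFragment impls above are useful (syn ~1.0, quote ~1.0).
use quote::format_ident;
use syn::{Index, Member};

fn main() {
    // An unnamed (tuple) field position.
    let member = Member::Unnamed(Index::from(0));

    // format_ident! stringifies its arguments through IdentFragment,
    // so this yields the identifier `get_0`.
    let getter = format_ident!("get_{}", member);
    assert_eq!(getter.to_string(), "get_0");
}
```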
+@@ -1057,7 +872,7 @@ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+@@ -1070,7 +885,7 @@ ast_struct! {
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+@@ -1086,7 +901,7 @@ ast_enum! {
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+@@ -1107,7 +922,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+@@ -1134,7 +949,7 @@ ast_struct! {
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+@@ -1149,8 +964,7 @@ ast_struct! {
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+@@ -1162,7 +976,7 @@ ast_enum! {
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+@@ -1183,16 +997,17 @@ pub(crate) mod parsing {
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+@@ -1246,9 +1061,121 @@ pub(crate) mod parsing {
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
+ #[cfg(feature = "full")]
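[Reviewer note, not part of the patch above.] The hunk above promotes the old private `expr_no_struct` helper to the public `Expr::parse_without_eager_brace`, documented at length for if/while/match condition positions. A sketch of a caller, assuming syn ~1.0 with the `full` feature and default features for `custom_keyword!`; the `when` construct is invented for illustration:

```rust
// Sketch of calling the new Expr::parse_without_eager_brace (syn ~1.0, "full").
use syn::parse::{Parse, ParseStream, Result};
use syn::{Block, Expr};

syn::custom_keyword!(when);

// Hypothetical `when <cond> { ... }` construct whose condition sits in the
// same ambiguous position as an `if` condition.
struct WhenExpr {
    when_token: when,
    cond: Expr,
    body: Block,
}

impl Parse for WhenExpr {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(WhenExpr {
            when_token: input.parse()?,
            // Do not treat the trailing `{ ... }` as a struct literal body.
            cond: input.call(Expr::parse_without_eager_brace)?,
            body: input.parse()?,
        })
    }
}

fn main() {
    let w: WhenExpr = syn::parse_str("when *flag { do_thing(); }").unwrap();
    assert!(matches!(w.cond, Expr::Unary(_)));
}
```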
+@@ -1430,56 +1357,84 @@ pub(crate) mod parsing {
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+@@ -1495,13 +1450,11 @@ pub(crate) mod parsing {
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+@@ -1523,18 +1476,26 @@ pub(crate) mod parsing {
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+@@ -1620,10 +1581,17 @@ pub(crate) mod parsing {
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+@@ -1646,7 +1614,11 @@ pub(crate) mod parsing {
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+@@ -1668,7 +1640,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+@@ -1740,7 +1711,6 @@ pub(crate) mod parsing {
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+@@ -1878,7 +1848,7 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+@@ -1905,7 +1875,7 @@ pub(crate) mod parsing {
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+@@ -1951,7 +1921,16 @@ pub(crate) mod parsing {
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
+@@ -1960,44 +1939,20 @@ pub(crate) mod parsing {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+@@ -2033,29 +1988,14 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2063,7 +2003,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+@@ -2077,6 +2017,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+@@ -2086,7 +2027,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+@@ -2097,8 +2038,9 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2110,7 +2052,7 @@ pub(crate) mod parsing {
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+@@ -2305,9 +2247,10 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+@@ -2315,7 +2258,7 @@ pub(crate) mod parsing {
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+@@ -2399,6 +2342,7 @@ pub(crate) mod parsing {
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+@@ -2416,7 +2360,7 @@ pub(crate) mod parsing {
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+@@ -2433,46 +2377,36 @@ pub(crate) mod parsing {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+@@ -2577,27 +2511,7 @@ pub(crate) mod parsing {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+@@ -2641,6 +2555,26 @@ pub(crate) mod parsing {
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
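[Reviewer note, not part of the patch above.] The `multi_index` helper added near the end of the expr.rs changes splits a float literal token such as `0.1` back into chained tuple-field accesses, so expressions like `x.0.1` now parse as nested `ExprField`s. A sketch, assuming syn ~1.0 with the `full` feature:

```rust
// Sketch of the behavior enabled by the multi_index helper above (syn ~1.0, "full").
use syn::{Expr, Member};

fn main() {
    // After `x.` the tokenizer produces a single float token `0.1`.
    let expr: Expr = syn::parse_str("x.0.1").unwrap();

    // Outermost access is `.1` on the inner expression `x.0`.
    if let Expr::Field(outer) = &expr {
        assert!(matches!(outer.member, Member::Unnamed(ref i) if i.index == 1));
        assert!(matches!(*outer.base, Expr::Field(_)));
    } else {
        panic!("expected a field access");
    }
}
```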
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+index d09577a27a..4f9bc145d9 100644
+--- third_party/rust/syn/src/ext.rs
++++ third_party/rust/syn/src/ext.rs
+@@ -1,6 +1,6 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+@@ -16,7 +16,7 @@ use crate::token::CustomToken;
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+@@ -129,7 +129,13 @@ mod private {
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
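[Reviewer note, not part of the patch above.] The ext.rs changes touch `IdentExt`, the trait whose `parse_any` the visibility parser uses to accept keywords like `crate`/`self`/`super` as path segments. A sketch of using it outside syn, assuming syn ~1.0 with the `parsing` feature; the `AnyName` wrapper is invented for illustration:

```rust
// Sketch of IdentExt::parse_any, which accepts keywords where a plain Ident
// parse would fail (syn ~1.0, "parsing" feature).
use syn::ext::IdentExt;
use syn::parse::{Parse, ParseStream, Result};
use syn::Ident;

// Hypothetical wrapper that accepts any identifier, keyword or not.
struct AnyName(Ident);

impl Parse for AnyName {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(AnyName(input.call(Ident::parse_any)?))
    }
}

fn main() {
    // `crate` is a keyword, so parsing it as a plain Ident would fail,
    // but parse_any accepts it.
    let name: AnyName = syn::parse_str("crate").unwrap();
    assert_eq!(name.0.to_string(), "crate");
}
```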
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+index 88c02fe832..c8fab63cd9 100644
+--- third_party/rust/syn/src/file.rs
++++ third_party/rust/syn/src/file.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+@@ -37,6 +37,8 @@ ast_struct! {
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+index 0000000000..bea3887013
+--- /dev/null
++++ third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {
++ ItemForeignMod {
++ attrs: self.attrs.clone(),
++ abi: self.abi.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemImpl {
++ fn clone(&self) -> Self {
++ ItemImpl {
++ attrs: self.attrs.clone(),
++ defaultness: self.defaultness.clone(),
++ unsafety: self.unsafety.clone(),
++ impl_token: self.impl_token.clone(),
++ generics: self.generics.clone(),
++ trait_: self.trait_.clone(),
++ self_ty: self.self_ty.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro {
++ fn clone(&self) -> Self {
++ ItemMacro {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMacro2 {
++ fn clone(&self) -> Self {
++ ItemMacro2 {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ macro_token: self.macro_token.clone(),
++ ident: self.ident.clone(),
++ rules: self.rules.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemMod {
++ fn clone(&self) -> Self {
++ ItemMod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ mod_token: self.mod_token.clone(),
++ ident: self.ident.clone(),
++ content: self.content.clone(),
++ semi: self.semi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStatic {
++ fn clone(&self) -> Self {
++ ItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemStruct {
++ fn clone(&self) -> Self {
++ ItemStruct {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ struct_token: self.struct_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTrait {
++ fn clone(&self) -> Self {
++ ItemTrait {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ unsafety: self.unsafety.clone(),
++ auto_token: self.auto_token.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ supertraits: self.supertraits.clone(),
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemTraitAlias {
++ fn clone(&self) -> Self {
++ ItemTraitAlias {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ trait_token: self.trait_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ bounds: self.bounds.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemType {
++ fn clone(&self) -> Self {
++ ItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUnion {
++ fn clone(&self) -> Self {
++ ItemUnion {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ union_token: self.union_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemUse {
++ fn clone(&self) -> Self {
++ ItemUse {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ use_token: self.use_token.clone(),
++ leading_colon: self.leading_colon.clone(),
++ tree: self.tree.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Label {
++ fn clone(&self) -> Self {
++ Label {
++ name: self.name.clone(),
++ colon_token: self.colon_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for LifetimeDef {
++ fn clone(&self) -> Self {
++ LifetimeDef {
++ attrs: self.attrs.clone(),
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++impl Clone for Lit {
++ fn clone(&self) -> Self {
++ match self {
++ Lit::Str(v0) => Lit::Str(v0.clone()),
++ Lit::ByteStr(v0) => Lit::ByteStr(v0.clone()),
++ Lit::Byte(v0) => Lit::Byte(v0.clone()),
++ Lit::Char(v0) => Lit::Char(v0.clone()),
++ Lit::Int(v0) => Lit::Int(v0.clone()),
++ Lit::Float(v0) => Lit::Float(v0.clone()),
++ Lit::Bool(v0) => Lit::Bool(v0.clone()),
++ Lit::Verbatim(v0) => Lit::Verbatim(v0.clone()),
++ }
++ }
++}
++impl Clone for LitBool {
++ fn clone(&self) -> Self {
++ LitBool {
++ value: self.value.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Local {
++ fn clone(&self) -> Self {
++ Local {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ init: self.init.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Macro {
++ fn clone(&self) -> Self {
++ Macro {
++ path: self.path.clone(),
++ bang_token: self.bang_token.clone(),
++ delimiter: self.delimiter.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MacroDelimiter {
++ fn clone(&self) -> Self {
++ match self {
++ MacroDelimiter::Paren(v0) => MacroDelimiter::Paren(v0.clone()),
++ MacroDelimiter::Brace(v0) => MacroDelimiter::Brace(v0.clone()),
++ MacroDelimiter::Bracket(v0) => MacroDelimiter::Bracket(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Member {
++ fn clone(&self) -> Self {
++ match self {
++ Member::Named(v0) => Member::Named(v0.clone()),
++ Member::Unnamed(v0) => Member::Unnamed(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Meta {
++ fn clone(&self) -> Self {
++ match self {
++ Meta::Path(v0) => Meta::Path(v0.clone()),
++ Meta::List(v0) => Meta::List(v0.clone()),
++ Meta::NameValue(v0) => Meta::NameValue(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaList {
++ fn clone(&self) -> Self {
++ MetaList {
++ path: self.path.clone(),
++ paren_token: self.paren_token.clone(),
++ nested: self.nested.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for MetaNameValue {
++ fn clone(&self) -> Self {
++ MetaNameValue {
++ path: self.path.clone(),
++ eq_token: self.eq_token.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for MethodTurbofish {
++ fn clone(&self) -> Self {
++ MethodTurbofish {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for NestedMeta {
++ fn clone(&self) -> Self {
++ match self {
++ NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
++ NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ParenthesizedGenericArguments {
++ fn clone(&self) -> Self {
++ ParenthesizedGenericArguments {
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Pat {
++ fn clone(&self) -> Self {
++ match self {
++ Pat::Box(v0) => Pat::Box(v0.clone()),
++ Pat::Ident(v0) => Pat::Ident(v0.clone()),
++ Pat::Lit(v0) => Pat::Lit(v0.clone()),
++ Pat::Macro(v0) => Pat::Macro(v0.clone()),
++ Pat::Or(v0) => Pat::Or(v0.clone()),
++ Pat::Path(v0) => Pat::Path(v0.clone()),
++ Pat::Range(v0) => Pat::Range(v0.clone()),
++ Pat::Reference(v0) => Pat::Reference(v0.clone()),
++ Pat::Rest(v0) => Pat::Rest(v0.clone()),
++ Pat::Slice(v0) => Pat::Slice(v0.clone()),
++ Pat::Struct(v0) => Pat::Struct(v0.clone()),
++ Pat::Tuple(v0) => Pat::Tuple(v0.clone()),
++ Pat::TupleStruct(v0) => Pat::TupleStruct(v0.clone()),
++ Pat::Type(v0) => Pat::Type(v0.clone()),
++ Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
++ Pat::Wild(v0) => Pat::Wild(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatBox {
++ fn clone(&self) -> Self {
++ PatBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatIdent {
++ fn clone(&self) -> Self {
++ PatIdent {
++ attrs: self.attrs.clone(),
++ by_ref: self.by_ref.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ subpat: self.subpat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatLit {
++ fn clone(&self) -> Self {
++ PatLit {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatMacro {
++ fn clone(&self) -> Self {
++ PatMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatOr {
++ fn clone(&self) -> Self {
++ PatOr {
++ attrs: self.attrs.clone(),
++ leading_vert: self.leading_vert.clone(),
++ cases: self.cases.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatPath {
++ fn clone(&self) -> Self {
++ PatPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRange {
++ fn clone(&self) -> Self {
++ PatRange {
++ attrs: self.attrs.clone(),
++ lo: self.lo.clone(),
++ limits: self.limits.clone(),
++ hi: self.hi.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatReference {
++ fn clone(&self) -> Self {
++ PatReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ mutability: self.mutability.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatRest {
++ fn clone(&self) -> Self {
++ PatRest {
++ attrs: self.attrs.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatSlice {
++ fn clone(&self) -> Self {
++ PatSlice {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatStruct {
++ fn clone(&self) -> Self {
++ PatStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTuple {
++ fn clone(&self) -> Self {
++ PatTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatTupleStruct {
++ fn clone(&self) -> Self {
++ PatTupleStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatType {
++ fn clone(&self) -> Self {
++ PatType {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for PatWild {
++ fn clone(&self) -> Self {
++ PatWild {
++ attrs: self.attrs.clone(),
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Path {
++ fn clone(&self) -> Self {
++ Path {
++ leading_colon: self.leading_colon.clone(),
++ segments: self.segments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathArguments {
++ fn clone(&self) -> Self {
++ match self {
++ PathArguments::None => PathArguments::None,
++ PathArguments::AngleBracketed(v0) => PathArguments::AngleBracketed(v0.clone()),
++ PathArguments::Parenthesized(v0) => PathArguments::Parenthesized(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PathSegment {
++ fn clone(&self) -> Self {
++ PathSegment {
++ ident: self.ident.clone(),
++ arguments: self.arguments.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateEq {
++ fn clone(&self) -> Self {
++ PredicateEq {
++ lhs_ty: self.lhs_ty.clone(),
++ eq_token: self.eq_token.clone(),
++ rhs_ty: self.rhs_ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateLifetime {
++ fn clone(&self) -> Self {
++ PredicateLifetime {
++ lifetime: self.lifetime.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for PredicateType {
++ fn clone(&self) -> Self {
++ PredicateType {
++ lifetimes: self.lifetimes.clone(),
++ bounded_ty: self.bounded_ty.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for QSelf {
++ fn clone(&self) -> Self {
++ QSelf {
++ lt_token: self.lt_token.clone(),
++ ty: self.ty.clone(),
++ position: self.position.clone(),
++ as_token: self.as_token.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Copy for RangeLimits {}
++#[cfg(feature = "full")]
++impl Clone for RangeLimits {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Receiver {
++ fn clone(&self) -> Self {
++ Receiver {
++ attrs: self.attrs.clone(),
++ reference: self.reference.clone(),
++ mutability: self.mutability.clone(),
++ self_token: self.self_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ReturnType {
++ fn clone(&self) -> Self {
++ match self {
++ ReturnType::Default => ReturnType::Default,
++ ReturnType::Type(v0, v1) => ReturnType::Type(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Signature {
++ fn clone(&self) -> Self {
++ Signature {
++ constness: self.constness.clone(),
++ asyncness: self.asyncness.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Stmt {
++ fn clone(&self) -> Self {
++ match self {
++ Stmt::Local(v0) => Stmt::Local(v0.clone()),
++ Stmt::Item(v0) => Stmt::Item(v0.clone()),
++ Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
++ Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBound {
++ fn clone(&self) -> Self {
++ TraitBound {
++ paren_token: self.paren_token.clone(),
++ modifier: self.modifier.clone(),
++ lifetimes: self.lifetimes.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TraitBoundModifier {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItem {
++ fn clone(&self) -> Self {
++ match self {
++ TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
++ TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
++ TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
++ TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
++ TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemConst {
++ fn clone(&self) -> Self {
++ TraitItemConst {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMacro {
++ fn clone(&self) -> Self {
++ TraitItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemMethod {
++ fn clone(&self) -> Self {
++ TraitItemMethod {
++ attrs: self.attrs.clone(),
++ sig: self.sig.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for TraitItemType {
++ fn clone(&self) -> Self {
++ TraitItemType {
++ attrs: self.attrs.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ default: self.default.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Type {
++ fn clone(&self) -> Self {
++ match self {
++ Type::Array(v0) => Type::Array(v0.clone()),
++ Type::BareFn(v0) => Type::BareFn(v0.clone()),
++ Type::Group(v0) => Type::Group(v0.clone()),
++ Type::ImplTrait(v0) => Type::ImplTrait(v0.clone()),
++ Type::Infer(v0) => Type::Infer(v0.clone()),
++ Type::Macro(v0) => Type::Macro(v0.clone()),
++ Type::Never(v0) => Type::Never(v0.clone()),
++ Type::Paren(v0) => Type::Paren(v0.clone()),
++ Type::Path(v0) => Type::Path(v0.clone()),
++ Type::Ptr(v0) => Type::Ptr(v0.clone()),
++ Type::Reference(v0) => Type::Reference(v0.clone()),
++ Type::Slice(v0) => Type::Slice(v0.clone()),
++ Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
++ Type::Tuple(v0) => Type::Tuple(v0.clone()),
++ Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeArray {
++ fn clone(&self) -> Self {
++ TypeArray {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeBareFn {
++ fn clone(&self) -> Self {
++ TypeBareFn {
++ lifetimes: self.lifetimes.clone(),
++ unsafety: self.unsafety.clone(),
++ abi: self.abi.clone(),
++ fn_token: self.fn_token.clone(),
++ paren_token: self.paren_token.clone(),
++ inputs: self.inputs.clone(),
++ variadic: self.variadic.clone(),
++ output: self.output.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeGroup {
++ fn clone(&self) -> Self {
++ TypeGroup {
++ group_token: self.group_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeImplTrait {
++ fn clone(&self) -> Self {
++ TypeImplTrait {
++ impl_token: self.impl_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeInfer {
++ fn clone(&self) -> Self {
++ TypeInfer {
++ underscore_token: self.underscore_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeMacro {
++ fn clone(&self) -> Self {
++ TypeMacro {
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeNever {
++ fn clone(&self) -> Self {
++ TypeNever {
++ bang_token: self.bang_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParam {
++ fn clone(&self) -> Self {
++ TypeParam {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParamBound {
++ fn clone(&self) -> Self {
++ match self {
++ TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
++ TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeParen {
++ fn clone(&self) -> Self {
++ TypeParen {
++ paren_token: self.paren_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePath {
++ fn clone(&self) -> Self {
++ TypePath {
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypePtr {
++ fn clone(&self) -> Self {
++ TypePtr {
++ star_token: self.star_token.clone(),
++ const_token: self.const_token.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeReference {
++ fn clone(&self) -> Self {
++ TypeReference {
++ and_token: self.and_token.clone(),
++ lifetime: self.lifetime.clone(),
++ mutability: self.mutability.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeSlice {
++ fn clone(&self) -> Self {
++ TypeSlice {
++ bracket_token: self.bracket_token.clone(),
++ elem: self.elem.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTraitObject {
++ fn clone(&self) -> Self {
++ TypeTraitObject {
++ dyn_token: self.dyn_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for TypeTuple {
++ fn clone(&self) -> Self {
++ TypeTuple {
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for UnOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGlob {
++ fn clone(&self) -> Self {
++ UseGlob {
++ star_token: self.star_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseGroup {
++ fn clone(&self) -> Self {
++ UseGroup {
++ brace_token: self.brace_token.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseName {
++ fn clone(&self) -> Self {
++ UseName {
++ ident: self.ident.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UsePath {
++ fn clone(&self) -> Self {
++ UsePath {
++ ident: self.ident.clone(),
++ colon2_token: self.colon2_token.clone(),
++ tree: self.tree.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseRename {
++ fn clone(&self) -> Self {
++ UseRename {
++ ident: self.ident.clone(),
++ as_token: self.as_token.clone(),
++ rename: self.rename.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for UseTree {
++ fn clone(&self) -> Self {
++ match self {
++ UseTree::Path(v0) => UseTree::Path(v0.clone()),
++ UseTree::Name(v0) => UseTree::Name(v0.clone()),
++ UseTree::Rename(v0) => UseTree::Rename(v0.clone()),
++ UseTree::Glob(v0) => UseTree::Glob(v0.clone()),
++ UseTree::Group(v0) => UseTree::Group(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variadic {
++ fn clone(&self) -> Self {
++ Variadic {
++ attrs: self.attrs.clone(),
++ dots: self.dots.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Variant {
++ fn clone(&self) -> Self {
++ Variant {
++ attrs: self.attrs.clone(),
++ ident: self.ident.clone(),
++ fields: self.fields.clone(),
++ discriminant: self.discriminant.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisCrate {
++ fn clone(&self) -> Self {
++ VisCrate {
++ crate_token: self.crate_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisPublic {
++ fn clone(&self) -> Self {
++ VisPublic {
++ pub_token: self.pub_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for VisRestricted {
++ fn clone(&self) -> Self {
++ VisRestricted {
++ pub_token: self.pub_token.clone(),
++ paren_token: self.paren_token.clone(),
++ in_token: self.in_token.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Visibility {
++ fn clone(&self) -> Self {
++ match self {
++ Visibility::Public(v0) => Visibility::Public(v0.clone()),
++ Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
++ Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
++ Visibility::Inherited => Visibility::Inherited,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WhereClause {
++ fn clone(&self) -> Self {
++ WhereClause {
++ where_token: self.where_token.clone(),
++ predicates: self.predicates.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for WherePredicate {
++ fn clone(&self) -> Self {
++ match self {
++ WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
++ WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
++ WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/debug.rs b/third_party/rust/syn/src/gen/debug.rs
+new file mode 100644
+index 0000000000..72baab05f4
+--- /dev/null
++++ third_party/rust/syn/src/gen/debug.rs
+@@ -0,0 +1,2857 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++use crate::*;
++use std::fmt::{self, Debug};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Abi {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Abi");
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("name", &self.name);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AngleBracketedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("AngleBracketedGenericArguments");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Arm {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Arm");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("guard", &self.guard);
++ formatter.field("fat_arrow_token", &self.fat_arrow_token);
++ formatter.field("body", &self.body);
++ formatter.field("comma", &self.comma);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for AttrStyle {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ AttrStyle::Outer => formatter.write_str("Outer"),
++ AttrStyle::Inner(v0) => {
++ let mut formatter = formatter.debug_tuple("Inner");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Attribute {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Attribute");
++ formatter.field("pound_token", &self.pound_token);
++ formatter.field("style", &self.style);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("path", &self.path);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BareFnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BareFnArg");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("name", &self.name);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BinOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ BinOp::Add(v0) => {
++ let mut formatter = formatter.debug_tuple("Add");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Sub(v0) => {
++ let mut formatter = formatter.debug_tuple("Sub");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Mul(v0) => {
++ let mut formatter = formatter.debug_tuple("Mul");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Div(v0) => {
++ let mut formatter = formatter.debug_tuple("Div");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Rem(v0) => {
++ let mut formatter = formatter.debug_tuple("Rem");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::And(v0) => {
++ let mut formatter = formatter.debug_tuple("And");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXor(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXor");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAnd(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAnd");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOr(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shl(v0) => {
++ let mut formatter = formatter.debug_tuple("Shl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Shr(v0) => {
++ let mut formatter = formatter.debug_tuple("Shr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Lt(v0) => {
++ let mut formatter = formatter.debug_tuple("Lt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Le(v0) => {
++ let mut formatter = formatter.debug_tuple("Le");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ne(v0) => {
++ let mut formatter = formatter.debug_tuple("Ne");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Ge(v0) => {
++ let mut formatter = formatter.debug_tuple("Ge");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::Gt(v0) => {
++ let mut formatter = formatter.debug_tuple("Gt");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::AddEq(v0) => {
++ let mut formatter = formatter.debug_tuple("AddEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::SubEq(v0) => {
++ let mut formatter = formatter.debug_tuple("SubEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::MulEq(v0) => {
++ let mut formatter = formatter.debug_tuple("MulEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::DivEq(v0) => {
++ let mut formatter = formatter.debug_tuple("DivEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::RemEq(v0) => {
++ let mut formatter = formatter.debug_tuple("RemEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitXorEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitXorEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitAndEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitAndEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::BitOrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("BitOrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShlEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShlEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ BinOp::ShrEq(v0) => {
++ let mut formatter = formatter.debug_tuple("ShrEq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Binding {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Binding");
++ formatter.field("ident", &self.ident);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Block {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Block");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("stmts", &self.stmts);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for BoundLifetimes {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("BoundLifetimes");
++ formatter.field("for_token", &self.for_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ConstParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ConstParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Constraint {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Constraint");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for Data {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Data::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Data::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataEnum");
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataStruct");
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DataUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DataUnion");
++ formatter.field("union_token", &self.union_token);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "derive")]
++impl Debug for DeriveInput {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("DeriveInput");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("data", &self.data);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Expr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ let mut formatter = formatter.debug_tuple("Assign");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ let mut formatter = formatter.debug_tuple("AssignOp");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ let mut formatter = formatter.debug_tuple("Async");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ let mut formatter = formatter.debug_tuple("Await");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Binary(v0) => {
++ let mut formatter = formatter.debug_tuple("Binary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ let mut formatter = formatter.debug_tuple("Block");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ let mut formatter = formatter.debug_tuple("Break");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Call(v0) => {
++ let mut formatter = formatter.debug_tuple("Call");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Cast(v0) => {
++ let mut formatter = formatter.debug_tuple("Cast");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ let mut formatter = formatter.debug_tuple("Closure");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ let mut formatter = formatter.debug_tuple("Continue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Field(v0) => {
++ let mut formatter = formatter.debug_tuple("Field");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ let mut formatter = formatter.debug_tuple("ForLoop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ let mut formatter = formatter.debug_tuple("If");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Index(v0) => {
++ let mut formatter = formatter.debug_tuple("Index");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ let mut formatter = formatter.debug_tuple("Let");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ let mut formatter = formatter.debug_tuple("Loop");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ let mut formatter = formatter.debug_tuple("Match");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ let mut formatter = formatter.debug_tuple("MethodCall");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ let mut formatter = formatter.debug_tuple("Repeat");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ let mut formatter = formatter.debug_tuple("Return");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ let mut formatter = formatter.debug_tuple("Try");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ let mut formatter = formatter.debug_tuple("TryBlock");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Unary(v0) => {
++ let mut formatter = formatter.debug_tuple("Unary");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ let mut formatter = formatter.debug_tuple("Unsafe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Expr::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ let mut formatter = formatter.debug_tuple("While");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ let mut formatter = formatter.debug_tuple("Yield");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprArray");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssign {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssign");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAssignOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAssignOp");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAsync {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAsync");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("async_token", &self.async_token);
++ formatter.field("capture", &self.capture);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprAwait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprAwait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("await_token", &self.await_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprBinary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBinary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("left", &self.left);
++ formatter.field("op", &self.op);
++ formatter.field("right", &self.right);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprBreak {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprBreak");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("break_token", &self.break_token);
++ formatter.field("label", &self.label);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("func", &self.func);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprCast {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprCast");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprClosure {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprClosure");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("movability", &self.movability);
++ formatter.field("capture", &self.capture);
++ formatter.field("or1_token", &self.or1_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("or2_token", &self.or2_token);
++ formatter.field("output", &self.output);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprContinue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprContinue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("continue_token", &self.continue_token);
++ formatter.field("label", &self.label);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprField {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprField");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("base", &self.base);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("member", &self.member);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprForLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprForLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("for_token", &self.for_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprGroup");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("group_token", &self.group_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprIf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIf");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("if_token", &self.if_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("then_branch", &self.then_branch);
++ formatter.field("else_branch", &self.else_branch);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprIndex {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprIndex");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("index", &self.index);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLet {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLet");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprLoop {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprLoop");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("loop_token", &self.loop_token);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMatch {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMatch");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("match_token", &self.match_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("arms", &self.arms);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprMethodCall {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprMethodCall");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("receiver", &self.receiver);
++ formatter.field("dot_token", &self.dot_token);
++ formatter.field("method", &self.method);
++ formatter.field("turbofish", &self.turbofish);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("args", &self.args);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprParen");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("from", &self.from);
++ formatter.field("limits", &self.limits);
++ formatter.field("to", &self.to);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("raw", &self.raw);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprRepeat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprRepeat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprReturn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprReturn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("return_token", &self.return_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.field("rest", &self.rest);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTry {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTry");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("question_token", &self.question_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTryBlock {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTryBlock");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("try_token", &self.try_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ExprUnary {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnary");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("op", &self.op);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprUnsafe {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprUnsafe");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("unsafe_token", &self.unsafe_token);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprWhile {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprWhile");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("label", &self.label);
++ formatter.field("while_token", &self.while_token);
++ formatter.field("cond", &self.cond);
++ formatter.field("body", &self.body);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ExprYield {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ExprYield");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("yield_token", &self.yield_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Field {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Field");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldPat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldPat");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FieldValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldValue");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("member", &self.member);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Fields {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Fields::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Fields::Unit => formatter.write_str("Unit"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsNamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsNamed");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("named", &self.named);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for FieldsUnnamed {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("FieldsUnnamed");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("unnamed", &self.unnamed);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for File {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("File");
++ formatter.field("shebang", &self.shebang);
++ formatter.field("attrs", &self.attrs);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for FnArg {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ FnArg::Receiver(v0) => {
++ let mut formatter = formatter.debug_tuple("Receiver");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ FnArg::Typed(v0) => {
++ let mut formatter = formatter.debug_tuple("Typed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ForeignItem::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ForeignItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ForeignItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ForeignItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Binding(v0) => {
++ let mut formatter = formatter.debug_tuple("Binding");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Constraint(v0) => {
++ let mut formatter = formatter.debug_tuple("Constraint");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for GenericMethodArgument {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericMethodArgument::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for GenericParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ GenericParam::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ GenericParam::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Generics {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Generics");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("params", &self.params);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.field("where_clause", &self.where_clause);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ImplItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ ImplItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ImplItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ImplItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Index");
++ formatter.field("index", &self.index);
++ formatter.field("span", &self.span);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Item {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Item::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Enum(v0) => {
++ let mut formatter = formatter.debug_tuple("Enum");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ExternCrate(v0) => {
++ let mut formatter = formatter.debug_tuple("ExternCrate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Fn(v0) => {
++ let mut formatter = formatter.debug_tuple("Fn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::ForeignMod(v0) => {
++ let mut formatter = formatter.debug_tuple("ForeignMod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Impl(v0) => {
++ let mut formatter = formatter.debug_tuple("Impl");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Macro2(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro2");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Mod(v0) => {
++ let mut formatter = formatter.debug_tuple("Mod");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Static(v0) => {
++ let mut formatter = formatter.debug_tuple("Static");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::TraitAlias(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitAlias");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Union(v0) => {
++ let mut formatter = formatter.debug_tuple("Union");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Use(v0) => {
++ let mut formatter = formatter.debug_tuple("Use");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Item::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemEnum {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemEnum");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("enum_token", &self.enum_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("variants", &self.variants);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemExternCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemExternCrate");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("extern_token", &self.extern_token);
++ formatter.field("crate_token", &self.crate_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rename", &self.rename);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemFn");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("sig", &self.sig);
++ formatter.field("block", &self.block);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemForeignMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemForeignMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("abi", &self.abi);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemImpl {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemImpl");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("defaultness", &self.defaultness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("generics", &self.generics);
++ formatter.field("trait_", &self.trait_);
++ formatter.field("self_ty", &self.self_ty);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMacro2 {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMacro2");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("macro_token", &self.macro_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("rules", &self.rules);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemMod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemMod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("mod_token", &self.mod_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("content", &self.content);
++ formatter.field("semi", &self.semi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStatic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStatic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("static_token", &self.static_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("expr", &self.expr);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("struct_token", &self.struct_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTrait");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("auto_token", &self.auto_token);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("supertraits", &self.supertraits);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemTraitAlias {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemTraitAlias");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("trait_token", &self.trait_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUnion {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUnion");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("union_token", &self.union_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("fields", &self.fields);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for ItemUse {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ItemUse");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("vis", &self.vis);
++ formatter.field("use_token", &self.use_token);
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("tree", &self.tree);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Label {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Label");
++ formatter.field("name", &self.name);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.finish()
++ }
++}
++impl Debug for Lifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Lifetime");
++ formatter.field("apostrophe", &self.apostrophe);
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for LifetimeDef {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("LifetimeDef");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++impl Debug for Lit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Lit::Str(v0) => {
++ let mut formatter = formatter.debug_tuple("Str");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::ByteStr(v0) => {
++ let mut formatter = formatter.debug_tuple("ByteStr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Byte(v0) => {
++ let mut formatter = formatter.debug_tuple("Byte");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Char(v0) => {
++ let mut formatter = formatter.debug_tuple("Char");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Int(v0) => {
++ let mut formatter = formatter.debug_tuple("Int");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Float(v0) => {
++ let mut formatter = formatter.debug_tuple("Float");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Bool(v0) => {
++ let mut formatter = formatter.debug_tuple("Bool");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Lit::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Local {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Local");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("let_token", &self.let_token);
++ formatter.field("pat", &self.pat);
++ formatter.field("init", &self.init);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Macro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Macro");
++ formatter.field("path", &self.path);
++ formatter.field("bang_token", &self.bang_token);
++ formatter.field("delimiter", &self.delimiter);
++ formatter.field("tokens", &self.tokens);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MacroDelimiter {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ MacroDelimiter::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Brace(v0) => {
++ let mut formatter = formatter.debug_tuple("Brace");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ MacroDelimiter::Bracket(v0) => {
++ let mut formatter = formatter.debug_tuple("Bracket");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(v0) => {
++ let mut formatter = formatter.debug_tuple("Named");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Member::Unnamed(v0) => {
++ let mut formatter = formatter.debug_tuple("Unnamed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Meta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Meta::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::List(v0) => {
++ let mut formatter = formatter.debug_tuple("List");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Meta::NameValue(v0) => {
++ let mut formatter = formatter.debug_tuple("NameValue");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaList {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaList");
++ formatter.field("path", &self.path);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("nested", &self.nested);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for MetaNameValue {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MetaNameValue");
++ formatter.field("path", &self.path);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("lit", &self.lit);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for MethodTurbofish {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("MethodTurbofish");
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("args", &self.args);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for NestedMeta {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ NestedMeta::Meta(v0) => {
++ let mut formatter = formatter.debug_tuple("Meta");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ NestedMeta::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ParenthesizedGenericArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("ParenthesizedGenericArguments");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Pat {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Pat::Box(v0) => {
++ let mut formatter = formatter.debug_tuple("Box");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Ident(v0) => {
++ let mut formatter = formatter.debug_tuple("Ident");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Lit(v0) => {
++ let mut formatter = formatter.debug_tuple("Lit");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Or(v0) => {
++ let mut formatter = formatter.debug_tuple("Or");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Range(v0) => {
++ let mut formatter = formatter.debug_tuple("Range");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Rest(v0) => {
++ let mut formatter = formatter.debug_tuple("Rest");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Struct(v0) => {
++ let mut formatter = formatter.debug_tuple("Struct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::TupleStruct(v0) => {
++ let mut formatter = formatter.debug_tuple("TupleStruct");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Pat::Wild(v0) => {
++ let mut formatter = formatter.debug_tuple("Wild");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatBox {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatBox");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("box_token", &self.box_token);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatIdent {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatIdent");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("by_ref", &self.by_ref);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("ident", &self.ident);
++ formatter.field("subpat", &self.subpat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatLit {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatLit");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("expr", &self.expr);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatOr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatOr");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("leading_vert", &self.leading_vert);
++ formatter.field("cases", &self.cases);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatPath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatPath");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRange {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRange");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("lo", &self.lo);
++ formatter.field("limits", &self.limits);
++ formatter.field("hi", &self.hi);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatReference");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("and_token", &self.and_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatRest {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatRest");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatSlice");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("fields", &self.fields);
++ formatter.field("dot2_token", &self.dot2_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTuple");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatTupleStruct {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatTupleStruct");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("path", &self.path);
++ formatter.field("pat", &self.pat);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("pat", &self.pat);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for PatWild {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PatWild");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Path {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Path");
++ formatter.field("leading_colon", &self.leading_colon);
++ formatter.field("segments", &self.segments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathArguments {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ PathArguments::None => formatter.write_str("None"),
++ PathArguments::AngleBracketed(v0) => {
++ let mut formatter = formatter.debug_tuple("AngleBracketed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ PathArguments::Parenthesized(v0) => {
++ let mut formatter = formatter.debug_tuple("Parenthesized");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PathSegment {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PathSegment");
++ formatter.field("ident", &self.ident);
++ formatter.field("arguments", &self.arguments);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateEq {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateEq");
++ formatter.field("lhs_ty", &self.lhs_ty);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("rhs_ty", &self.rhs_ty);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateLifetime {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateLifetime");
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for PredicateType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("PredicateType");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("bounded_ty", &self.bounded_ty);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for QSelf {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("QSelf");
++ formatter.field("lt_token", &self.lt_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("position", &self.position);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("gt_token", &self.gt_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for RangeLimits {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ RangeLimits::HalfOpen(v0) => {
++ let mut formatter = formatter.debug_tuple("HalfOpen");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ RangeLimits::Closed(v0) => {
++ let mut formatter = formatter.debug_tuple("Closed");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Receiver {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Receiver");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("reference", &self.reference);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("self_token", &self.self_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for ReturnType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ ReturnType::Default => formatter.write_str("Default"),
++ ReturnType::Type(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Signature {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Signature");
++ formatter.field("constness", &self.constness);
++ formatter.field("asyncness", &self.asyncness);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for Stmt {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Stmt::Local(v0) => {
++ let mut formatter = formatter.debug_tuple("Local");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Item(v0) => {
++ let mut formatter = formatter.debug_tuple("Item");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Expr(v0) => {
++ let mut formatter = formatter.debug_tuple("Expr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Stmt::Semi(v0, v1) => {
++ let mut formatter = formatter.debug_tuple("Semi");
++ formatter.field(v0);
++ formatter.field(v1);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitBound");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("modifier", &self.modifier);
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TraitBoundModifier {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitBoundModifier::None => formatter.write_str("None"),
++ TraitBoundModifier::Maybe(v0) => {
++ let mut formatter = formatter.debug_tuple("Maybe");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItem {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TraitItem::Const(v0) => {
++ let mut formatter = formatter.debug_tuple("Const");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Method(v0) => {
++ let mut formatter = formatter.debug_tuple("Method");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TraitItem::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemConst {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemConst");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("ty", &self.ty);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMacro");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("mac", &self.mac);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemMethod {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemMethod");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("sig", &self.sig);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for TraitItemType {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TraitItemType");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("type_token", &self.type_token);
++ formatter.field("ident", &self.ident);
++ formatter.field("generics", &self.generics);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("default", &self.default);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Type {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Type::Array(v0) => {
++ let mut formatter = formatter.debug_tuple("Array");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::BareFn(v0) => {
++ let mut formatter = formatter.debug_tuple("BareFn");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::ImplTrait(v0) => {
++ let mut formatter = formatter.debug_tuple("ImplTrait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Infer(v0) => {
++ let mut formatter = formatter.debug_tuple("Infer");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Macro(v0) => {
++ let mut formatter = formatter.debug_tuple("Macro");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Never(v0) => {
++ let mut formatter = formatter.debug_tuple("Never");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Paren(v0) => {
++ let mut formatter = formatter.debug_tuple("Paren");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Ptr(v0) => {
++ let mut formatter = formatter.debug_tuple("Ptr");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Reference(v0) => {
++ let mut formatter = formatter.debug_tuple("Reference");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Slice(v0) => {
++ let mut formatter = formatter.debug_tuple("Slice");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::TraitObject(v0) => {
++ let mut formatter = formatter.debug_tuple("TraitObject");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Tuple(v0) => {
++ let mut formatter = formatter.debug_tuple("Tuple");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Type::Verbatim(v0) => {
++ let mut formatter = formatter.debug_tuple("Verbatim");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeArray {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeArray");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.field("semi_token", &self.semi_token);
++ formatter.field("len", &self.len);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeBareFn {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeBareFn");
++ formatter.field("lifetimes", &self.lifetimes);
++ formatter.field("unsafety", &self.unsafety);
++ formatter.field("abi", &self.abi);
++ formatter.field("fn_token", &self.fn_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("inputs", &self.inputs);
++ formatter.field("variadic", &self.variadic);
++ formatter.field("output", &self.output);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeGroup");
++ formatter.field("group_token", &self.group_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeImplTrait {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeImplTrait");
++ formatter.field("impl_token", &self.impl_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeInfer {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeInfer");
++ formatter.field("underscore_token", &self.underscore_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeMacro {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeMacro");
++ formatter.field("mac", &self.mac);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeNever {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeNever");
++ formatter.field("bang_token", &self.bang_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParam {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParam");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("colon_token", &self.colon_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.field("eq_token", &self.eq_token);
++ formatter.field("default", &self.default);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParamBound {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ let mut formatter = formatter.debug_tuple("Trait");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ TypeParamBound::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeParen {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeParen");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePath");
++ formatter.field("qself", &self.qself);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypePtr {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypePtr");
++ formatter.field("star_token", &self.star_token);
++ formatter.field("const_token", &self.const_token);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeReference {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeReference");
++ formatter.field("and_token", &self.and_token);
++ formatter.field("lifetime", &self.lifetime);
++ formatter.field("mutability", &self.mutability);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeSlice {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeSlice");
++ formatter.field("bracket_token", &self.bracket_token);
++ formatter.field("elem", &self.elem);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTraitObject {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTraitObject");
++ formatter.field("dyn_token", &self.dyn_token);
++ formatter.field("bounds", &self.bounds);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for TypeTuple {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("TypeTuple");
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("elems", &self.elems);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for UnOp {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UnOp::Deref(v0) => {
++ let mut formatter = formatter.debug_tuple("Deref");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Not(v0) => {
++ let mut formatter = formatter.debug_tuple("Not");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UnOp::Neg(v0) => {
++ let mut formatter = formatter.debug_tuple("Neg");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGlob {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGlob");
++ formatter.field("star_token", &self.star_token);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseGroup {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseGroup");
++ formatter.field("brace_token", &self.brace_token);
++ formatter.field("items", &self.items);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseName {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseName");
++ formatter.field("ident", &self.ident);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UsePath {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UsePath");
++ formatter.field("ident", &self.ident);
++ formatter.field("colon2_token", &self.colon2_token);
++ formatter.field("tree", &self.tree);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseRename {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("UseRename");
++ formatter.field("ident", &self.ident);
++ formatter.field("as_token", &self.as_token);
++ formatter.field("rename", &self.rename);
++ formatter.finish()
++ }
++}
++#[cfg(feature = "full")]
++impl Debug for UseTree {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ UseTree::Path(v0) => {
++ let mut formatter = formatter.debug_tuple("Path");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Name(v0) => {
++ let mut formatter = formatter.debug_tuple("Name");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Rename(v0) => {
++ let mut formatter = formatter.debug_tuple("Rename");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Glob(v0) => {
++ let mut formatter = formatter.debug_tuple("Glob");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ UseTree::Group(v0) => {
++ let mut formatter = formatter.debug_tuple("Group");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variadic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variadic");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("dots", &self.dots);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Variant {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("Variant");
++ formatter.field("attrs", &self.attrs);
++ formatter.field("ident", &self.ident);
++ formatter.field("fields", &self.fields);
++ formatter.field("discriminant", &self.discriminant);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisCrate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisCrate");
++ formatter.field("crate_token", &self.crate_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisPublic {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisPublic");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for VisRestricted {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("VisRestricted");
++ formatter.field("pub_token", &self.pub_token);
++ formatter.field("paren_token", &self.paren_token);
++ formatter.field("in_token", &self.in_token);
++ formatter.field("path", &self.path);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for Visibility {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Visibility::Public(v0) => {
++ let mut formatter = formatter.debug_tuple("Public");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Crate(v0) => {
++ let mut formatter = formatter.debug_tuple("Crate");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Restricted(v0) => {
++ let mut formatter = formatter.debug_tuple("Restricted");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ Visibility::Inherited => formatter.write_str("Inherited"),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WhereClause {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ let mut formatter = formatter.debug_struct("WhereClause");
++ formatter.field("where_token", &self.where_token);
++ formatter.field("predicates", &self.predicates);
++ formatter.finish()
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Debug for WherePredicate {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ WherePredicate::Type(v0) => {
++ let mut formatter = formatter.debug_tuple("Type");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Lifetime(v0) => {
++ let mut formatter = formatter.debug_tuple("Lifetime");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ WherePredicate::Eq(v0) => {
++ let mut formatter = formatter.debug_tuple("Eq");
++ formatter.field(v0);
++ formatter.finish()
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/eq.rs b/third_party/rust/syn/src/gen/eq.rs
+new file mode 100644
+index 0000000000..15b2bcbbde
+--- /dev/null
++++ third_party/rust/syn/src/gen/eq.rs
+@@ -0,0 +1,1930 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Abi {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Abi {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AngleBracketedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AngleBracketedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.colon2_token == other.colon2_token && self.args == other.args
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Arm {}
++#[cfg(feature = "full")]
++impl PartialEq for Arm {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.pat == other.pat
++ && self.guard == other.guard
++ && self.body == other.body
++ && self.comma == other.comma
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for AttrStyle {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (AttrStyle::Outer, AttrStyle::Outer) => true,
++ (AttrStyle::Inner(_), AttrStyle::Inner(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Attribute {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Attribute {
++ fn eq(&self, other: &Self) -> bool {
++ self.style == other.style
++ && self.path == other.path
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BareFnArg {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BareFnArg {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.name == other.name && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BinOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (BinOp::Add(_), BinOp::Add(_)) => true,
++ (BinOp::Sub(_), BinOp::Sub(_)) => true,
++ (BinOp::Mul(_), BinOp::Mul(_)) => true,
++ (BinOp::Div(_), BinOp::Div(_)) => true,
++ (BinOp::Rem(_), BinOp::Rem(_)) => true,
++ (BinOp::And(_), BinOp::And(_)) => true,
++ (BinOp::Or(_), BinOp::Or(_)) => true,
++ (BinOp::BitXor(_), BinOp::BitXor(_)) => true,
++ (BinOp::BitAnd(_), BinOp::BitAnd(_)) => true,
++ (BinOp::BitOr(_), BinOp::BitOr(_)) => true,
++ (BinOp::Shl(_), BinOp::Shl(_)) => true,
++ (BinOp::Shr(_), BinOp::Shr(_)) => true,
++ (BinOp::Eq(_), BinOp::Eq(_)) => true,
++ (BinOp::Lt(_), BinOp::Lt(_)) => true,
++ (BinOp::Le(_), BinOp::Le(_)) => true,
++ (BinOp::Ne(_), BinOp::Ne(_)) => true,
++ (BinOp::Ge(_), BinOp::Ge(_)) => true,
++ (BinOp::Gt(_), BinOp::Gt(_)) => true,
++ (BinOp::AddEq(_), BinOp::AddEq(_)) => true,
++ (BinOp::SubEq(_), BinOp::SubEq(_)) => true,
++ (BinOp::MulEq(_), BinOp::MulEq(_)) => true,
++ (BinOp::DivEq(_), BinOp::DivEq(_)) => true,
++ (BinOp::RemEq(_), BinOp::RemEq(_)) => true,
++ (BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
++ (BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
++ (BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
++ (BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
++ (BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Binding {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Binding {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Block {}
++#[cfg(feature = "full")]
++impl PartialEq for Block {
++ fn eq(&self, other: &Self) -> bool {
++ self.stmts == other.stmts
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for BoundLifetimes {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for BoundLifetimes {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ConstParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ConstParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Constraint {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Constraint {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for Data {}
++#[cfg(feature = "derive")]
++impl PartialEq for Data {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Data::Struct(self0), Data::Struct(other0)) => self0 == other0,
++ (Data::Enum(self0), Data::Enum(other0)) => self0 == other0,
++ (Data::Union(self0), Data::Union(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataEnum {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.variants == other.variants
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataStruct {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DataUnion {}
++#[cfg(feature = "derive")]
++impl PartialEq for DataUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.fields == other.fields
++ }
++}
++#[cfg(feature = "derive")]
++impl Eq for DeriveInput {}
++#[cfg(feature = "derive")]
++impl PartialEq for DeriveInput {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.data == other.data
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Expr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Expr {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ #[cfg(feature = "full")]
++ (Expr::Array(self0), Expr::Array(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
++ (Expr::Binary(self0), Expr::Binary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
++ (Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
++ (Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
++ (Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::If(self0), Expr::If(other0)) => self0 == other0,
++ (Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
++ (Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
++ (Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
++ (Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
++ (Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
++ (Expr::Verbatim(self0), Expr::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ #[cfg(feature = "full")]
++ (Expr::While(self0), Expr::While(other0)) => self0 == other0,
++ #[cfg(feature = "full")]
++ (Expr::Yield(self0), Expr::Yield(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprArray {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssign {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssign {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.left == other.left && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAssignOp {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAssignOp {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAsync {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAsync {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.capture == other.capture && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprAwait {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprAwait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprBinary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprBinary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.left == other.left
++ && self.op == other.op
++ && self.right == other.right
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBox {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprBreak {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprBreak {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCall {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.func == other.func && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprCast {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprCast {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprClosure {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprClosure {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.asyncness == other.asyncness
++ && self.movability == other.movability
++ && self.capture == other.capture
++ && self.inputs == other.inputs
++ && self.output == other.output
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprContinue {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprContinue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprField {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprField {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.base == other.base && self.member == other.member
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprForLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprForLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.pat == other.pat
++ && self.expr == other.expr
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprIf {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprIf {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.cond == other.cond
++ && self.then_branch == other.then_branch
++ && self.else_branch == other.else_branch
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprIndex {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprIndex {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.index == other.index
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLet {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLet {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprLit {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprLoop {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprLoop {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.label == other.label && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMatch {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMatch {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprMethodCall {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprMethodCall {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.receiver == other.receiver
++ && self.method == other.method
++ && self.turbofish == other.turbofish
++ && self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprPath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRange {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.from == other.from
++ && self.limits == other.limits
++ && self.to == other.to
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReference {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprRepeat {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprRepeat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.len == other.len
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprReturn {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprReturn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ && self.rest == other.rest
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTry {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTry {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTryBlock {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTryBlock {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprType {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ExprUnary {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ExprUnary {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.op == other.op && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprUnsafe {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprUnsafe {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprWhile {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprWhile {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.label == other.label
++ && self.cond == other.cond
++ && self.body == other.body
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ExprYield {}
++#[cfg(feature = "full")]
++impl PartialEq for ExprYield {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Field {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Field {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldPat {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldPat {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FieldValue {}
++#[cfg(feature = "full")]
++impl PartialEq for FieldValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.member == other.member
++ && self.colon_token == other.colon_token
++ && self.expr == other.expr
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Fields {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Fields {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Fields::Named(self0), Fields::Named(other0)) => self0 == other0,
++ (Fields::Unnamed(self0), Fields::Unnamed(other0)) => self0 == other0,
++ (Fields::Unit, Fields::Unit) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsNamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsNamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.named == other.named
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for FieldsUnnamed {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for FieldsUnnamed {
++ fn eq(&self, other: &Self) -> bool {
++ self.unnamed == other.unnamed
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for File {}
++#[cfg(feature = "full")]
++impl PartialEq for File {
++ fn eq(&self, other: &Self) -> bool {
++ self.shebang == other.shebang && self.attrs == other.attrs && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for FnArg {}
++#[cfg(feature = "full")]
++impl PartialEq for FnArg {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (FnArg::Receiver(self0), FnArg::Receiver(other0)) => self0 == other0,
++ (FnArg::Typed(self0), FnArg::Typed(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ForeignItem::Fn(self0), ForeignItem::Fn(other0)) => self0 == other0,
++ (ForeignItem::Static(self0), ForeignItem::Static(other0)) => self0 == other0,
++ (ForeignItem::Type(self0), ForeignItem::Type(other0)) => self0 == other0,
++ (ForeignItem::Macro(self0), ForeignItem::Macro(other0)) => self0 == other0,
++ (ForeignItem::Verbatim(self0), ForeignItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.sig == other.sig
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ForeignItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ForeignItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericArgument {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericArgument::Lifetime(self0), GenericArgument::Lifetime(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Type(self0), GenericArgument::Type(other0)) => self0 == other0,
++ (GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => self0 == other0,
++ (GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
++ self0 == other0
++ }
++ (GenericArgument::Const(self0), GenericArgument::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for GenericMethodArgument {}
++#[cfg(feature = "full")]
++impl PartialEq for GenericMethodArgument {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
++ self0 == other0
++ }
++ (GenericMethodArgument::Const(self0), GenericMethodArgument::Const(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for GenericParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for GenericParam {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
++ (GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => self0 == other0,
++ (GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Generics {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Generics {
++ fn eq(&self, other: &Self) -> bool {
++ self.lt_token == other.lt_token
++ && self.params == other.params
++ && self.gt_token == other.gt_token
++ && self.where_clause == other.where_clause
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItem {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
++ (ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
++ (ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
++ (ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
++ (ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ImplItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ImplItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.defaultness == other.defaultness
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Item {}
++#[cfg(feature = "full")]
++impl PartialEq for Item {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Item::Const(self0), Item::Const(other0)) => self0 == other0,
++ (Item::Enum(self0), Item::Enum(other0)) => self0 == other0,
++ (Item::ExternCrate(self0), Item::ExternCrate(other0)) => self0 == other0,
++ (Item::Fn(self0), Item::Fn(other0)) => self0 == other0,
++ (Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
++ (Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
++ (Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
++ (Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
++ (Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
++ (Item::Static(self0), Item::Static(other0)) => self0 == other0,
++ (Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
++ (Item::Trait(self0), Item::Trait(other0)) => self0 == other0,
++ (Item::TraitAlias(self0), Item::TraitAlias(other0)) => self0 == other0,
++ (Item::Type(self0), Item::Type(other0)) => self0 == other0,
++ (Item::Union(self0), Item::Union(other0)) => self0 == other0,
++ (Item::Use(self0), Item::Use(other0)) => self0 == other0,
++ (Item::Verbatim(self0), Item::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemEnum {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemEnum {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.variants == other.variants
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemExternCrate {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemExternCrate {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemFn {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.sig == other.sig
++ && self.block == other.block
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemForeignMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemForeignMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemImpl {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemImpl {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.defaultness == other.defaultness
++ && self.unsafety == other.unsafety
++ && self.generics == other.generics
++ && self.trait_ == other.trait_
++ && self.self_ty == other.self_ty
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.mac == other.mac
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMacro2 {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMacro2 {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemMod {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemMod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.content == other.content
++ && self.semi == other.semi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStatic {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStatic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTrait {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.unsafety == other.unsafety
++ && self.auto_token == other.auto_token
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.supertraits == other.supertraits
++ && self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemTraitAlias {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemTraitAlias {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUnion {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUnion {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.fields == other.fields
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for ItemUse {}
++#[cfg(feature = "full")]
++impl PartialEq for ItemUse {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.vis == other.vis
++ && self.leading_colon == other.leading_colon
++ && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Label {}
++#[cfg(feature = "full")]
++impl PartialEq for Label {
++ fn eq(&self, other: &Self) -> bool {
++ self.name == other.name
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for LifetimeDef {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for LifetimeDef {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lifetime == other.lifetime
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ }
++}
++impl Eq for Lit {}
++impl PartialEq for Lit {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Lit::Str(self0), Lit::Str(other0)) => self0 == other0,
++ (Lit::ByteStr(self0), Lit::ByteStr(other0)) => self0 == other0,
++ (Lit::Byte(self0), Lit::Byte(other0)) => self0 == other0,
++ (Lit::Char(self0), Lit::Char(other0)) => self0 == other0,
++ (Lit::Int(self0), Lit::Int(other0)) => self0 == other0,
++ (Lit::Float(self0), Lit::Float(other0)) => self0 == other0,
++ (Lit::Bool(self0), Lit::Bool(other0)) => self0 == other0,
++ (Lit::Verbatim(self0), Lit::Verbatim(other0)) => {
++ self0.to_string() == other0.to_string()
++ }
++ _ => false,
++ }
++ }
++}
++impl Eq for LitBool {}
++impl PartialEq for LitBool {
++ fn eq(&self, other: &Self) -> bool {
++ self.value == other.value
++ }
++}
++impl Eq for LitByte {}
++impl Eq for LitByteStr {}
++impl Eq for LitChar {}
++impl Eq for LitFloat {}
++impl Eq for LitInt {}
++impl Eq for LitStr {}
++#[cfg(feature = "full")]
++impl Eq for Local {}
++#[cfg(feature = "full")]
++impl PartialEq for Local {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Macro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Macro {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path
++ && self.delimiter == other.delimiter
++ && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MacroDelimiter {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MacroDelimiter {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (MacroDelimiter::Paren(_), MacroDelimiter::Paren(_)) => true,
++ (MacroDelimiter::Brace(_), MacroDelimiter::Brace(_)) => true,
++ (MacroDelimiter::Bracket(_), MacroDelimiter::Bracket(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Meta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Meta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Meta::Path(self0), Meta::Path(other0)) => self0 == other0,
++ (Meta::List(self0), Meta::List(other0)) => self0 == other0,
++ (Meta::NameValue(self0), Meta::NameValue(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaList {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaList {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.nested == other.nested
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for MetaNameValue {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for MetaNameValue {
++ fn eq(&self, other: &Self) -> bool {
++ self.path == other.path && self.lit == other.lit
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for MethodTurbofish {}
++#[cfg(feature = "full")]
++impl PartialEq for MethodTurbofish {
++ fn eq(&self, other: &Self) -> bool {
++ self.args == other.args
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for NestedMeta {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for NestedMeta {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
++ (NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ParenthesizedGenericArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ParenthesizedGenericArguments {
++ fn eq(&self, other: &Self) -> bool {
++ self.inputs == other.inputs && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Pat {}
++#[cfg(feature = "full")]
++impl PartialEq for Pat {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
++ (Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
++ (Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
++ (Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
++ (Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
++ (Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
++ (Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
++ (Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
++ (Pat::Rest(self0), Pat::Rest(other0)) => self0 == other0,
++ (Pat::Slice(self0), Pat::Slice(other0)) => self0 == other0,
++ (Pat::Struct(self0), Pat::Struct(other0)) => self0 == other0,
++ (Pat::Tuple(self0), Pat::Tuple(other0)) => self0 == other0,
++ (Pat::TupleStruct(self0), Pat::TupleStruct(other0)) => self0 == other0,
++ (Pat::Type(self0), Pat::Type(other0)) => self0 == other0,
++ (Pat::Verbatim(self0), Pat::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ (Pat::Wild(self0), Pat::Wild(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatBox {}
++#[cfg(feature = "full")]
++impl PartialEq for PatBox {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatIdent {}
++#[cfg(feature = "full")]
++impl PartialEq for PatIdent {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.by_ref == other.by_ref
++ && self.mutability == other.mutability
++ && self.ident == other.ident
++ && self.subpat == other.subpat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatLit {}
++#[cfg(feature = "full")]
++impl PartialEq for PatLit {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.expr == other.expr
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for PatMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatOr {}
++#[cfg(feature = "full")]
++impl PartialEq for PatOr {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.leading_vert == other.leading_vert
++ && self.cases == other.cases
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatPath {}
++#[cfg(feature = "full")]
++impl PartialEq for PatPath {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRange {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRange {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.lo == other.lo
++ && self.limits == other.limits
++ && self.hi == other.hi
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatReference {}
++#[cfg(feature = "full")]
++impl PartialEq for PatReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mutability == other.mutability && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatRest {}
++#[cfg(feature = "full")]
++impl PartialEq for PatRest {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatSlice {}
++#[cfg(feature = "full")]
++impl PartialEq for PatSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.path == other.path
++ && self.fields == other.fields
++ && self.dot2_token == other.dot2_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTuple {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.elems == other.elems
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatTupleStruct {}
++#[cfg(feature = "full")]
++impl PartialEq for PatTupleStruct {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatType {}
++#[cfg(feature = "full")]
++impl PartialEq for PatType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.pat == other.pat && self.ty == other.ty
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for PatWild {}
++#[cfg(feature = "full")]
++impl PartialEq for PatWild {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Path {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Path {
++ fn eq(&self, other: &Self) -> bool {
++ self.leading_colon == other.leading_colon && self.segments == other.segments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathArguments {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathArguments {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (PathArguments::None, PathArguments::None) => true,
++ (PathArguments::AngleBracketed(self0), PathArguments::AngleBracketed(other0)) => {
++ self0 == other0
++ }
++ (PathArguments::Parenthesized(self0), PathArguments::Parenthesized(other0)) => {
++ self0 == other0
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PathSegment {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PathSegment {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.arguments == other.arguments
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateEq {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateEq {
++ fn eq(&self, other: &Self) -> bool {
++ self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateLifetime {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateLifetime {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for PredicateType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for PredicateType {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.bounded_ty == other.bounded_ty
++ && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for QSelf {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for QSelf {
++ fn eq(&self, other: &Self) -> bool {
++ self.ty == other.ty && self.position == other.position && self.as_token == other.as_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for RangeLimits {}
++#[cfg(feature = "full")]
++impl PartialEq for RangeLimits {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (RangeLimits::HalfOpen(_), RangeLimits::HalfOpen(_)) => true,
++ (RangeLimits::Closed(_), RangeLimits::Closed(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Receiver {}
++#[cfg(feature = "full")]
++impl PartialEq for Receiver {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.reference == other.reference
++ && self.mutability == other.mutability
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for ReturnType {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for ReturnType {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (ReturnType::Default, ReturnType::Default) => true,
++ (ReturnType::Type(_, self1), ReturnType::Type(_, other1)) => self1 == other1,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Signature {}
++#[cfg(feature = "full")]
++impl PartialEq for Signature {
++ fn eq(&self, other: &Self) -> bool {
++ self.constness == other.constness
++ && self.asyncness == other.asyncness
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for Stmt {}
++#[cfg(feature = "full")]
++impl PartialEq for Stmt {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
++ (Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
++ (Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
++ (Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBound {
++ fn eq(&self, other: &Self) -> bool {
++ self.paren_token == other.paren_token
++ && self.modifier == other.modifier
++ && self.lifetimes == other.lifetimes
++ && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TraitBoundModifier {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TraitBoundModifier {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitBoundModifier::None, TraitBoundModifier::None) => true,
++ (TraitBoundModifier::Maybe(_), TraitBoundModifier::Maybe(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItem {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItem {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
++ (TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
++ (TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
++ (TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
++ (TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemConst {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemConst {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.ty == other.ty
++ && self.default == other.default
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMacro {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs && self.mac == other.mac && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemMethod {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemMethod {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.sig == other.sig
++ && self.default == other.default
++ && self.semi_token == other.semi_token
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for TraitItemType {}
++#[cfg(feature = "full")]
++impl PartialEq for TraitItemType {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.generics == other.generics
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Type {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Type {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Type::Array(self0), Type::Array(other0)) => self0 == other0,
++ (Type::BareFn(self0), Type::BareFn(other0)) => self0 == other0,
++ (Type::Group(self0), Type::Group(other0)) => self0 == other0,
++ (Type::ImplTrait(self0), Type::ImplTrait(other0)) => self0 == other0,
++ (Type::Infer(self0), Type::Infer(other0)) => self0 == other0,
++ (Type::Macro(self0), Type::Macro(other0)) => self0 == other0,
++ (Type::Never(self0), Type::Never(other0)) => self0 == other0,
++ (Type::Paren(self0), Type::Paren(other0)) => self0 == other0,
++ (Type::Path(self0), Type::Path(other0)) => self0 == other0,
++ (Type::Ptr(self0), Type::Ptr(other0)) => self0 == other0,
++ (Type::Reference(self0), Type::Reference(other0)) => self0 == other0,
++ (Type::Slice(self0), Type::Slice(other0)) => self0 == other0,
++ (Type::TraitObject(self0), Type::TraitObject(other0)) => self0 == other0,
++ (Type::Tuple(self0), Type::Tuple(other0)) => self0 == other0,
++ (Type::Verbatim(self0), Type::Verbatim(other0)) => {
++ TokenStreamHelper(self0) == TokenStreamHelper(other0)
++ }
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeArray {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeArray {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem && self.len == other.len
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeBareFn {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeBareFn {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetimes == other.lifetimes
++ && self.unsafety == other.unsafety
++ && self.abi == other.abi
++ && self.inputs == other.inputs
++ && self.variadic == other.variadic
++ && self.output == other.output
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeGroup {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeImplTrait {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeImplTrait {
++ fn eq(&self, other: &Self) -> bool {
++ self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeInfer {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeInfer {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeMacro {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeMacro {
++ fn eq(&self, other: &Self) -> bool {
++ self.mac == other.mac
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeNever {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeNever {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParam {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParam {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.colon_token == other.colon_token
++ && self.bounds == other.bounds
++ && self.eq_token == other.eq_token
++ && self.default == other.default
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParamBound {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParamBound {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (TypeParamBound::Trait(self0), TypeParamBound::Trait(other0)) => self0 == other0,
++ (TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeParen {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeParen {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePath {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.qself == other.qself && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypePtr {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypePtr {
++ fn eq(&self, other: &Self) -> bool {
++ self.const_token == other.const_token
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeReference {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeReference {
++ fn eq(&self, other: &Self) -> bool {
++ self.lifetime == other.lifetime
++ && self.mutability == other.mutability
++ && self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeSlice {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeSlice {
++ fn eq(&self, other: &Self) -> bool {
++ self.elem == other.elem
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTraitObject {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTraitObject {
++ fn eq(&self, other: &Self) -> bool {
++ self.dyn_token == other.dyn_token && self.bounds == other.bounds
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for TypeTuple {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for TypeTuple {
++ fn eq(&self, other: &Self) -> bool {
++ self.elems == other.elems
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for UnOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for UnOp {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UnOp::Deref(_), UnOp::Deref(_)) => true,
++ (UnOp::Not(_), UnOp::Not(_)) => true,
++ (UnOp::Neg(_), UnOp::Neg(_)) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGlob {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGlob {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseGroup {}
++#[cfg(feature = "full")]
++impl PartialEq for UseGroup {
++ fn eq(&self, other: &Self) -> bool {
++ self.items == other.items
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseName {}
++#[cfg(feature = "full")]
++impl PartialEq for UseName {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UsePath {}
++#[cfg(feature = "full")]
++impl PartialEq for UsePath {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.tree == other.tree
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseRename {}
++#[cfg(feature = "full")]
++impl PartialEq for UseRename {
++ fn eq(&self, other: &Self) -> bool {
++ self.ident == other.ident && self.rename == other.rename
++ }
++}
++#[cfg(feature = "full")]
++impl Eq for UseTree {}
++#[cfg(feature = "full")]
++impl PartialEq for UseTree {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (UseTree::Path(self0), UseTree::Path(other0)) => self0 == other0,
++ (UseTree::Name(self0), UseTree::Name(other0)) => self0 == other0,
++ (UseTree::Rename(self0), UseTree::Rename(other0)) => self0 == other0,
++ (UseTree::Glob(self0), UseTree::Glob(other0)) => self0 == other0,
++ (UseTree::Group(self0), UseTree::Group(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variadic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variadic {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Variant {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Variant {
++ fn eq(&self, other: &Self) -> bool {
++ self.attrs == other.attrs
++ && self.ident == other.ident
++ && self.fields == other.fields
++ && self.discriminant == other.discriminant
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisCrate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisCrate {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisPublic {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisPublic {
++ fn eq(&self, _other: &Self) -> bool {
++ true
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for VisRestricted {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for VisRestricted {
++ fn eq(&self, other: &Self) -> bool {
++ self.in_token == other.in_token && self.path == other.path
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for Visibility {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for Visibility {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
++ (Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
++ (Visibility::Restricted(self0), Visibility::Restricted(other0)) => self0 == other0,
++ (Visibility::Inherited, Visibility::Inherited) => true,
++ _ => false,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WhereClause {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WhereClause {
++ fn eq(&self, other: &Self) -> bool {
++ self.predicates == other.predicates
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Eq for WherePredicate {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl PartialEq for WherePredicate {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (WherePredicate::Type(self0), WherePredicate::Type(other0)) => self0 == other0,
++ (WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => self0 == other0,
++ (WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
++ _ => false,
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/fold.rs b/third_party/rust/syn/src/gen/fold.rs
+index f51218b78c..d9dd32a420 100644
+--- third_party/rust/syn/src/gen/fold.rs
++++ third_party/rust/syn/src/gen/fold.rs
+@@ -2,6 +2,7 @@
+ // It is not intended for manual editing.
+
+ #![allow(unreachable_code, unused_variables)]
++#![allow(clippy::match_wildcard_for_single_variants)]
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::gen::helper::fold::*;
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -26,7 +27,7 @@ macro_rules! full {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"fold"` feature.*
++/// *This trait is available only if Syn is built with the `"fold"` feature.*
+ pub trait Fold {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_abi(&mut self, i: Abi) -> Abi {
+@@ -433,35 +434,27 @@ pub trait Fold {
+ fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
+ fold_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit(&mut self, i: Lit) -> Lit {
+ fold_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
+ fold_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
+ fold_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
+ fold_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
+ fold_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
+ fold_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
+ fold_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
+ fold_lit_str(self, i)
+ }
+@@ -799,10 +792,10 @@ where
+ F: Fold + ?Sized,
+ {
+ AngleBracketedGenericArguments {
+- colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -819,9 +812,9 @@ where
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
++ fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
+ body: Box::new(f.fold_expr(*node.body)),
+- comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
++ comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -842,7 +835,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Attribute {
+- pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
++ pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
+ style: f.fold_attr_style(node.style),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ path: f.fold_path(node.path),
+@@ -859,7 +852,7 @@ where
+ name: (node.name).map(|it| {
+ (
+ f.fold_ident((it).0),
+- Token ! [ : ](tokens_helper(f, &(it).1.spans)),
++ Token ! [:](tokens_helper(f, &(it).1.spans)),
+ )
+ }),
+ ty: f.fold_type(node.ty),
+@@ -871,59 +864,47 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
++ BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXor(_binding_0) => {
+- BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAnd(_binding_0) => {
+- BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::BitOr(_binding_0) => {
+- BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
+- BinOp::AddEq(_binding_0) => {
+- BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::SubEq(_binding_0) => {
+- BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::MulEq(_binding_0) => {
+- BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::DivEq(_binding_0) => {
+- BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
+- }
+- BinOp::RemEq(_binding_0) => {
+- BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
+ }
++ BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
++ BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
++ BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
+ BinOp::BitXorEq(_binding_0) => {
+- BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitAndEq(_binding_0) => {
+- BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::BitOrEq(_binding_0) => {
+- BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShlEq(_binding_0) => {
+- BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
+ }
+ BinOp::ShrEq(_binding_0) => {
+- BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
++ BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -934,7 +915,7 @@ where
+ {
+ Binding {
+ ident: f.fold_ident(node.ident),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -955,9 +936,9 @@ where
+ {
+ BoundLifetimes {
+ for_token: Token![for](tokens_helper(f, &node.for_token.span)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -969,9 +950,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_expr(it)),
+ }
+ }
+@@ -982,7 +963,7 @@ where
+ {
+ Constraint {
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -1016,7 +997,7 @@ where
+ DataStruct {
+ struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "derive")]
+@@ -1112,7 +1093,7 @@ where
+ ExprAssign {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ left: Box::new(f.fold_expr(*node.left)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ right: Box::new(f.fold_expr(*node.right)),
+ }
+ }
+@@ -1148,7 +1129,7 @@ where
+ ExprAwait {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
+ }
+ }
+@@ -1232,9 +1213,9 @@ where
+ asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
+ movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
+ capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
+- or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
++ or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
+ inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
+- or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
++ or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
+ output: f.fold_return_type(node.output),
+ body: Box::new(f.fold_expr(*node.body)),
+ }
+@@ -1258,7 +1239,7 @@ where
+ ExprField {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ base: Box::new(f.fold_expr(*node.base)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ member: f.fold_member(node.member),
+ }
+ }
+@@ -1327,7 +1308,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ let_token: Token![let](tokens_helper(f, &node.let_token.span)),
+ pat: f.fold_pat(node.pat),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+ }
+ }
+@@ -1384,7 +1365,7 @@ where
+ ExprMethodCall {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ receiver: Box::new(f.fold_expr(*node.receiver)),
+- dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
++ dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
+ method: f.fold_ident(node.method),
+ turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
+ paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
+@@ -1432,7 +1413,7 @@ where
+ {
+ ExprReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ raw: node.raw,
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ expr: Box::new(f.fold_expr(*node.expr)),
+@@ -1447,7 +1428,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: Box::new(f.fold_expr(*node.len)),
+ }
+ }
+@@ -1484,7 +1465,7 @@ where
+ ExprTry {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
++ question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1517,7 +1498,7 @@ where
+ ExprType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -1576,7 +1557,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ ty: f.fold_type(node.ty),
+ }
+ }
+@@ -1588,7 +1569,7 @@ where
+ FieldPat {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+ }
+@@ -1600,7 +1581,7 @@ where
+ FieldValue {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ member: f.fold_member(node.member),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ expr: f.fold_expr(node.expr),
+ }
+ }
+@@ -1681,7 +1662,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ sig: f.fold_signature(node.sig),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1692,7 +1673,7 @@ where
+ ForeignItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1706,9 +1687,9 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1721,7 +1702,7 @@ where
+ vis: f.fold_visibility(node.vis),
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1779,9 +1760,9 @@ where
+ F: Fold + ?Sized,
+ {
+ Generics {
+- lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
++ lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
+ params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
+- gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
++ gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
+ where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
+ }
+ }
+@@ -1819,11 +1800,11 @@ where
+ defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: f.fold_expr(node.expr),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1834,7 +1815,7 @@ where
+ ImplItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1862,9 +1843,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: f.fold_type(node.ty),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -1913,11 +1894,11 @@ where
+ vis: f.fold_visibility(node.vis),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -1952,7 +1933,7 @@ where
+ f.fold_ident((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2011,7 +1992,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: (node.ident).map(|it| f.fold_ident(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2043,7 +2024,7 @@ where
+ FoldHelper::lift((it).1, |it| f.fold_item(it)),
+ )
+ }),
+- semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2057,11 +2038,11 @@ where
+ static_token: Token![static](tokens_helper(f, &node.static_token.span)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ expr: Box::new(f.fold_expr(*node.expr)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2076,7 +2057,7 @@ where
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+ fields: f.fold_fields(node.fields),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2092,7 +2073,7 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
+ brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
+ items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
+@@ -2109,9 +2090,9 @@ where
+ trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2125,9 +2106,9 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2153,9 +2134,9 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ vis: f.fold_visibility(node.vis),
+ use_token: Token![use](tokens_helper(f, &node.use_token.span)),
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ tree: f.fold_use_tree(node.tree),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2165,7 +2146,7 @@ where
+ {
+ Label {
+ name: f.fold_lifetime(node.name),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ }
+ }
+ pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
+@@ -2185,11 +2166,10 @@ where
+ LifetimeDef {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
+ where
+ F: Fold + ?Sized,
+@@ -2205,7 +2185,6 @@ where
+ Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
+ where
+ F: Fold + ?Sized,
+@@ -2215,7 +2194,6 @@ where
+ span: f.fold_span(node.span),
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
+ where
+ F: Fold + ?Sized,
+@@ -2225,7 +2203,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
+ where
+ F: Fold + ?Sized,
+@@ -2235,7 +2212,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
+ where
+ F: Fold + ?Sized,
+@@ -2245,7 +2221,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
+ where
+ F: Fold + ?Sized,
+@@ -2255,7 +2230,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
+ where
+ F: Fold + ?Sized,
+@@ -2265,7 +2239,6 @@ where
+ node.set_span(span);
+ node
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
+ where
+ F: Fold + ?Sized,
+@@ -2286,11 +2259,11 @@ where
+ pat: f.fold_pat(node.pat),
+ init: (node.init).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_expr(*(it).1)),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2361,7 +2334,7 @@ where
+ {
+ MetaNameValue {
+ path: f.fold_path(node.path),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ lit: f.fold_lit(node.lit),
+ }
+ }
+@@ -2371,10 +2344,10 @@ where
+ F: Fold + ?Sized,
+ {
+ MethodTurbofish {
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2449,7 +2422,7 @@ where
+ ident: f.fold_ident(node.ident),
+ subpat: (node.subpat).map(|it| {
+ (
+- Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
++ Token ! [@](tokens_helper(f, &(it).0.spans)),
+ Box::new(f.fold_pat(*(it).1)),
+ )
+ }),
+@@ -2482,7 +2455,7 @@ where
+ {
+ PatOr {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
++ leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
+ cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
+ }
+ }
+@@ -2516,7 +2489,7 @@ where
+ {
+ PatReference {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ pat: Box::new(f.fold_pat(*node.pat)),
+ }
+@@ -2585,7 +2558,7 @@ where
+ PatType {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ pat: Box::new(f.fold_pat(*node.pat)),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ }
+ }
+@@ -2605,7 +2578,7 @@ where
+ F: Fold + ?Sized,
+ {
+ Path {
+- leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
++ leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
+ segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
+ }
+ }
+@@ -2641,7 +2614,7 @@ where
+ {
+ PredicateEq {
+ lhs_ty: f.fold_type(node.lhs_ty),
+- eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
++ eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
+ rhs_ty: f.fold_type(node.rhs_ty),
+ }
+ }
+@@ -2652,7 +2625,7 @@ where
+ {
+ PredicateLifetime {
+ lifetime: f.fold_lifetime(node.lifetime),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
+ }
+ }
+@@ -2664,7 +2637,7 @@ where
+ PredicateType {
+ lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
+ bounded_ty: f.fold_type(node.bounded_ty),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ }
+ }
+@@ -2674,11 +2647,11 @@ where
+ F: Fold + ?Sized,
+ {
+ QSelf {
+- lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
++ lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
+ ty: Box::new(f.fold_type(*node.ty)),
+ position: node.position,
+ as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
+- gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
++ gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2691,7 +2664,7 @@ where
+ RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
+ }
+ RangeLimits::Closed(_binding_0) => {
+- RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
++ RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2704,7 +2677,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ reference: (node.reference).map(|it| {
+ (
+- Token ! [ & ](tokens_helper(f, &(it).0.spans)),
++ Token ! [&](tokens_helper(f, &(it).0.spans)),
+ ((it).1).map(|it| f.fold_lifetime(it)),
+ )
+ }),
+@@ -2720,7 +2693,7 @@ where
+ match node {
+ ReturnType::Default => ReturnType::Default,
+ ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
+- Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
++ Token ! [->](tokens_helper(f, &_binding_0.spans)),
+ Box::new(f.fold_type(*_binding_1)),
+ ),
+ }
+@@ -2761,7 +2734,7 @@ where
+ Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
+ Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
+ f.fold_expr(_binding_0),
+- Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
++ Token ! [;](tokens_helper(f, &_binding_1.spans)),
+ ),
+ }
+ }
+@@ -2785,7 +2758,7 @@ where
+ match node {
+ TraitBoundModifier::None => TraitBoundModifier::None,
+ TraitBoundModifier::Maybe(_binding_0) => {
+- TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
++ TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
+ }
+ }
+ }
+@@ -2812,15 +2785,15 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ const_token: Token![const](tokens_helper(f, &node.const_token.span)),
+ ident: f.fold_ident(node.ident),
+- colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
++ colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
+ ty: f.fold_type(node.ty),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2831,7 +2804,7 @@ where
+ TraitItemMacro {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ mac: f.fold_macro(node.mac),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2843,7 +2816,7 @@ where
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ sig: f.fold_signature(node.sig),
+ default: (node.default).map(|it| f.fold_block(it)),
+- semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
++ semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -2856,15 +2829,15 @@ where
+ type_token: Token![type](tokens_helper(f, &node.type_token.span)),
+ ident: f.fold_ident(node.ident),
+ generics: f.fold_generics(node.generics),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+ default: (node.default).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_type((it).1),
+ )
+ }),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -2899,7 +2872,7 @@ where
+ TypeArray {
+ bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
+ elem: Box::new(f.fold_type(*node.elem)),
+- semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
++ semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
+ len: f.fold_expr(node.len),
+ }
+ }
+@@ -2974,9 +2947,9 @@ where
+ TypeParam {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+ ident: f.fold_ident(node.ident),
+- colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
++ colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
+ bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
+- eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
++ eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
+ default: (node.default).map(|it| f.fold_type(it)),
+ }
+ }
+@@ -3018,7 +2991,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypePtr {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3030,7 +3003,7 @@ where
+ F: Fold + ?Sized,
+ {
+ TypeReference {
+- and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
++ and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
+ lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
+ mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
+ elem: Box::new(f.fold_type(*node.elem)),
+@@ -3072,9 +3045,9 @@ where
+ F: Fold + ?Sized,
+ {
+ match node {
+- UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
+ UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
+- UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
++ UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3083,7 +3056,7 @@ where
+ F: Fold + ?Sized,
+ {
+ UseGlob {
+- star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
++ star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
+ }
+ }
+ #[cfg(feature = "full")]
+@@ -3112,7 +3085,7 @@ where
+ {
+ UsePath {
+ ident: f.fold_ident(node.ident),
+- colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
++ colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
+ tree: Box::new(f.fold_use_tree(*node.tree)),
+ }
+ }
+@@ -3147,7 +3120,7 @@ where
+ {
+ Variadic {
+ attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
+- dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
++ dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
+ }
+ }
+ #[cfg(any(feature = "derive", feature = "full"))]
+@@ -3161,7 +3134,7 @@ where
+ fields: f.fold_fields(node.fields),
+ discriminant: (node.discriminant).map(|it| {
+ (
+- Token ! [ = ](tokens_helper(f, &(it).0.spans)),
++ Token ! [=](tokens_helper(f, &(it).0.spans)),
+ f.fold_expr((it).1),
+ )
+ }),
+diff --git a/third_party/rust/syn/src/gen/hash.rs b/third_party/rust/syn/src/gen/hash.rs
+new file mode 100644
+index 0000000000..9e9e84a7af
+--- /dev/null
++++ third_party/rust/syn/src/gen/hash.rs
+@@ -0,0 +1,2691 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#[cfg(any(feature = "derive", feature = "full"))]
++use crate::tt::TokenStreamHelper;
++use crate::*;
++use std::hash::{Hash, Hasher};
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Abi {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AngleBracketedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.colon2_token.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Arm {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.guard.hash(state);
++ self.body.hash(state);
++ self.comma.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for AttrStyle {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ AttrStyle::Outer => {
++ state.write_u8(0u8);
++ }
++ AttrStyle::Inner(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Attribute {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.style.hash(state);
++ self.path.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BareFnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.name.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BinOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ BinOp::Add(_) => {
++ state.write_u8(0u8);
++ }
++ BinOp::Sub(_) => {
++ state.write_u8(1u8);
++ }
++ BinOp::Mul(_) => {
++ state.write_u8(2u8);
++ }
++ BinOp::Div(_) => {
++ state.write_u8(3u8);
++ }
++ BinOp::Rem(_) => {
++ state.write_u8(4u8);
++ }
++ BinOp::And(_) => {
++ state.write_u8(5u8);
++ }
++ BinOp::Or(_) => {
++ state.write_u8(6u8);
++ }
++ BinOp::BitXor(_) => {
++ state.write_u8(7u8);
++ }
++ BinOp::BitAnd(_) => {
++ state.write_u8(8u8);
++ }
++ BinOp::BitOr(_) => {
++ state.write_u8(9u8);
++ }
++ BinOp::Shl(_) => {
++ state.write_u8(10u8);
++ }
++ BinOp::Shr(_) => {
++ state.write_u8(11u8);
++ }
++ BinOp::Eq(_) => {
++ state.write_u8(12u8);
++ }
++ BinOp::Lt(_) => {
++ state.write_u8(13u8);
++ }
++ BinOp::Le(_) => {
++ state.write_u8(14u8);
++ }
++ BinOp::Ne(_) => {
++ state.write_u8(15u8);
++ }
++ BinOp::Ge(_) => {
++ state.write_u8(16u8);
++ }
++ BinOp::Gt(_) => {
++ state.write_u8(17u8);
++ }
++ BinOp::AddEq(_) => {
++ state.write_u8(18u8);
++ }
++ BinOp::SubEq(_) => {
++ state.write_u8(19u8);
++ }
++ BinOp::MulEq(_) => {
++ state.write_u8(20u8);
++ }
++ BinOp::DivEq(_) => {
++ state.write_u8(21u8);
++ }
++ BinOp::RemEq(_) => {
++ state.write_u8(22u8);
++ }
++ BinOp::BitXorEq(_) => {
++ state.write_u8(23u8);
++ }
++ BinOp::BitAndEq(_) => {
++ state.write_u8(24u8);
++ }
++ BinOp::BitOrEq(_) => {
++ state.write_u8(25u8);
++ }
++ BinOp::ShlEq(_) => {
++ state.write_u8(26u8);
++ }
++ BinOp::ShrEq(_) => {
++ state.write_u8(27u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Binding {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Block {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.stmts.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for BoundLifetimes {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ConstParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Constraint {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for Data {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Data::Struct(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Data::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Data::Union(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DataUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "derive")]
++impl Hash for DeriveInput {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.data.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Expr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Expr::Binary(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Expr::Call(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Expr::Cast(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Expr::Field(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::If(v0) => {
++ state.write_u8(16u8);
++ v0.hash(state);
++ }
++ Expr::Index(v0) => {
++ state.write_u8(17u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => {
++ state.write_u8(18u8);
++ v0.hash(state);
++ }
++ Expr::Lit(v0) => {
++ state.write_u8(19u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => {
++ state.write_u8(20u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => {
++ state.write_u8(21u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => {
++ state.write_u8(22u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => {
++ state.write_u8(23u8);
++ v0.hash(state);
++ }
++ Expr::Paren(v0) => {
++ state.write_u8(24u8);
++ v0.hash(state);
++ }
++ Expr::Path(v0) => {
++ state.write_u8(25u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => {
++ state.write_u8(26u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => {
++ state.write_u8(27u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => {
++ state.write_u8(28u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => {
++ state.write_u8(29u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => {
++ state.write_u8(30u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => {
++ state.write_u8(31u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => {
++ state.write_u8(32u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => {
++ state.write_u8(33u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => {
++ state.write_u8(34u8);
++ v0.hash(state);
++ }
++ Expr::Unary(v0) => {
++ state.write_u8(35u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => {
++ state.write_u8(36u8);
++ v0.hash(state);
++ }
++ Expr::Verbatim(v0) => {
++ state.write_u8(37u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::While(v0) => {
++ state.write_u8(38u8);
++ v0.hash(state);
++ }
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => {
++ state.write_u8(39u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssign {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAssignOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAsync {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.capture.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprAwait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprBinary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.left.hash(state);
++ self.op.hash(state);
++ self.right.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprBreak {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.func.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprCast {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprClosure {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.asyncness.hash(state);
++ self.movability.hash(state);
++ self.capture.hash(state);
++ self.inputs.hash(state);
++ self.output.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprContinue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprField {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.base.hash(state);
++ self.member.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprForLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprIf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.cond.hash(state);
++ self.then_branch.hash(state);
++ self.else_branch.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprIndex {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.index.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLet {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprLoop {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMatch {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.arms.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprMethodCall {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.receiver.hash(state);
++ self.method.hash(state);
++ self.turbofish.hash(state);
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.from.hash(state);
++ self.limits.hash(state);
++ self.to.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprRepeat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprReturn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ self.rest.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTry {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTryBlock {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ExprUnary {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.op.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprUnsafe {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprWhile {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.label.hash(state);
++ self.cond.hash(state);
++ self.body.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ExprYield {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Field {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldPat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FieldValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.member.hash(state);
++ self.colon_token.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Fields {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Fields::Named(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Fields::Unnamed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Fields::Unit => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsNamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.named.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for FieldsUnnamed {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.unnamed.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for File {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.shebang.hash(state);
++ self.attrs.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for FnArg {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ FnArg::Receiver(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ FnArg::Typed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ForeignItem::Fn(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ForeignItem::Static(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ForeignItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ForeignItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ForeignItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ForeignItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericArgument::Lifetime(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericArgument::Type(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericArgument::Binding(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ GenericArgument::Constraint(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ GenericArgument::Const(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for GenericMethodArgument {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericMethodArgument::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericMethodArgument::Const(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for GenericParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ GenericParam::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ GenericParam::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ GenericParam::Const(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Generics {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lt_token.hash(state);
++ self.params.hash(state);
++ self.gt_token.hash(state);
++ self.where_clause.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ImplItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ ImplItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ ImplItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ ImplItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ ImplItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ImplItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.defaultness.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Item {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Item::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Item::Enum(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Item::ExternCrate(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Item::Fn(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Item::ForeignMod(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Item::Impl(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Item::Macro(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Item::Macro2(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Item::Mod(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Item::Static(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Item::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Item::Trait(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Item::TraitAlias(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Item::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Item::Union(v0) => {
++ state.write_u8(14u8);
++ v0.hash(state);
++ }
++ Item::Use(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ Item::Verbatim(v0) => {
++ state.write_u8(16u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemEnum {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.variants.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemExternCrate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.sig.hash(state);
++ self.block.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemForeignMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.abi.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemImpl {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.defaultness.hash(state);
++ self.unsafety.hash(state);
++ self.generics.hash(state);
++ self.trait_.hash(state);
++ self.self_ty.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMacro2 {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ TokenStreamHelper(&self.rules).hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemMod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.content.hash(state);
++ self.semi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStatic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.unsafety.hash(state);
++ self.auto_token.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.supertraits.hash(state);
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemTraitAlias {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUnion {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.fields.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for ItemUse {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.vis.hash(state);
++ self.leading_colon.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Label {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.name.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for LifetimeDef {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lifetime.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++impl Hash for Lit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Lit::Str(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Lit::ByteStr(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Lit::Byte(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Lit::Char(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Lit::Int(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Lit::Float(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Lit::Bool(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Lit::Verbatim(v0) => {
++ state.write_u8(7u8);
++ v0.to_string().hash(state);
++ }
++ }
++ }
++}
++impl Hash for LitBool {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.value.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Local {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.init.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Macro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.delimiter.hash(state);
++ TokenStreamHelper(&self.tokens).hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MacroDelimiter {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ MacroDelimiter::Paren(_) => {
++ state.write_u8(0u8);
++ }
++ MacroDelimiter::Brace(_) => {
++ state.write_u8(1u8);
++ }
++ MacroDelimiter::Bracket(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Meta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Meta::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Meta::List(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Meta::NameValue(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaList {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.nested.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for MetaNameValue {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.path.hash(state);
++ self.lit.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for MethodTurbofish {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.args.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for NestedMeta {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ NestedMeta::Meta(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ NestedMeta::Lit(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ParenthesizedGenericArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.inputs.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Pat {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Pat::Box(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Pat::Ident(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Pat::Lit(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Pat::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Pat::Or(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Pat::Path(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Pat::Range(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Pat::Reference(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Pat::Rest(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Pat::Slice(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Pat::Struct(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Pat::Tuple(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Pat::TupleStruct(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Pat::Type(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Pat::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ Pat::Wild(v0) => {
++ state.write_u8(15u8);
++ v0.hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatBox {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatIdent {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.by_ref.hash(state);
++ self.mutability.hash(state);
++ self.ident.hash(state);
++ self.subpat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatLit {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.expr.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatOr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.leading_vert.hash(state);
++ self.cases.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatPath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRange {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.lo.hash(state);
++ self.limits.hash(state);
++ self.hi.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mutability.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatRest {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.fields.hash(state);
++ self.dot2_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.elems.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatTupleStruct {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.path.hash(state);
++ self.pat.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.pat.hash(state);
++ self.ty.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for PatWild {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Path {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.leading_colon.hash(state);
++ self.segments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathArguments {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ PathArguments::None => {
++ state.write_u8(0u8);
++ }
++ PathArguments::AngleBracketed(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ PathArguments::Parenthesized(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PathSegment {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.arguments.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateEq {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lhs_ty.hash(state);
++ self.rhs_ty.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateLifetime {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for PredicateType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.bounded_ty.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for QSelf {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ty.hash(state);
++ self.position.hash(state);
++ self.as_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for RangeLimits {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ RangeLimits::HalfOpen(_) => {
++ state.write_u8(0u8);
++ }
++ RangeLimits::Closed(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Receiver {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.reference.hash(state);
++ self.mutability.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for ReturnType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ ReturnType::Default => {
++ state.write_u8(0u8);
++ }
++ ReturnType::Type(_, v1) => {
++ state.write_u8(1u8);
++ v1.hash(state);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Signature {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.constness.hash(state);
++ self.asyncness.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for Stmt {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Stmt::Local(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Stmt::Item(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Stmt::Expr(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Stmt::Semi(v0, _) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.paren_token.hash(state);
++ self.modifier.hash(state);
++ self.lifetimes.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TraitBoundModifier {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitBoundModifier::None => {
++ state.write_u8(0u8);
++ }
++ TraitBoundModifier::Maybe(_) => {
++ state.write_u8(1u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItem {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TraitItem::Const(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TraitItem::Method(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ TraitItem::Type(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ TraitItem::Macro(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ TraitItem::Verbatim(v0) => {
++ state.write_u8(4u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemConst {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.ty.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.mac.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemMethod {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.sig.hash(state);
++ self.default.hash(state);
++ self.semi_token.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for TraitItemType {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.generics.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Type {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Type::Array(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Type::BareFn(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Type::Group(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Type::ImplTrait(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ Type::Infer(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ Type::Macro(v0) => {
++ state.write_u8(5u8);
++ v0.hash(state);
++ }
++ Type::Never(v0) => {
++ state.write_u8(6u8);
++ v0.hash(state);
++ }
++ Type::Paren(v0) => {
++ state.write_u8(7u8);
++ v0.hash(state);
++ }
++ Type::Path(v0) => {
++ state.write_u8(8u8);
++ v0.hash(state);
++ }
++ Type::Ptr(v0) => {
++ state.write_u8(9u8);
++ v0.hash(state);
++ }
++ Type::Reference(v0) => {
++ state.write_u8(10u8);
++ v0.hash(state);
++ }
++ Type::Slice(v0) => {
++ state.write_u8(11u8);
++ v0.hash(state);
++ }
++ Type::TraitObject(v0) => {
++ state.write_u8(12u8);
++ v0.hash(state);
++ }
++ Type::Tuple(v0) => {
++ state.write_u8(13u8);
++ v0.hash(state);
++ }
++ Type::Verbatim(v0) => {
++ state.write_u8(14u8);
++ TokenStreamHelper(v0).hash(state);
++ }
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeArray {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ self.len.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeBareFn {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetimes.hash(state);
++ self.unsafety.hash(state);
++ self.abi.hash(state);
++ self.inputs.hash(state);
++ self.variadic.hash(state);
++ self.output.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeImplTrait {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeInfer {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeMacro {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.mac.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeNever {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParam {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.colon_token.hash(state);
++ self.bounds.hash(state);
++ self.eq_token.hash(state);
++ self.default.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParamBound {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ TypeParamBound::Trait(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ TypeParamBound::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeParen {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.qself.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypePtr {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.const_token.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeReference {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.lifetime.hash(state);
++ self.mutability.hash(state);
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeSlice {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elem.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTraitObject {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.dyn_token.hash(state);
++ self.bounds.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for TypeTuple {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.elems.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for UnOp {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UnOp::Deref(_) => {
++ state.write_u8(0u8);
++ }
++ UnOp::Not(_) => {
++ state.write_u8(1u8);
++ }
++ UnOp::Neg(_) => {
++ state.write_u8(2u8);
++ }
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGlob {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseGroup {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.items.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseName {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UsePath {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.tree.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseRename {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.ident.hash(state);
++ self.rename.hash(state);
++ }
++}
++#[cfg(feature = "full")]
++impl Hash for UseTree {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ UseTree::Path(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ UseTree::Name(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ UseTree::Rename(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ UseTree::Glob(v0) => {
++ state.write_u8(3u8);
++ v0.hash(state);
++ }
++ UseTree::Group(v0) => {
++ state.write_u8(4u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variadic {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Variant {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.attrs.hash(state);
++ self.ident.hash(state);
++ self.fields.hash(state);
++ self.discriminant.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisCrate {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisPublic {
++ fn hash<H>(&self, _state: &mut H)
++ where
++ H: Hasher,
++ {
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for VisRestricted {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.in_token.hash(state);
++ self.path.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for Visibility {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ Visibility::Public(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ Visibility::Crate(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ Visibility::Restricted(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ Visibility::Inherited => {
++ state.write_u8(3u8);
++ }
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WhereClause {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ self.predicates.hash(state);
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Hash for WherePredicate {
++ fn hash<H>(&self, state: &mut H)
++ where
++ H: Hasher,
++ {
++ match self {
++ WherePredicate::Type(v0) => {
++ state.write_u8(0u8);
++ v0.hash(state);
++ }
++ WherePredicate::Lifetime(v0) => {
++ state.write_u8(1u8);
++ v0.hash(state);
++ }
++ WherePredicate::Eq(v0) => {
++ state.write_u8(2u8);
++ v0.hash(state);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/gen/visit.rs b/third_party/rust/syn/src/gen/visit.rs
+index b667f530c3..24d34b7480 100644
+--- third_party/rust/syn/src/gen/visit.rs
++++ third_party/rust/syn/src/gen/visit.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -30,7 +29,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit"` feature.*
++/// *This trait is available only if Syn is built with the `"visit"` feature.*
+ pub trait Visit<'ast> {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi(&mut self, i: &'ast Abi) {
+@@ -434,35 +433,27 @@ pub trait Visit<'ast> {
+ fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
+ visit_lifetime_def(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit(&mut self, i: &'ast Lit) {
+ visit_lit(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool(&mut self, i: &'ast LitBool) {
+ visit_lit_bool(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte(&mut self, i: &'ast LitByte) {
+ visit_lit_byte(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
+ visit_lit_byte_str(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char(&mut self, i: &'ast LitChar) {
+ visit_lit_char(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float(&mut self, i: &'ast LitFloat) {
+ visit_lit_float(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int(&mut self, i: &'ast LitInt) {
+ visit_lit_int(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str(&mut self, i: &'ast LitStr) {
+ visit_lit_str(self, i)
+ }
+@@ -2537,7 +2528,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2569,7 +2559,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
+ where
+ V: Visit<'ast> + ?Sized,
+@@ -2577,37 +2566,31 @@ where
+ skip!(node.value);
+ v.visit_span(&node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
+ where
+ V: Visit<'ast> + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
+ where
+ V: Visit<'ast> + ?Sized,
+diff --git a/third_party/rust/syn/src/gen/visit_mut.rs b/third_party/rust/syn/src/gen/visit_mut.rs
+index 5cddb827c6..5ce11f0b2e 100644
+--- third_party/rust/syn/src/gen/visit_mut.rs
++++ third_party/rust/syn/src/gen/visit_mut.rs
+@@ -20,7 +20,6 @@ macro_rules! full {
+ unreachable!()
+ };
+ }
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! skip {
+ ($($tt:tt)*) => {};
+ }
+@@ -31,7 +30,7 @@ macro_rules! skip {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
++/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
+ pub trait VisitMut {
+ #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_abi_mut(&mut self, i: &mut Abi) {
+@@ -438,35 +437,27 @@ pub trait VisitMut {
+ fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
+ visit_lifetime_def_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_mut(&mut self, i: &mut Lit) {
+ visit_lit_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
+ visit_lit_bool_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
+ visit_lit_byte_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
+ visit_lit_byte_str_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
+ visit_lit_char_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
+ visit_lit_float_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
+ visit_lit_int_mut(self, i)
+ }
+- #[cfg(any(feature = "derive", feature = "full"))]
+ fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
+ visit_lit_str_mut(self, i)
+ }
+@@ -2543,7 +2534,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
+ where
+ V: VisitMut + ?Sized,
+@@ -2575,7 +2565,6 @@ where
+ }
+ }
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
+ where
+ V: VisitMut + ?Sized,
+@@ -2583,37 +2572,31 @@ where
+ skip!(node.value);
+ v.visit_span_mut(&mut node.span);
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
+ where
+ V: VisitMut + ?Sized,
+ {
+ }
+-#[cfg(any(feature = "derive", feature = "full"))]
+ pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
+ where
+ V: VisitMut + ?Sized,
+diff --git a/third_party/rust/syn/src/generics.rs b/third_party/rust/syn/src/generics.rs
+index 95ab2e404a..05e8ef5cdf 100644
+--- third_party/rust/syn/src/generics.rs
++++ third_party/rust/syn/src/generics.rs
+@@ -1,13 +1,16 @@
+ use super::*;
+ use crate::punctuated::{Iter, IterMut, Punctuated};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::fmt::{self, Debug};
++#[cfg(all(feature = "printing", feature = "extra-traits"))]
++use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// Lifetimes and type parameters attached to a declaration of a function,
+ /// enum, trait, etc.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct Generics {
+ pub lt_token: Option<Token![<]>,
+ pub params: Punctuated<GenericParam, Token![,]>,
+@@ -20,7 +23,7 @@ ast_enum_of_structs! {
+ /// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
+ /// `'a: 'b`, `const LEN: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -28,9 +31,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum GenericParam {
+ /// A generic type parameter: `T: Into<String>`.
+ Type(TypeParam),
+@@ -46,7 +46,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A generic type parameter: `T: Into<String>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParam {
+ pub attrs: Vec<Attribute>,
+@@ -61,7 +61,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime definition: `'a: 'b + 'c + 'd`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct LifetimeDef {
+ pub attrs: Vec<Attribute>,
+@@ -74,7 +74,7 @@ ast_struct! {
+ ast_struct! {
+ /// A const generic parameter: `const LENGTH: usize`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ConstParam {
+ pub attrs: Vec<Attribute>,
+@@ -87,6 +87,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for Generics {
++ fn default() -> Self {
++ Generics {
++ lt_token: None,
++ params: Punctuated::new(),
++ gt_token: None,
++ where_clause: None,
++ }
++ }
++}
++
+ impl Generics {
+ /// Returns an
+ /// <code
+@@ -280,29 +291,23 @@ impl<'a> Iterator for ConstParamsMut<'a> {
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct ImplGenerics<'a>(&'a Generics);
+
+ /// Returned by `Generics::split_for_impl`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct TypeGenerics<'a>(&'a Generics);
+
+ /// Returned by `TypeGenerics::as_turbofish`.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature and the `"printing"` feature.*
+ #[cfg(feature = "printing")]
+-#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Turbofish<'a>(&'a Generics);
+
+ #[cfg(feature = "printing")]
+@@ -314,9 +319,8 @@ impl Generics {
+ /// # use proc_macro2::{Span, Ident};
+ /// # use quote::quote;
+ /// #
+- /// # fn main() {
+- /// # let generics: syn::Generics = Default::default();
+- /// # let name = Ident::new("MyType", Span::call_site());
++ /// # let generics: syn::Generics = Default::default();
++ /// # let name = Ident::new("MyType", Span::call_site());
+ /// #
+ /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+ /// quote! {
+@@ -324,11 +328,10 @@ impl Generics {
+ /// // ...
+ /// }
+ /// }
+- /// # ;
+- /// # }
++ /// # ;
+ /// ```
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
+ (
+@@ -339,11 +342,57 @@ impl Generics {
+ }
+ }
+
++#[cfg(feature = "printing")]
++macro_rules! generics_wrapper_impls {
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl<'a> Clone for $ty<'a> {
++ fn clone(&self) -> Self {
++ $ty(self.0)
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Debug for $ty<'a> {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter
++ .debug_tuple(stringify!($ty))
++ .field(self.0)
++ .finish()
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Eq for $ty<'a> {}
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> PartialEq for $ty<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ self.0 == other.0
++ }
++ }
++
++ #[cfg(feature = "extra-traits")]
++ impl<'a> Hash for $ty<'a> {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ self.0.hash(state);
++ }
++ }
++ };
++}
++
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(ImplGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(TypeGenerics);
++#[cfg(feature = "printing")]
++generics_wrapper_impls!(Turbofish);
++
+ #[cfg(feature = "printing")]
+ impl<'a> TypeGenerics<'a> {
+ /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
+ ///
+- /// *This method is available if Syn is built with the `"derive"` or
++ /// *This method is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature and the `"printing"` feature.*
+ pub fn as_turbofish(&self) -> Turbofish {
+ Turbofish(self.0)
+@@ -353,9 +402,8 @@ impl<'a> TypeGenerics<'a> {
+ ast_struct! {
+ /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[derive(Default)]
+ pub struct BoundLifetimes {
+ pub for_token: Token![for],
+ pub lt_token: Token![<],
+@@ -364,6 +412,17 @@ ast_struct! {
+ }
+ }
+
++impl Default for BoundLifetimes {
++ fn default() -> Self {
++ BoundLifetimes {
++ for_token: Default::default(),
++ lt_token: Default::default(),
++ lifetimes: Punctuated::new(),
++ gt_token: Default::default(),
++ }
++ }
++}
++
+ impl LifetimeDef {
+ pub fn new(lifetime: Lifetime) -> Self {
+ LifetimeDef {
+@@ -391,7 +450,7 @@ impl From<Ident> for TypeParam {
+ ast_enum_of_structs! {
+ /// A trait or lifetime used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum TypeParamBound {
+ Trait(TraitBound),
+@@ -402,7 +461,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A trait used as a bound on a type parameter.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct TraitBound {
+ pub paren_token: Option<token::Paren>,
+@@ -418,9 +477,8 @@ ast_enum! {
+ /// A modifier on a trait bound, currently only used for the `?` in
+ /// `?Sized`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum TraitBoundModifier {
+ None,
+ Maybe(Token![?]),
+@@ -431,7 +489,7 @@ ast_struct! {
+ /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
+ /// 'static`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct WhereClause {
+ pub where_token: Token![where],
+@@ -442,7 +500,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -450,9 +508,6 @@ ast_enum_of_structs! {
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+- //
+- // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+- // blocked on https://github.com/rust-lang/rust/issues/62833
+ pub enum WherePredicate {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ Type(PredicateType),
+@@ -468,7 +523,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateType {
+ /// Any lifetimes from a `for` binding
+@@ -484,7 +539,7 @@ ast_struct! {
+ ast_struct! {
+ /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateLifetime {
+ pub lifetime: Lifetime,
+@@ -496,7 +551,7 @@ ast_struct! {
+ ast_struct! {
+ /// An equality predicate in a `where` clause (unsupported).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct PredicateEq {
+ pub lhs_ty: Type,
+@@ -521,7 +576,6 @@ pub mod parsing {
+
+ let mut params = Punctuated::new();
+ let mut allow_lifetime_param = true;
+- let mut allow_type_param = true;
+ loop {
+ if input.peek(Token![>]) {
+ break;
+@@ -534,7 +588,7 @@ pub mod parsing {
+ attrs,
+ ..input.parse()?
+ }));
+- } else if allow_type_param && lookahead.peek(Ident) {
++ } else if lookahead.peek(Ident) {
+ allow_lifetime_param = false;
+ params.push_value(GenericParam::Type(TypeParam {
+ attrs,
+@@ -542,7 +596,6 @@ pub mod parsing {
+ }));
+ } else if lookahead.peek(Token![const]) {
+ allow_lifetime_param = false;
+- allow_type_param = false;
+ params.push_value(GenericParam::Const(ConstParam {
+ attrs,
+ ..input.parse()?
+@@ -665,57 +718,53 @@ pub mod parsing {
+
+ impl Parse for TypeParam {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let has_colon;
+- let has_default;
+- Ok(TypeParam {
+- attrs: input.call(Attribute::parse_outer)?,
+- ident: input.parse()?,
+- colon_token: {
+- if input.peek(Token![:]) {
+- has_colon = true;
+- Some(input.parse()?)
+- } else {
+- has_colon = false;
+- None
+- }
+- },
+- bounds: {
+- let mut bounds = Punctuated::new();
+- if has_colon {
+- loop {
+- if input.peek(Token![,])
+- || input.peek(Token![>])
+- || input.peek(Token![=])
+- {
+- break;
+- }
+- let value = input.parse()?;
+- bounds.push_value(value);
+- if !input.peek(Token![+]) {
+- break;
+- }
+- let punct = input.parse()?;
+- bounds.push_punct(punct);
+- }
++ let attrs = input.call(Attribute::parse_outer)?;
++ let ident: Ident = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++
++ let begin_bound = input.fork();
++ let mut is_maybe_const = false;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
++ break;
+ }
+- bounds
+- },
+- eq_token: {
+- if input.peek(Token![=]) {
+- has_default = true;
+- Some(input.parse()?)
+- } else {
+- has_default = false;
+- None
++ if input.peek(Token![?]) && input.peek2(Token![const]) {
++ input.parse::<Token![?]>()?;
++ input.parse::<Token![const]>()?;
++ is_maybe_const = true;
+ }
+- },
+- default: {
+- if has_default {
+- Some(input.parse()?)
+- } else {
+- None
++ let value: TypeParamBound = input.parse()?;
++ bounds.push_value(value);
++ if !input.peek(Token![+]) {
++ break;
+ }
+- },
++ let punct: Token![+] = input.parse()?;
++ bounds.push_punct(punct);
++ }
++ }
++
++ let mut eq_token: Option<Token![=]> = input.parse()?;
++ let mut default = if eq_token.is_some() {
++ Some(input.parse::<Type>()?)
++ } else {
++ None
++ };
++
++ if is_maybe_const {
++ bounds.clear();
++ eq_token = None;
++ default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
++ }
++
++ Ok(TypeParam {
++ attrs,
++ ident,
++ colon_token,
++ bounds,
++ eq_token,
++ default,
+ })
+ }
+ }
+@@ -898,6 +947,8 @@ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
++ #[cfg(feature = "full")]
++ use proc_macro2::TokenTree;
+ use quote::{ToTokens, TokenStreamExt};
+
+ use crate::attr::FilterAttrs;
+@@ -1080,9 +1131,25 @@ mod printing {
+ TokensOrDefault(&self.colon_token).to_tokens(tokens);
+ self.bounds.to_tokens(tokens);
+ }
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
++ #[cfg(feature = "full")]
++ {
++ if self.eq_token.is_none() {
++ if let Type::Verbatim(default) = default {
++ let mut iter = default.clone().into_iter();
++ match (iter.next(), iter.next()) {
++ (Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
++ if q.as_char() == '?' && c == "const" =>
++ {
++ return default.to_tokens(tokens);
++ }
++ _ => {}
++ }
++ }
++ }
++ }
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+@@ -1117,9 +1184,9 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.colon_token.to_tokens(tokens);
+ self.ty.to_tokens(tokens);
+- if self.default.is_some() {
++ if let Some(default) = &self.default {
+ TokensOrDefault(&self.eq_token).to_tokens(tokens);
+- self.default.to_tokens(tokens);
++ default.to_tokens(tokens);
+ }
+ }
+ }
+diff --git a/third_party/rust/syn/src/item.rs b/third_party/rust/syn/src/item.rs
+index ff4485ace9..0d8f7d3ddc 100644
+--- third_party/rust/syn/src/item.rs
++++ third_party/rust/syn/src/item.rs
+@@ -1,17 +1,15 @@
+ use super::*;
+-use crate::derive::{Data, DeriveInput};
++use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
+ use crate::punctuated::Punctuated;
+ use proc_macro2::TokenStream;
+
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
++#[cfg(feature = "parsing")]
++use std::mem;
+
+ ast_enum_of_structs! {
+ /// Things that can appear directly inside of a module or scope.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -21,7 +19,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Item #manual_extra_traits {
++ pub enum Item {
+ /// A constant item: `const MAX: u16 = 65535`.
+ Const(ItemConst),
+
+@@ -83,7 +81,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A constant item: `const MAX: u16 = 65535`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -100,7 +98,7 @@ ast_struct! {
+ ast_struct! {
+ /// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemEnum {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -115,7 +113,7 @@ ast_struct! {
+ ast_struct! {
+ /// An `extern crate` item: `extern crate serde`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemExternCrate {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -131,7 +129,7 @@ ast_struct! {
+ /// A free-standing function: `fn process(n: usize) -> Result<()> { ...
+ /// }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -143,7 +141,7 @@ ast_struct! {
+ ast_struct! {
+ /// A block of foreign items: `extern "C" { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemForeignMod {
+ pub attrs: Vec<Attribute>,
+ pub abi: Abi,
+@@ -156,7 +154,7 @@ ast_struct! {
+ /// An impl block providing trait or associated items: `impl<A> Trait
+ /// for Data<A> { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemImpl {
+ pub attrs: Vec<Attribute>,
+ pub defaultness: Option<Token![default]>,
+@@ -175,7 +173,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation, which includes `macro_rules!` definitions.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMacro {
+ pub attrs: Vec<Attribute>,
+ /// The `example` in `macro_rules! example { ... }`.
+@@ -188,8 +186,8 @@ ast_struct! {
+ ast_struct! {
+ /// A 2.0-style declarative macro introduced by the `macro` keyword.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- pub struct ItemMacro2 #manual_extra_traits {
++ /// *This type is available only if Syn is built with the `"full"` feature.*
++ pub struct ItemMacro2 {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+ pub macro_token: Token![macro],
+@@ -201,7 +199,7 @@ ast_struct! {
+ ast_struct! {
+ /// A module or module declaration: `mod m` or `mod m { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemMod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -215,7 +213,7 @@ ast_struct! {
+ ast_struct! {
+ /// A static item: `static BIKE: Shed = Shed(42)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -233,7 +231,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct definition: `struct Foo<A> { x: A }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemStruct {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -248,7 +246,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait definition: `pub trait Iterator { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTrait {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -267,7 +265,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemTraitAlias {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -283,7 +281,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -299,7 +297,7 @@ ast_struct! {
+ ast_struct! {
+ /// A union definition: `union Foo<A, B> { x: A, y: B }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUnion {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -313,7 +311,7 @@ ast_struct! {
+ ast_struct! {
+ /// A use declaration: `use std::collections::HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ItemUse {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -324,145 +322,32 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Item {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Item {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Item::Const(this), Item::Const(other)) => this == other,
+- (Item::Enum(this), Item::Enum(other)) => this == other,
+- (Item::ExternCrate(this), Item::ExternCrate(other)) => this == other,
+- (Item::Fn(this), Item::Fn(other)) => this == other,
+- (Item::ForeignMod(this), Item::ForeignMod(other)) => this == other,
+- (Item::Impl(this), Item::Impl(other)) => this == other,
+- (Item::Macro(this), Item::Macro(other)) => this == other,
+- (Item::Macro2(this), Item::Macro2(other)) => this == other,
+- (Item::Mod(this), Item::Mod(other)) => this == other,
+- (Item::Static(this), Item::Static(other)) => this == other,
+- (Item::Struct(this), Item::Struct(other)) => this == other,
+- (Item::Trait(this), Item::Trait(other)) => this == other,
+- (Item::TraitAlias(this), Item::TraitAlias(other)) => this == other,
+- (Item::Type(this), Item::Type(other)) => this == other,
+- (Item::Union(this), Item::Union(other)) => this == other,
+- (Item::Use(this), Item::Use(other)) => this == other,
+- (Item::Verbatim(this), Item::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Item {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
++impl Item {
++ #[cfg(feature = "parsing")]
++ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+- Item::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- Item::Enum(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- Item::ExternCrate(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- Item::Fn(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- Item::ForeignMod(item) => {
+- state.write_u8(4);
+- item.hash(state);
+- }
+- Item::Impl(item) => {
+- state.write_u8(5);
+- item.hash(state);
+- }
+- Item::Macro(item) => {
+- state.write_u8(6);
+- item.hash(state);
+- }
+- Item::Macro2(item) => {
+- state.write_u8(7);
+- item.hash(state);
+- }
+- Item::Mod(item) => {
+- state.write_u8(8);
+- item.hash(state);
+- }
+- Item::Static(item) => {
+- state.write_u8(9);
+- item.hash(state);
+- }
+- Item::Struct(item) => {
+- state.write_u8(10);
+- item.hash(state);
+- }
+- Item::Trait(item) => {
+- state.write_u8(11);
+- item.hash(state);
+- }
+- Item::TraitAlias(item) => {
+- state.write_u8(12);
+- item.hash(state);
+- }
+- Item::Type(item) => {
+- state.write_u8(13);
+- item.hash(state);
+- }
+- Item::Union(item) => {
+- state.write_u8(14);
+- item.hash(state);
+- }
+- Item::Use(item) => {
+- state.write_u8(15);
+- item.hash(state);
+- }
+- Item::Verbatim(item) => {
+- state.write_u8(16);
+- TokenStreamHelper(item).hash(state);
+- }
++ Item::ExternCrate(ItemExternCrate { attrs, .. })
++ | Item::Use(ItemUse { attrs, .. })
++ | Item::Static(ItemStatic { attrs, .. })
++ | Item::Const(ItemConst { attrs, .. })
++ | Item::Fn(ItemFn { attrs, .. })
++ | Item::Mod(ItemMod { attrs, .. })
++ | Item::ForeignMod(ItemForeignMod { attrs, .. })
++ | Item::Type(ItemType { attrs, .. })
++ | Item::Struct(ItemStruct { attrs, .. })
++ | Item::Enum(ItemEnum { attrs, .. })
++ | Item::Union(ItemUnion { attrs, .. })
++ | Item::Trait(ItemTrait { attrs, .. })
++ | Item::TraitAlias(ItemTraitAlias { attrs, .. })
++ | Item::Impl(ItemImpl { attrs, .. })
++ | Item::Macro(ItemMacro { attrs, .. })
++ | Item::Macro2(ItemMacro2 { attrs, .. }) => mem::replace(attrs, new),
++ Item::Verbatim(_) => Vec::new(),
+ Item::__Nonexhaustive => unreachable!(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ItemMacro2 {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ItemMacro2 {
+- fn eq(&self, other: &Self) -> bool {
+- self.attrs == other.attrs
+- && self.vis == other.vis
+- && self.macro_token == other.macro_token
+- && self.ident == other.ident
+- && TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ItemMacro2 {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.attrs.hash(state);
+- self.vis.hash(state);
+- self.macro_token.hash(state);
+- self.ident.hash(state);
+- TokenStreamHelper(&self.rules).hash(state);
+- }
+-}
+-
+ impl From<DeriveInput> for Item {
+ fn from(input: DeriveInput) -> Item {
+ match input.data {
+@@ -496,10 +381,57 @@ impl From<DeriveInput> for Item {
+ }
+ }
+
++impl From<ItemStruct> for DeriveInput {
++ fn from(input: ItemStruct) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Struct(DataStruct {
++ struct_token: input.struct_token,
++ fields: input.fields,
++ semi_token: input.semi_token,
++ }),
++ }
++ }
++}
++
++impl From<ItemEnum> for DeriveInput {
++ fn from(input: ItemEnum) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Enum(DataEnum {
++ enum_token: input.enum_token,
++ brace_token: input.brace_token,
++ variants: input.variants,
++ }),
++ }
++ }
++}
++
++impl From<ItemUnion> for DeriveInput {
++ fn from(input: ItemUnion) -> DeriveInput {
++ DeriveInput {
++ attrs: input.attrs,
++ vis: input.vis,
++ ident: input.ident,
++ generics: input.generics,
++ data: Data::Union(DataUnion {
++ union_token: input.union_token,
++ fields: input.fields,
++ }),
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -530,7 +462,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A path prefix of imports in a `use` item: `std::...`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UsePath {
+ pub ident: Ident,
+ pub colon2_token: Token![::],
+@@ -541,7 +473,7 @@ ast_struct! {
+ ast_struct! {
+ /// An identifier imported by a `use` item: `HashMap`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseName {
+ pub ident: Ident,
+ }
+@@ -550,7 +482,7 @@ ast_struct! {
+ ast_struct! {
+ /// An renamed identifier imported by a `use` item: `HashMap as Map`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseRename {
+ pub ident: Ident,
+ pub as_token: Token![as],
+@@ -561,7 +493,7 @@ ast_struct! {
+ ast_struct! {
+ /// A glob import in a `use` item: `*`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGlob {
+ pub star_token: Token![*],
+ }
+@@ -570,7 +502,7 @@ ast_struct! {
+ ast_struct! {
+ /// A braced group of imports in a `use` item: `{A, B, C}`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct UseGroup {
+ pub brace_token: token::Brace,
+ pub items: Punctuated<UseTree, Token![,]>,
+@@ -580,7 +512,7 @@ ast_struct! {
+ ast_enum_of_structs! {
+ /// An item within an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -590,7 +522,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ForeignItem #manual_extra_traits {
++ pub enum ForeignItem {
+ /// A foreign function in an `extern` block.
+ Fn(ForeignItemFn),
+
+@@ -614,7 +546,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A foreign function in an `extern` block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemFn {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -626,7 +558,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign static item in an `extern` block: `static ext: u8`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemStatic {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -642,7 +574,7 @@ ast_struct! {
+ ast_struct! {
+ /// A foreign type in an `extern` block: `type void`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -655,7 +587,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an extern block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ForeignItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -663,61 +595,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ForeignItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ForeignItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ForeignItem::Fn(this), ForeignItem::Fn(other)) => this == other,
+- (ForeignItem::Static(this), ForeignItem::Static(other)) => this == other,
+- (ForeignItem::Type(this), ForeignItem::Type(other)) => this == other,
+- (ForeignItem::Macro(this), ForeignItem::Macro(other)) => this == other,
+- (ForeignItem::Verbatim(this), ForeignItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ForeignItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ForeignItem::Fn(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ForeignItem::Static(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ForeignItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ForeignItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ForeignItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ForeignItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item declaration within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -727,7 +608,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum TraitItem #manual_extra_traits {
++ pub enum TraitItem {
+ /// An associated constant within the definition of a trait.
+ Const(TraitItemConst),
+
+@@ -751,7 +632,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemConst {
+ pub attrs: Vec<Attribute>,
+ pub const_token: Token![const],
+@@ -766,7 +647,7 @@ ast_struct! {
+ ast_struct! {
+ /// A trait method within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub sig: Signature,
+@@ -778,7 +659,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemType {
+ pub attrs: Vec<Attribute>,
+ pub type_token: Token![type],
+@@ -794,7 +675,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within the definition of a trait.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct TraitItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -802,61 +683,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for TraitItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for TraitItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (TraitItem::Const(this), TraitItem::Const(other)) => this == other,
+- (TraitItem::Method(this), TraitItem::Method(other)) => this == other,
+- (TraitItem::Type(this), TraitItem::Type(other)) => this == other,
+- (TraitItem::Macro(this), TraitItem::Macro(other)) => this == other,
+- (TraitItem::Verbatim(this), TraitItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for TraitItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- TraitItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- TraitItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- TraitItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- TraitItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- TraitItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- TraitItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_enum_of_structs! {
+ /// An item within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -866,7 +696,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum ImplItem #manual_extra_traits {
++ pub enum ImplItem {
+ /// An associated constant within an impl block.
+ Const(ImplItemConst),
+
+@@ -890,7 +720,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// An associated constant within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemConst {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -908,7 +738,7 @@ ast_struct! {
+ ast_struct! {
+ /// A method within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMethod {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -921,7 +751,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemType {
+ pub attrs: Vec<Attribute>,
+ pub vis: Visibility,
+@@ -938,7 +768,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro invocation within an impl block.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ImplItemMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -946,62 +776,11 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for ImplItem {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for ImplItem {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (ImplItem::Const(this), ImplItem::Const(other)) => this == other,
+- (ImplItem::Method(this), ImplItem::Method(other)) => this == other,
+- (ImplItem::Type(this), ImplItem::Type(other)) => this == other,
+- (ImplItem::Macro(this), ImplItem::Macro(other)) => this == other,
+- (ImplItem::Verbatim(this), ImplItem::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for ImplItem {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- ImplItem::Const(item) => {
+- state.write_u8(0);
+- item.hash(state);
+- }
+- ImplItem::Method(item) => {
+- state.write_u8(1);
+- item.hash(state);
+- }
+- ImplItem::Type(item) => {
+- state.write_u8(2);
+- item.hash(state);
+- }
+- ImplItem::Macro(item) => {
+- state.write_u8(3);
+- item.hash(state);
+- }
+- ImplItem::Verbatim(item) => {
+- state.write_u8(4);
+- TokenStreamHelper(item).hash(state);
+- }
+- ImplItem::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// A function signature in a trait or implementation: `unsafe fn
+ /// initialize(&self)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Signature {
+ pub constness: Option<Token![const]>,
+ pub asyncness: Option<Token![async]>,
+@@ -1017,13 +796,34 @@ ast_struct! {
+ }
+ }
+
++impl Signature {
++ /// A method's `self` receiver, such as `&self` or `self: Box<Self>`.
++ pub fn receiver(&self) -> Option<&FnArg> {
++ let arg = self.inputs.first()?;
++ match arg {
++ FnArg::Receiver(_) => Some(arg),
++ FnArg::Typed(PatType { pat, .. }) => {
++ if let Pat::Ident(PatIdent { ident, .. }) = &**pat {
++ if ident == "self" {
++ return Some(arg);
++ }
++ }
++ None
++ }
++ }
++ }
++}
++
+ ast_enum_of_structs! {
+ /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum FnArg {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
++ ///
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
+ Receiver(Receiver),
+
+ /// A function argument accepted by pattern and type.
+@@ -1035,7 +835,10 @@ ast_struct! {
+ /// The `self` argument of an associated method, whether taken by value
+ /// or by reference.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// Note that `self` receivers with a specified type, such as `self:
++ /// Box<Self>`, are parsed as a `FnArg::Typed`.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Receiver {
+ pub attrs: Vec<Attribute>,
+ pub reference: Option<(Token![&], Option<Lifetime>)>,
+@@ -1056,7 +859,8 @@ pub mod parsing {
+
+ use crate::ext::IdentExt;
+ use crate::parse::discouraged::Speculative;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
++ use crate::token::Brace;
+ use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenTree};
+ use std::iter::{self, FromIterator};
+
+@@ -1064,18 +868,26 @@ pub mod parsing {
+
+ impl Parse for Item {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![extern]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
++ }
++ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+@@ -1083,8 +895,6 @@ pub mod parsing {
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+- } else if lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1094,18 +904,61 @@ pub mod parsing {
+ } else if lookahead.peek(Token![use]) {
+ input.parse().map(Item::Use)
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(Item::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Static(ItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+- input.parse().map(Item::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let vis = input.parse()?;
++ let const_token = input.parse()?;
++ let ident = {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ };
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![;]) {
++ input.parse::<Token![;]>()?;
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(Item::Const(ItemConst {
++ attrs: Vec::new(),
++ vis,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token: input.parse()?,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+@@ -1117,21 +970,19 @@ pub mod parsing {
+ {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
+- input.parse().map(Item::Impl)
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(Item::Fn)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async]) || lookahead.peek(Token![fn]) {
+- input.parse().map(Item::Fn)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(Item::Type)
++ parse_item_type(begin, input)
+ } else if lookahead.peek(existential) {
+ input.call(item_existential).map(Item::Verbatim)
+ } else if lookahead.peek(Token![struct]) {
+@@ -1147,14 +998,18 @@ pub mod parsing {
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+- input.parse().map(Item::Impl)
++ let allow_const_impl = true;
++ if let Some(item) = parse_impl(input, allow_const_impl)? {
++ Ok(Item::Impl(item))
++ } else {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Token![macro]) {
+ input.parse().map(Item::Macro2)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1163,32 +1018,64 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- Item::ExternCrate(item) => &mut item.attrs,
+- Item::Use(item) => &mut item.attrs,
+- Item::Static(item) => &mut item.attrs,
+- Item::Const(item) => &mut item.attrs,
+- Item::Fn(item) => &mut item.attrs,
+- Item::Mod(item) => &mut item.attrs,
+- Item::ForeignMod(item) => &mut item.attrs,
+- Item::Type(item) => &mut item.attrs,
+- Item::Struct(item) => &mut item.attrs,
+- Item::Enum(item) => &mut item.attrs,
+- Item::Union(item) => &mut item.attrs,
+- Item::Trait(item) => &mut item.attrs,
+- Item::TraitAlias(item) => &mut item.attrs,
+- Item::Impl(item) => &mut item.attrs,
+- Item::Macro(item) => &mut item.attrs,
+- Item::Macro2(item) => &mut item.attrs,
+- Item::Verbatim(_) => return Ok(item),
+- Item::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(item)
++ }
++ }
++
++ struct FlexibleItemType {
++ vis: Visibility,
++ defaultness: Option<Token![default]>,
++ type_token: Token![type],
++ ident: Ident,
++ generics: Generics,
++ colon_token: Option<Token![:]>,
++ bounds: Punctuated<TypeParamBound, Token![+]>,
++ ty: Option<(Token![=], Type)>,
++ semi_token: Token![;],
++ }
++
++ impl Parse for FlexibleItemType {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let type_token: Token![type] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let mut generics: Generics = input.parse()?;
++ let colon_token: Option<Token![:]> = input.parse()?;
++ let mut bounds = Punctuated::new();
++ if colon_token.is_some() {
++ loop {
++ bounds.push_value(input.parse::<TypeParamBound>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ bounds.push_punct(input.parse::<Token![+]>()?);
++ if input.peek(Token![where]) || input.peek(Token![=]) || input.peek(Token![;]) {
++ break;
++ }
++ }
+ }
++ generics.where_clause = input.parse()?;
++ let ty = if let Some(eq_token) = input.parse()? {
++ Some((eq_token, input.parse::<Type>()?))
++ } else {
++ None
++ };
++ let semi_token: Token![;] = input.parse()?;
+
+- Ok(item)
++ Ok(FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ })
+ }
+ }
+
+@@ -1310,7 +1197,6 @@ pub mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+- || lookahead.peek(Token![extern])
+ {
+ let ident = input.call(Ident::parse_any)?;
+ if input.peek(Token![::]) {
+@@ -1392,69 +1278,126 @@ pub mod parsing {
+ }
+ }
+
+- impl Parse for ItemFn {
+- fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let vis: Visibility = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
++ fn pop_variadic(args: &mut Punctuated<FnArg, Token![,]>) -> Option<Variadic> {
++ let trailing_punct = args.trailing_punct();
+
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+- let variadic = inputs.last().as_ref().and_then(get_variadic);
+-
+- fn get_variadic(input: &&FnArg) -> Option<Variadic> {
+- if let FnArg::Typed(PatType { ty, .. }) = input {
+- if let Type::Verbatim(tokens) = &**ty {
+- if let Ok(dots) = parse2(tokens.clone()) {
+- return Some(Variadic {
+- attrs: Vec::new(),
+- dots,
+- });
+- }
+- }
+- }
+- None
++ let last = match args.last_mut()? {
++ FnArg::Typed(last) => last,
++ _ => return None,
++ };
++
++ let ty = match last.ty.as_ref() {
++ Type::Verbatim(ty) => ty,
++ _ => return None,
++ };
++
++ let mut variadic = Variadic {
++ attrs: Vec::new(),
++ dots: parse2(ty.clone()).ok()?,
++ };
++
++ if let Pat::Verbatim(pat) = last.pat.as_ref() {
++ if pat.to_string() == "..." && !trailing_punct {
++ variadic.attrs = mem::replace(&mut last.attrs, Vec::new());
++ args.pop();
+ }
++ }
+
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ Some(variadic)
++ }
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ fn variadic_to_tokens(dots: &Token![...]) -> TokenStream {
++ TokenStream::from_iter(vec![
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[0]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Joint);
++ dot.set_span(dots.spans[1]);
++ dot
++ }),
++ TokenTree::Punct({
++ let mut dot = Punct::new('.', Spacing::Alone);
++ dot.set_span(dots.spans[2]);
++ dot
++ }),
++ ])
++ }
+
+- Ok(ItemFn {
+- attrs: private::attrs(outer_attrs, inner_attrs),
+- vis,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Box::new(Block { brace_token, stmts }),
+- })
++ fn peek_signature(input: ParseStream) -> bool {
++ let fork = input.fork();
++ fork.parse::<Option<Token![const]>>().is_ok()
++ && fork.parse::<Option<Token![async]>>().is_ok()
++ && fork.parse::<Option<Token![unsafe]>>().is_ok()
++ && fork.parse::<Option<Abi>>().is_ok()
++ && fork.peek(Token![fn])
++ }
++
++ fn parse_signature(input: ParseStream) -> Result<Signature> {
++ let constness: Option<Token![const]> = input.parse()?;
++ let asyncness: Option<Token![async]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let abi: Option<Abi> = input.parse()?;
++ let fn_token: Token![fn] = input.parse()?;
++ let ident: Ident = input.parse()?;
++ let generics: Generics = input.parse()?;
++
++ let content;
++ let paren_token = parenthesized!(content in input);
++ let mut inputs = parse_fn_args(&content)?;
++ let variadic = pop_variadic(&mut inputs);
++
++ let output: ReturnType = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ Ok(Signature {
++ constness,
++ asyncness,
++ unsafety,
++ abi,
++ fn_token,
++ ident,
++ paren_token,
++ inputs,
++ output,
++ variadic,
++ generics: Generics {
++ where_clause,
++ ..generics
++ },
++ })
++ }
++
++ impl Parse for ItemFn {
++ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ parse_rest_of_fn(input, outer_attrs, vis, sig)
+ }
+ }
+
++ fn parse_rest_of_fn(
++ input: ParseStream,
++ outer_attrs: Vec<Attribute>,
++ vis: Visibility,
++ sig: Signature,
++ ) -> Result<ItemFn> {
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let stmts = content.call(Block::parse_within)?;
++
++ Ok(ItemFn {
++ attrs: private::attrs(outer_attrs, inner_attrs),
++ vis,
++ sig,
++ block: Box::new(Block { brace_token, stmts }),
++ })
++ }
++
+ impl Parse for FnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1491,26 +1434,79 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_fn_args(input: ParseStream) -> Result<Punctuated<FnArg, Token![,]>> {
++ let mut args = Punctuated::new();
++ let mut has_receiver = false;
++
++ while !input.is_empty() {
++ let attrs = input.call(Attribute::parse_outer)?;
++
++ let arg = if let Some(dots) = input.parse::<Option<Token![...]>>()? {
++ FnArg::Typed(PatType {
++ attrs,
++ pat: Box::new(Pat::Verbatim(variadic_to_tokens(&dots))),
++ colon_token: Token![:](dots.spans[0]),
++ ty: Box::new(Type::Verbatim(variadic_to_tokens(&dots))),
++ })
++ } else {
++ let mut arg: FnArg = input.parse()?;
++ match &mut arg {
++ FnArg::Receiver(receiver) if has_receiver => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected second method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) if !args.is_empty() => {
++ return Err(Error::new(
++ receiver.self_token.span,
++ "unexpected method receiver",
++ ));
++ }
++ FnArg::Receiver(receiver) => {
++ has_receiver = true;
++ receiver.attrs = attrs;
++ }
++ FnArg::Typed(arg) => arg.attrs = attrs,
++ }
++ arg
++ };
++ args.push_value(arg);
++
++ if input.is_empty() {
++ break;
++ }
++
++ let comma: Token![,] = input.parse()?;
++ args.push_punct(comma);
++ }
++
++ Ok(args)
++ }
++
+ fn fn_arg_typed(input: ParseStream) -> Result<PatType> {
++ // Hack to parse pre-2018 syntax in
++ // test/ui/rfc-2565-param-attrs/param-attrs-pretty.rs
++ // because the rest of the test case is valuable.
++ if input.peek(Ident) && input.peek2(Token![<]) {
++ let span = input.fork().parse::<Ident>()?.span();
++ return Ok(PatType {
++ attrs: Vec::new(),
++ pat: Box::new(Pat::Wild(PatWild {
++ attrs: Vec::new(),
++ underscore_token: Token![_](span),
++ })),
++ colon_token: Token![:](span),
++ ty: input.parse()?,
++ });
++ }
++
+ Ok(PatType {
+ attrs: Vec::new(),
+- pat: input.parse()?,
++ pat: Box::new(pat::parsing::multi_pat(input)?),
+ colon_token: input.parse()?,
+ ty: Box::new(match input.parse::<Option<Token![...]>>()? {
+- Some(dot3) => {
+- let args = vec![
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Joint)),
+- TokenTree::Punct(Punct::new('.', Spacing::Alone)),
+- ];
+- let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
+- |(mut arg, span)| {
+- arg.set_span(*span);
+- arg
+- },
+- ));
+- Type::Verbatim(tokens)
+- }
++ Some(dot3) => Type::Verbatim(variadic_to_tokens(&dot3)),
+ None => input.parse()?,
+ }),
+ })
+@@ -1581,22 +1577,60 @@ pub mod parsing {
+
+ impl Parse for ForeignItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![fn]) {
+- input.parse().map(ForeignItem::Fn)
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ let vis: Visibility = input.parse()?;
++ let sig = parse_signature(input)?;
++ if input.peek(token::Brace) {
++ let content;
++ braced!(content in input);
++ content.call(Attribute::parse_inner)?;
++ content.call(Block::parse_within)?;
++
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Fn(ForeignItemFn {
++ attrs: Vec::new(),
++ vis,
++ sig,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![static]) {
+- input.parse().map(ForeignItem::Static)
++ let vis = input.parse()?;
++ let static_token = input.parse()?;
++ let mutability = input.parse()?;
++ let ident = input.parse()?;
++ let colon_token = input.parse()?;
++ let ty = input.parse()?;
++ if input.peek(Token![=]) {
++ input.parse::<Token![=]>()?;
++ input.parse::<Expr>()?;
++ input.parse::<Token![;]>()?;
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Static(ForeignItemStatic {
++ attrs: Vec::new(),
++ vis,
++ static_token,
++ mutability,
++ ident,
++ colon_token,
++ ty,
++ semi_token: input.parse()?,
++ }))
++ }
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ForeignItem::Type)
++ parse_foreign_item_type(begin, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -1605,17 +1639,16 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- ForeignItem::Fn(item) => &mut item.attrs,
+- ForeignItem::Static(item) => &mut item.attrs,
+- ForeignItem::Type(item) => &mut item.attrs,
+- ForeignItem::Macro(item) => &mut item.attrs,
+- ForeignItem::Verbatim(_) | ForeignItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
+- }
++ let item_attrs = match &mut item {
++ ForeignItem::Fn(item) => &mut item.attrs,
++ ForeignItem::Static(item) => &mut item.attrs,
++ ForeignItem::Type(item) => &mut item.attrs,
++ ForeignItem::Macro(item) => &mut item.attrs,
++ ForeignItem::Verbatim(_) => return Ok(item),
++ ForeignItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+
+ Ok(item)
+ }
+@@ -1625,55 +1658,12 @@ pub mod parsing {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let mut inputs = Punctuated::new();
+- let mut variadic = None;
+- while !content.is_empty() {
+- let attrs = content.call(Attribute::parse_outer)?;
+-
+- if let Some(dots) = content.parse()? {
+- variadic = Some(Variadic { attrs, dots });
+- break;
+- }
+-
+- let mut arg = content.call(fn_arg_typed)?;
+- arg.attrs = attrs;
+- inputs.push_value(FnArg::Typed(arg));
+- if content.is_empty() {
+- break;
+- }
+-
+- inputs.push_punct(content.parse()?);
+- }
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+ let semi_token: Token![;] = input.parse()?;
+-
+ Ok(ForeignItemFn {
+ attrs,
+ vis,
+- sig: Signature {
+- constness: None,
+- asyncness: None,
+- unsafety: None,
+- abi: None,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ semi_token,
+ })
+ }
+@@ -1706,6 +1696,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_foreign_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ForeignItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some()
++ || generics.lt_token.is_some()
++ || generics.where_clause.is_some()
++ || colon_token.is_some()
++ || ty.is_some()
++ {
++ Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(ForeignItem::Type(ForeignItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ForeignItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -1742,6 +1763,36 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_item_type(begin: ParseBuffer, input: ParseStream) -> Result<Item> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || colon_token.is_some() || ty.is_none() {
++ Ok(Item::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(Item::Type(ItemType {
++ attrs: Vec::new(),
++ vis,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty: Box::new(ty),
++ semi_token,
++ }))
++ }
++ }
++
+ #[cfg(not(feature = "printing"))]
+ fn item_existential(input: ParseStream) -> Result<TokenStream> {
+ Err(input.error("existential type is not supported"))
+@@ -1887,7 +1938,7 @@ pub mod parsing {
+
+ impl Parse for ItemTrait {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let attrs = input.call(Attribute::parse_outer)?;
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let unsafety: Option<Token![unsafe]> = input.parse()?;
+ let auto_token: Option<Token![auto]> = input.parse()?;
+@@ -1896,7 +1947,7 @@ pub mod parsing {
+ let generics: Generics = input.parse()?;
+ parse_rest_of_trait(
+ input,
+- attrs,
++ outer_attrs,
+ vis,
+ unsafety,
+ auto_token,
+@@ -1909,7 +1960,7 @@ pub mod parsing {
+
+ fn parse_rest_of_trait(
+ input: ParseStream,
+- attrs: Vec<Attribute>,
++ outer_attrs: Vec<Attribute>,
+ vis: Visibility,
+ unsafety: Option<Token![unsafe]>,
+ auto_token: Option<Token![auto]>,
+@@ -1937,13 +1988,14 @@ pub mod parsing {
+
+ let content;
+ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let mut items = Vec::new();
+ while !content.is_empty() {
+ items.push(content.parse()?);
+ }
+
+ Ok(ItemTrait {
+- attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ vis,
+ unsafety,
+ auto_token,
+@@ -2014,14 +2066,19 @@ pub mod parsing {
+
+ impl Parse for TraitItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
++ let vis: Visibility = input.parse()?;
++ let defaultness: Option<Token![default]> = input.parse()?;
+ let ahead = input.fork();
+
+ let lookahead = ahead.lookahead1();
+- let mut item = if lookahead.peek(Token![const]) {
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(TraitItem::Method)
++ } else if lookahead.peek(Token![const]) {
+ ahead.parse::<Token![const]>()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.parse().map(TraitItem::Const)
+ } else if lookahead.peek(Token![async])
+ || lookahead.peek(Token![unsafe])
+@@ -2032,18 +2089,11 @@ pub mod parsing {
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![async])
+- || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(TraitItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(TraitItem::Type)
++ parse_trait_item_type(begin.fork(), input)
+ } else if lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::])
+ {
+@@ -2052,18 +2102,20 @@ pub mod parsing {
+ Err(lookahead.error())
+ }?;
+
+- {
+- let item_attrs = match &mut item {
+- TraitItem::Const(item) => &mut item.attrs,
+- TraitItem::Method(item) => &mut item.attrs,
+- TraitItem::Type(item) => &mut item.attrs,
+- TraitItem::Macro(item) => &mut item.attrs,
+- TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
+- };
+- attrs.extend(item_attrs.drain(..));
+- *item_attrs = attrs;
++ match (vis, defaultness) {
++ (Visibility::Inherited, None) => {}
++ _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ }
+
++ let item_attrs = match &mut item {
++ TraitItem::Const(item) => &mut item.attrs,
++ TraitItem::Method(item) => &mut item.attrs,
++ TraitItem::Type(item) => &mut item.attrs,
++ TraitItem::Macro(item) => &mut item.attrs,
++ TraitItem::Verbatim(_) | TraitItem::__Nonexhaustive => unreachable!(),
++ };
++ attrs.extend(item_attrs.drain(..));
++ *item_attrs = attrs;
+ Ok(item)
+ }
+ }
+@@ -2073,7 +2125,14 @@ pub mod parsing {
+ Ok(TraitItemConst {
+ attrs: input.call(Attribute::parse_outer)?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ default: {
+@@ -2093,20 +2152,7 @@ pub mod parsing {
+ impl Parse for TraitItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let outer_attrs = input.call(Attribute::parse_outer)?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let sig = parse_signature(input)?;
+
+ let lookahead = input.lookahead1();
+ let (brace_token, inner_attrs, stmts, semi_token) = if lookahead.peek(token::Brace) {
+@@ -2124,22 +2170,7 @@ pub mod parsing {
+
+ Ok(TraitItemMethod {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
++ sig,
+ default: brace_token.map(|brace_token| Block { brace_token, stmts }),
+ semi_token,
+ })
+@@ -2188,6 +2219,35 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_trait_item_type(begin: ParseBuffer, input: ParseStream) -> Result<TraitItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if defaultness.is_some() || vis.is_some() {
++ Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ Ok(TraitItem::Type(TraitItemType {
++ attrs: Vec::new(),
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds,
++ default: ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for TraitItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2207,52 +2267,67 @@ pub mod parsing {
+
+ impl Parse for ItemImpl {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+- let defaultness: Option<Token![default]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let impl_token: Token![impl] = input.parse()?;
+-
+- let has_generics = input.peek(Token![<])
+- && (input.peek2(Token![>])
+- || input.peek2(Token![#])
+- || (input.peek2(Ident) || input.peek2(Lifetime))
+- && (input.peek3(Token![:])
+- || input.peek3(Token![,])
+- || input.peek3(Token![>])));
+- let generics: Generics = if has_generics {
+- input.parse()?
+- } else {
+- Generics::default()
+- };
+-
+- let trait_ = {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- if ahead.parse::<Option<Token![!]>>().is_ok()
+- && ahead.parse::<Path>().is_ok()
+- && ahead.parse::<Token![for]>().is_ok()
+- {
+- let polarity: Option<Token![!]> = input.parse()?;
+- let path: Path = input.parse()?;
+- let for_token: Token![for] = input.parse()?;
+- Some((polarity, path, for_token))
+- } else {
+- None
+- }
+- };
+- let self_ty: Type = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
++ let allow_const_impl = false;
++ parse_impl(input, allow_const_impl).map(Option::unwrap)
++ }
++ }
++
++ fn parse_impl(input: ParseStream, allow_const_impl: bool) -> Result<Option<ItemImpl>> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
++ let defaultness: Option<Token![default]> = input.parse()?;
++ let unsafety: Option<Token![unsafe]> = input.parse()?;
++ let impl_token: Token![impl] = input.parse()?;
++
++ let has_generics = input.peek(Token![<])
++ && (input.peek2(Token![>])
++ || input.peek2(Token![#])
++ || (input.peek2(Ident) || input.peek2(Lifetime))
++ && (input.peek3(Token![:])
++ || input.peek3(Token![,])
++ || input.peek3(Token![>]))
++ || input.peek2(Token![const]));
++ let generics: Generics = if has_generics {
++ input.parse()?
++ } else {
++ Generics::default()
++ };
+
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
++ let is_const_impl = allow_const_impl
++ && (input.peek(Token![const]) || input.peek(Token![?]) && input.peek2(Token![const]));
++ if is_const_impl {
++ input.parse::<Option<Token![?]>>()?;
++ input.parse::<Token![const]>()?;
++ }
+
+- let mut items = Vec::new();
+- while !content.is_empty() {
+- items.push(content.parse()?);
++ let trait_ = (|| -> Option<_> {
++ let ahead = input.fork();
++ let polarity: Option<Token![!]> = ahead.parse().ok()?;
++ let mut path: Path = ahead.parse().ok()?;
++ if path.segments.last().unwrap().arguments.is_empty() && ahead.peek(token::Paren) {
++ let parenthesized = PathArguments::Parenthesized(ahead.parse().ok()?);
++ path.segments.last_mut().unwrap().arguments = parenthesized;
+ }
++ let for_token: Token![for] = ahead.parse().ok()?;
++ input.advance_to(&ahead);
++ Some((polarity, path, for_token))
++ })();
++
++ let self_ty: Type = input.parse()?;
++ let where_clause: Option<WhereClause> = input.parse()?;
++
++ let content;
++ let brace_token = braced!(content in input);
++ let inner_attrs = content.call(Attribute::parse_inner)?;
++
++ let mut items = Vec::new();
++ while !content.is_empty() {
++ items.push(content.parse()?);
++ }
+
+- Ok(ItemImpl {
++ if is_const_impl {
++ Ok(None)
++ } else {
++ Ok(Some(ItemImpl {
+ attrs: private::attrs(outer_attrs, inner_attrs),
+ defaultness,
+ unsafety,
+@@ -2265,12 +2340,13 @@ pub mod parsing {
+ self_ty: Box::new(self_ty),
+ brace_token,
+ items,
+- })
++ }))
+ }
+ }
+
+ impl Parse for ImplItem {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let mut attrs = input.call(Attribute::parse_outer)?;
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+@@ -2284,28 +2360,38 @@ pub mod parsing {
+ None
+ };
+
+- let mut item = if lookahead.peek(Token![const]) {
+- ahead.parse::<Token![const]>()?;
++ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
++ input.parse().map(ImplItem::Method)
++ } else if lookahead.peek(Token![const]) {
++ let const_token: Token![const] = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+- if lookahead.peek(Ident) {
+- input.parse().map(ImplItem::Const)
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.advance_to(&ahead);
++ let ident: Ident = input.call(Ident::parse_any)?;
++ let colon_token: Token![:] = input.parse()?;
++ let ty: Type = input.parse()?;
++ if let Some(eq_token) = input.parse()? {
++ return Ok(ImplItem::Const(ImplItemConst {
++ attrs,
++ vis,
++ defaultness,
++ const_token,
++ ident,
++ colon_token,
++ ty,
++ eq_token,
++ expr: input.parse()?,
++ semi_token: input.parse()?,
++ }));
++ } else {
++ input.parse::<Token![;]>()?;
++ return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
++ }
+ } else {
+ Err(lookahead.error())
+ }
+- } else if lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![async])
+- || lookahead.peek(Token![extern])
+- || lookahead.peek(Token![fn])
+- {
+- input.parse().map(ImplItem::Method)
+ } else if lookahead.peek(Token![type]) {
+- input.parse().map(ImplItem::Type)
++ parse_impl_item_type(begin, input)
+ } else if vis.is_inherited() && defaultness.is_none() && lookahead.peek(existential) {
+ input.call(item_existential).map(ImplItem::Verbatim)
+ } else if vis.is_inherited()
+@@ -2313,7 +2399,6 @@ pub mod parsing {
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+@@ -2346,7 +2431,14 @@ pub mod parsing {
+ vis: input.parse()?,
+ defaultness: input.parse()?,
+ const_token: input.parse()?,
+- ident: input.parse()?,
++ ident: {
++ let lookahead = input.lookahead1();
++ if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
++ input.call(Ident::parse_any)?
++ } else {
++ return Err(lookahead.error());
++ }
++ },
+ colon_token: input.parse()?,
+ ty: input.parse()?,
+ eq_token: input.parse()?,
+@@ -2358,50 +2450,39 @@ pub mod parsing {
+
+ impl Parse for ImplItemMethod {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let outer_attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+- let constness: Option<Token![const]> = input.parse()?;
+- let asyncness: Option<Token![async]> = input.parse()?;
+- let unsafety: Option<Token![unsafe]> = input.parse()?;
+- let abi: Option<Abi> = input.parse()?;
+- let fn_token: Token![fn] = input.parse()?;
+- let ident: Ident = input.parse()?;
+- let generics: Generics = input.parse()?;
+-
+- let content;
+- let paren_token = parenthesized!(content in input);
+- let inputs = content.parse_terminated(FnArg::parse)?;
+-
+- let output: ReturnType = input.parse()?;
+- let where_clause: Option<WhereClause> = input.parse()?;
+-
+- let content;
+- let brace_token = braced!(content in input);
+- let inner_attrs = content.call(Attribute::parse_inner)?;
+- let stmts = content.call(Block::parse_within)?;
++ let sig = parse_signature(input)?;
++
++ let block = if let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ // Accept methods without a body in an impl block because
++ // rustc's *parser* does not reject them (the compilation error
++ // is emitted later than parsing) and it can be useful for macro
++ // DSLs.
++ let mut punct = Punct::new(';', Spacing::Alone);
++ punct.set_span(semi.span);
++ let tokens = TokenStream::from_iter(vec![TokenTree::Punct(punct)]);
++ Block {
++ brace_token: Brace::default(),
++ stmts: vec![Stmt::Item(Item::Verbatim(tokens))],
++ }
++ } else {
++ let content;
++ let brace_token = braced!(content in input);
++ attrs.extend(content.call(Attribute::parse_inner)?);
++ Block {
++ brace_token,
++ stmts: content.call(Block::parse_within)?,
++ }
++ };
+
+ Ok(ImplItemMethod {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ vis,
+ defaultness,
+- sig: Signature {
+- constness,
+- asyncness,
+- unsafety,
+- abi,
+- fn_token,
+- ident,
+- paren_token,
+- inputs,
+- output,
+- variadic: None,
+- generics: Generics {
+- where_clause,
+- ..generics
+- },
+- },
+- block: Block { brace_token, stmts },
++ sig,
++ block,
+ })
+ }
+ }
+@@ -2426,6 +2507,37 @@ pub mod parsing {
+ }
+ }
+
++ fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
++ let FlexibleItemType {
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ colon_token,
++ bounds: _,
++ ty,
++ semi_token,
++ } = input.parse()?;
++
++ if colon_token.is_some() || ty.is_none() {
++ Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
++ } else {
++ let (eq_token, ty) = ty.unwrap();
++ Ok(ImplItem::Type(ImplItemType {
++ attrs: Vec::new(),
++ vis,
++ defaultness,
++ type_token,
++ ident,
++ generics,
++ eq_token,
++ ty,
++ semi_token,
++ }))
++ }
++ }
++
+ impl Parse for ImplItemMacro {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+@@ -2471,6 +2583,7 @@ mod printing {
+
+ use crate::attr::FilterAttrs;
+ use crate::print::TokensOrDefault;
++ use crate::punctuated::Pair;
+
+ impl ToTokens for ItemExternCrate {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+@@ -2835,6 +2948,14 @@ mod printing {
+ self.vis.to_tokens(tokens);
+ self.defaultness.to_tokens(tokens);
+ self.sig.to_tokens(tokens);
++ if self.block.stmts.len() == 1 {
++ if let Stmt::Item(Item::Verbatim(verbatim)) = &self.block.stmts[0] {
++ if verbatim.to_string() == ";" {
++ verbatim.to_tokens(tokens);
++ return;
++ }
++ }
++ }
+ self.block.brace_token.surround(tokens, |tokens| {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.block.stmts);
+@@ -2905,6 +3026,33 @@ mod printing {
+ }
+ }
+
++ fn maybe_variadic_to_tokens(arg: &FnArg, tokens: &mut TokenStream) -> bool {
++ let arg = match arg {
++ FnArg::Typed(arg) => arg,
++ FnArg::Receiver(receiver) => {
++ receiver.to_tokens(tokens);
++ return false;
++ }
++ };
++
++ match arg.ty.as_ref() {
++ Type::Verbatim(ty) if ty.to_string() == "..." => {
++ match arg.pat.as_ref() {
++ Pat::Verbatim(pat) if pat.to_string() == "..." => {
++ tokens.append_all(arg.attrs.outer());
++ pat.to_tokens(tokens);
++ }
++ _ => arg.to_tokens(tokens),
++ }
++ true
++ }
++ _ => {
++ arg.to_tokens(tokens);
++ false
++ }
++ }
++ }
++
+ impl ToTokens for Signature {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.constness.to_tokens(tokens);
+@@ -2915,11 +3063,24 @@ mod printing {
+ self.ident.to_tokens(tokens);
+ self.generics.to_tokens(tokens);
+ self.paren_token.surround(tokens, |tokens| {
+- self.inputs.to_tokens(tokens);
+- if self.variadic.is_some() && !self.inputs.empty_or_trailing() {
+- <Token![,]>::default().to_tokens(tokens);
++ let mut last_is_variadic = false;
++ for input in self.inputs.pairs() {
++ match input {
++ Pair::Punctuated(input, comma) => {
++ maybe_variadic_to_tokens(input, tokens);
++ comma.to_tokens(tokens);
++ }
++ Pair::End(input) => {
++ last_is_variadic = maybe_variadic_to_tokens(input, tokens);
++ }
++ }
++ }
++ if self.variadic.is_some() && !last_is_variadic {
++ if !self.inputs.empty_or_trailing() {
++ <Token![,]>::default().to_tokens(tokens);
++ }
++ self.variadic.to_tokens(tokens);
+ }
+- self.variadic.to_tokens(tokens);
+ });
+ self.output.to_tokens(tokens);
+ self.generics.where_clause.to_tokens(tokens);
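The item.rs hunks above add a `Signature::receiver()` helper and document that typed `self` receivers such as `self: Box<Self>` are parsed as `FnArg::Typed`. A minimal usage sketch, assuming syn 1.0 with the "full" and "parsing" features enabled (illustrative only, not part of the patch):

    use syn::{FnArg, ImplItemMethod};

    fn main() {
        // Parse a method with a typed `self` receiver.
        let method: ImplItemMethod =
            syn::parse_str("fn take(self: Box<Self>) -> u8 { 0 }").expect("parse");

        // `self: Box<Self>` is stored as FnArg::Typed in the syntax tree,
        // but Signature::receiver() still reports it as the method receiver.
        match method.sig.receiver() {
            Some(FnArg::Receiver(_)) => println!("plain `self` or `&self` receiver"),
            Some(FnArg::Typed(_)) => println!("typed `self` receiver"),
            None => println!("free function, no receiver"),
        }
    }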
+diff --git a/third_party/rust/syn/src/keyword.rs b/third_party/rust/syn/src/keyword.rs
+deleted file mode 100644
+index e69de29bb2..0000000000
+diff --git a/third_party/rust/syn/src/lib.rs b/third_party/rust/syn/src/lib.rs
+index c8ada7e638..3da506731e 100644
+--- third_party/rust/syn/src/lib.rs
++++ third_party/rust/syn/src/lib.rs
+@@ -1,3 +1,11 @@
++//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
++//!
++//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
++//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
++//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
++//!
++//! <br>
++//!
+ //! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
+ //! tree of Rust source code.
+ //!
+@@ -62,8 +70,8 @@
+ //! ```
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use quote::quote;
+ //! use syn::{parse_macro_input, DeriveInput};
+@@ -242,35 +250,48 @@
+ //! dynamic library libproc_macro from rustc toolchain.
+
+ // Syn types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
+ #![deny(clippy::all, clippy::pedantic)]
+ // Ignored clippy lints.
+ #![allow(
+- clippy::block_in_if_condition_stmt,
++ clippy::blocks_in_if_conditions,
+ clippy::cognitive_complexity,
+ clippy::doc_markdown,
+ clippy::eval_order_dependence,
+ clippy::inherent_to_string,
+ clippy::large_enum_variant,
++ clippy::manual_non_exhaustive,
++ clippy::match_like_matches_macro,
++ clippy::match_on_vec_items,
++ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::never_loop,
+ clippy::suspicious_op_assign_impl,
+ clippy::too_many_arguments,
+- clippy::trivially_copy_pass_by_ref
++ clippy::trivially_copy_pass_by_ref,
++ clippy::unnecessary_unwrap
+ )]
+ // Ignored clippy_pedantic lints.
+ #![allow(
+ clippy::cast_possible_truncation,
++ clippy::default_trait_access,
+ clippy::empty_enum,
++ clippy::expl_impl_clone_on_copy,
+ clippy::if_not_else,
+ clippy::items_after_statements,
++ clippy::match_same_arms,
++ clippy::missing_errors_doc,
+ clippy::module_name_repetitions,
++ clippy::must_use_candidate,
++ clippy::option_if_let_else,
+ clippy::shadow_unrelated,
+ clippy::similar_names,
+ clippy::single_match_else,
++ clippy::too_many_lines,
+ clippy::unseparated_literal_suffix,
+ clippy::use_self,
+- clippy::used_underscore_binding
++ clippy::used_underscore_binding,
++ clippy::wildcard_imports
+ )]
+
+ #[cfg(all(
+@@ -284,7 +305,6 @@ extern crate unicode_xid;
+ #[cfg(feature = "printing")]
+ extern crate quote;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[macro_use]
+ mod macros;
+
+@@ -307,7 +327,6 @@ pub use crate::attr::{
+ AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
+ };
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod bigint;
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+@@ -364,9 +383,7 @@ pub use crate::file::File;
+ mod lifetime;
+ pub use crate::lifetime::Lifetime;
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ mod lit;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ pub use crate::lit::{
+ Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
+ };
+@@ -441,6 +458,9 @@ pub mod parse_macro_input;
+ #[cfg(all(feature = "parsing", feature = "printing"))]
+ pub mod spanned;
+
++#[cfg(all(feature = "parsing", feature = "full"))]
++mod whitespace;
++
+ mod gen {
+ /// Syntax tree traversal to walk a shared borrow of a syntax tree.
+ ///
+@@ -482,7 +502,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit"` feature.*
++ /// *This module is available only if Syn is built with the `"visit"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -603,7 +623,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"visit-mut"`
++ /// *This module is available only if Syn is built with the `"visit-mut"`
+ /// feature.*
+ ///
+ /// <br>
+@@ -702,7 +722,7 @@ mod gen {
+ /// /* ... */
+ /// ```
+ ///
+- /// *This module is available if Syn is built with the `"fold"` feature.*
++ /// *This module is available only if Syn is built with the `"fold"` feature.*
+ ///
+ /// <br>
+ ///
+@@ -744,6 +764,22 @@ mod gen {
+ #[rustfmt::skip]
+ pub mod fold;
+
++ #[cfg(feature = "clone-impls")]
++ #[rustfmt::skip]
++ mod clone;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod eq;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod hash;
++
++ #[cfg(feature = "extra-traits")]
++ #[rustfmt::skip]
++ mod debug;
++
+ #[cfg(any(feature = "full", feature = "derive"))]
+ #[path = "../gen_helper.rs"]
+ mod helper;
+@@ -757,6 +793,8 @@ pub mod export;
+ mod custom_keyword;
+ mod custom_punctuation;
+ mod sealed;
++mod span;
++mod thread;
+
+ #[cfg(feature = "parsing")]
+ mod lookahead;
+@@ -764,13 +802,15 @@ mod lookahead;
+ #[cfg(feature = "parsing")]
+ pub mod parse;
+
+-mod span;
++#[cfg(feature = "full")]
++mod reserved;
++
++#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
++mod verbatim;
+
+ #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+ mod print;
+
+-mod thread;
+-
+ ////////////////////////////////////////////////////////////////////////////////
+
+ #[allow(dead_code, non_camel_case_types)]
+@@ -800,14 +840,14 @@ pub use crate::error::{Error, Result};
+ ///
+ /// [`syn::parse2`]: parse2
+ ///
+-/// *This function is available if Syn is built with both the `"parsing"` and
++/// *This function is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ ///
+ /// # Examples
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use quote::quote;
+ /// use syn::DeriveInput;
+@@ -847,7 +887,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
+ ///
+ /// [`syn::parse`]: parse()
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ parse::Parser::parse2(T::parse, tokens)
+@@ -855,7 +895,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+
+ /// Parse a string of Rust code into the chosen syntax tree node.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` feature.*
++/// *This function is available only if Syn is built with the `"parsing"` feature.*
+ ///
+ /// # Hygiene
+ ///
+@@ -874,9 +914,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(feature = "parsing")]
+ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+@@ -894,7 +932,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ ///
+ /// If present, either of these would be an error using `from_str`.
+ ///
+-/// *This function is available if Syn is built with the `"parsing"` and
++/// *This function is available only if Syn is built with the `"parsing"` and
+ /// `"full"` features.*
+ ///
+ /// # Examples
+@@ -918,9 +956,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
+ /// Ok(())
+ /// }
+ /// #
+-/// # fn main() {
+-/// # run().unwrap();
+-/// # }
++/// # run().unwrap();
+ /// ```
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub fn parse_file(mut content: &str) -> Result<File> {
+@@ -931,13 +967,16 @@ pub fn parse_file(mut content: &str) -> Result<File> {
+ }
+
+ let mut shebang = None;
+- if content.starts_with("#!") && !content.starts_with("#![") {
+- if let Some(idx) = content.find('\n') {
+- shebang = Some(content[..idx].to_string());
+- content = &content[idx..];
+- } else {
+- shebang = Some(content.to_string());
+- content = "";
++ if content.starts_with("#!") {
++ let rest = whitespace::skip(&content[2..]);
++ if !rest.starts_with('[') {
++ if let Some(idx) = content.find('\n') {
++ shebang = Some(content[..idx].to_string());
++ content = &content[idx..];
++ } else {
++ shebang = Some(content.to_string());
++ content = "";
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/lifetime.rs b/third_party/rust/syn/src/lifetime.rs
+index d51c48e827..959cc5f9c6 100644
+--- third_party/rust/syn/src/lifetime.rs
++++ third_party/rust/syn/src/lifetime.rs
+@@ -18,10 +18,8 @@ use crate::lookahead;
+ /// - All following characters must be Unicode code points with the XID_Continue
+ /// property.
+ ///
+-/// *This type is available if Syn is built with the `"derive"` or `"full"`
++/// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+-#[cfg_attr(feature = "extra-traits", derive(Debug))]
+-#[derive(Clone)]
+ pub struct Lifetime {
+ pub apostrophe: Span,
+ pub ident: Ident,
+@@ -72,6 +70,15 @@ impl Display for Lifetime {
+ }
+ }
+
++impl Clone for Lifetime {
++ fn clone(&self) -> Self {
++ Lifetime {
++ apostrophe: self.apostrophe,
++ ident: self.ident.clone(),
++ }
++ }
++}
++
+ impl PartialEq for Lifetime {
+ fn eq(&self, other: &Lifetime) -> bool {
+ self.ident.eq(&other.ident)
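The lifetime.rs change drops the derive attributes in favor of a hand-written Clone impl; equality still compares only the identifier, not the span. A tiny sketch, assuming syn 1.0 default features plus a proc-macro2 dependency (illustrative only):

    use proc_macro2::Span;
    use syn::Lifetime;

    fn main() {
        let a = Lifetime::new("'a", Span::call_site());
        let b = Lifetime::new("'a", Span::call_site());
        // PartialEq on Lifetime compares only the identifier, not the spans.
        assert!(a == b);
    }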
+diff --git a/third_party/rust/syn/src/lit.rs b/third_party/rust/syn/src/lit.rs
+index f2209a2980..ee77e75bec 100644
+--- third_party/rust/syn/src/lit.rs
++++ third_party/rust/syn/src/lit.rs
+@@ -22,9 +22,6 @@ use crate::{Error, Result};
+ ast_enum_of_structs! {
+ /// A Rust literal such as a string or integer or boolean.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+- ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+@@ -33,7 +30,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Lit #manual_extra_traits {
++ pub enum Lit {
+ /// A UTF-8 string literal: `"foo"`.
+ Str(LitStr),
+
+@@ -64,61 +61,44 @@ ast_enum_of_structs! {
+
+ ast_struct! {
+ /// A UTF-8 string literal: `"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitStr #manual_extra_traits_debug {
+- repr: Box<LitStrRepr>,
++ pub struct LitStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+-struct LitStrRepr {
+- token: Literal,
+- suffix: Box<str>,
+-}
+-
+ ast_struct! {
+ /// A byte string literal: `b"foo"`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByteStr #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByteStr {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A byte literal: `b'f'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitByte #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitByte {
++ repr: Box<LitRepr>,
+ }
+ }
+
+ ast_struct! {
+ /// A character literal: `'a'`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitChar #manual_extra_traits_debug {
+- token: Literal,
++ pub struct LitChar {
++ repr: Box<LitRepr>,
+ }
+ }
+
++struct LitRepr {
++ token: Literal,
++ suffix: Box<str>,
++}
++
+ ast_struct! {
+ /// An integer literal: `1` or `1u16`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitInt #manual_extra_traits_debug {
++ pub struct LitInt {
+ repr: Box<LitIntRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitIntRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -129,15 +109,11 @@ ast_struct! {
+ /// A floating point literal: `1f64` or `1.0e10f64`.
+ ///
+ /// Must be finite. May not be infinite or NaN.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitFloat #manual_extra_traits_debug {
++ pub struct LitFloat {
+ repr: Box<LitFloatRepr>,
+ }
+ }
+
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ struct LitFloatRepr {
+ token: Literal,
+ digits: Box<str>,
+@@ -146,92 +122,27 @@ struct LitFloatRepr {
+
+ ast_struct! {
+ /// A boolean literal: `true` or `false`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or
+- /// `"full"` feature.*
+- pub struct LitBool #manual_extra_traits_debug {
++ pub struct LitBool {
+ pub value: bool,
+ pub span: Span,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Lit {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Lit {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Lit::Str(this), Lit::Str(other)) => this == other,
+- (Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
+- (Lit::Byte(this), Lit::Byte(other)) => this == other,
+- (Lit::Char(this), Lit::Char(other)) => this == other,
+- (Lit::Int(this), Lit::Int(other)) => this == other,
+- (Lit::Float(this), Lit::Float(other)) => this == other,
+- (Lit::Bool(this), Lit::Bool(other)) => this == other,
+- (Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Lit {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Lit::Str(lit) => {
+- hash.write_u8(0);
+- lit.hash(hash);
+- }
+- Lit::ByteStr(lit) => {
+- hash.write_u8(1);
+- lit.hash(hash);
+- }
+- Lit::Byte(lit) => {
+- hash.write_u8(2);
+- lit.hash(hash);
+- }
+- Lit::Char(lit) => {
+- hash.write_u8(3);
+- lit.hash(hash);
+- }
+- Lit::Int(lit) => {
+- hash.write_u8(4);
+- lit.hash(hash);
+- }
+- Lit::Float(lit) => {
+- hash.write_u8(5);
+- lit.hash(hash);
+- }
+- Lit::Bool(lit) => {
+- hash.write_u8(6);
+- lit.hash(hash);
+- }
+- Lit::Verbatim(lit) => {
+- hash.write_u8(7);
+- lit.to_string().hash(hash);
+- }
+- }
+- }
+-}
+-
+ impl LitStr {
+ pub fn new(value: &str, span: Span) -> Self {
+- let mut lit = Literal::string(value);
+- lit.set_span(span);
++ let mut token = Literal::string(value);
++ token.set_span(span);
+ LitStr {
+- repr: Box::new(LitStrRepr {
+- token: lit,
++ repr: Box::new(LitRepr {
++ token,
+ suffix: Box::<str>::default(),
+ }),
+ }
+ }
+
+ pub fn value(&self) -> String {
+- let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_str(&repr);
+ String::from(value)
+ }
+
+@@ -311,7 +222,7 @@ impl LitStr {
+ fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
+ match &mut token {
+ TokenTree::Group(g) => {
+- let stream = respan_token_stream(g.stream().clone(), span);
++ let stream = respan_token_stream(g.stream(), span);
+ *g = Group::new(g.delimiter(), stream);
+ g.set_span(span);
+ }
+@@ -345,19 +256,30 @@ impl LitByteStr {
+ pub fn new(value: &[u8], span: Span) -> Self {
+ let mut token = Literal::byte_string(value);
+ token.set_span(span);
+- LitByteStr { token }
++ LitByteStr {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> Vec<u8> {
+- value::parse_lit_byte_str(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte_str(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -365,19 +287,30 @@ impl LitByte {
+ pub fn new(value: u8, span: Span) -> Self {
+ let mut token = Literal::u8_suffixed(value);
+ token.set_span(span);
+- LitByte { token }
++ LitByte {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> u8 {
+- value::parse_lit_byte(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_byte(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+@@ -385,36 +318,52 @@ impl LitChar {
+ pub fn new(value: char, span: Span) -> Self {
+ let mut token = Literal::character(value);
+ token.set_span(span);
+- LitChar { token }
++ LitChar {
++ repr: Box::new(LitRepr {
++ token,
++ suffix: Box::<str>::default(),
++ }),
++ }
+ }
+
+ pub fn value(&self) -> char {
+- value::parse_lit_char(&self.token.to_string())
++ let repr = self.repr.token.to_string();
++ let (value, _suffix) = value::parse_lit_char(&repr);
++ value
+ }
+
+ pub fn span(&self) -> Span {
+- self.token.span()
++ self.repr.token.span()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+- self.token.set_span(span)
++ self.repr.token.set_span(span)
++ }
++
++ pub fn suffix(&self) -> &str {
++ &self.repr.suffix
+ }
+ }
+
+ impl LitInt {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_int(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitInt {
+- repr: Box::new(LitIntRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not an integer literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_int(repr) {
++ Some(parse) => parse,
++ None => panic!("Not an integer literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported integer literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -492,18 +441,23 @@ impl Display for LitInt {
+
+ impl LitFloat {
+ pub fn new(repr: &str, span: Span) -> Self {
+- if let Some((digits, suffix)) = value::parse_lit_float(repr) {
+- let mut token = value::to_literal(repr);
+- token.set_span(span);
+- LitFloat {
+- repr: Box::new(LitFloatRepr {
+- token,
+- digits,
+- suffix,
+- }),
+- }
+- } else {
+- panic!("Not a float literal: `{}`", repr);
++ let (digits, suffix) = match value::parse_lit_float(repr) {
++ Some(parse) => parse,
++ None => panic!("Not a float literal: `{}`", repr),
++ };
++
++ let mut token = match value::to_literal(repr, &digits, &suffix) {
++ Some(token) => token,
++ None => panic!("Unsupported float literal: `{}`", repr),
++ };
++
++ token.set_span(span);
++ LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
+ }
+ }
+
+@@ -575,7 +529,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByteStr")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -584,7 +538,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitByte")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -593,7 +547,7 @@ mod debug_impls {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter
+ .debug_struct("LitChar")
+- .field("token", &format_args!("{}", self.token))
++ .field("token", &format_args!("{}", self.repr.token))
+ .finish()
+ }
+ }
+@@ -626,15 +580,53 @@ mod debug_impls {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl Clone for LitRepr {
++ fn clone(&self) -> Self {
++ LitRepr {
++ token: self.token.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitIntRepr {
++ fn clone(&self) -> Self {
++ LitIntRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for LitFloatRepr {
++ fn clone(&self) -> Self {
++ LitFloatRepr {
++ token: self.token.clone(),
++ digits: self.digits.clone(),
++ suffix: self.suffix.clone(),
++ }
++ }
++}
++
+ macro_rules! lit_extra_traits {
+- ($ty:ident, $($field:ident).+) => {
+- #[cfg(feature = "extra-traits")]
+- impl Eq for $ty {}
++ ($ty:ident) => {
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $ty {
++ fn clone(&self) -> Self {
++ $ty {
++ repr: self.repr.clone(),
++ }
++ }
++ }
+
+ #[cfg(feature = "extra-traits")]
+ impl PartialEq for $ty {
+ fn eq(&self, other: &Self) -> bool {
+- self.$($field).+.to_string() == other.$($field).+.to_string()
++ self.repr.token.to_string() == other.repr.token.to_string()
+ }
+ }
+
+@@ -644,7 +636,7 @@ macro_rules! lit_extra_traits {
+ where
+ H: Hasher,
+ {
+- self.$($field).+.to_string().hash(state);
++ self.repr.token.to_string().hash(state);
+ }
+ }
+
+@@ -657,20 +649,23 @@ macro_rules! lit_extra_traits {
+ };
+ }
+
+-lit_extra_traits!(LitStr, repr.token);
+-lit_extra_traits!(LitByteStr, token);
+-lit_extra_traits!(LitByte, token);
+-lit_extra_traits!(LitChar, token);
+-lit_extra_traits!(LitInt, repr.token);
+-lit_extra_traits!(LitFloat, repr.token);
+-lit_extra_traits!(LitBool, value);
++lit_extra_traits!(LitStr);
++lit_extra_traits!(LitByteStr);
++lit_extra_traits!(LitByte);
++lit_extra_traits!(LitChar);
++lit_extra_traits!(LitInt);
++lit_extra_traits!(LitFloat);
++
++#[cfg(feature = "parsing")]
++#[doc(hidden)]
++#[allow(non_snake_case)]
++pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
++ match marker {}
++}
+
+ ast_enum! {
+ /// The style of a string literal, either plain quoted or a raw string like
+ /// `r##"data"##`.
+- ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
+ pub enum StrStyle #no_visit {
+ /// An ordinary string like `"data"`.
+ Cooked,
+@@ -691,7 +686,9 @@ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
++ use crate::buffer::Cursor;
+ use crate::parse::{Parse, ParseStream, Result};
++ use proc_macro2::Punct;
+
+ impl Parse for Lit {
+ fn parse(input: ParseStream) -> Result<Self> {
+@@ -699,25 +696,73 @@ pub mod parsing {
+ if let Some((lit, rest)) = cursor.literal() {
+ return Ok((Lit::new(lit), rest));
+ }
+- while let Some((ident, rest)) = cursor.ident() {
+- let value = if ident == "true" {
+- true
+- } else if ident == "false" {
+- false
+- } else {
+- break;
+- };
+- let lit_bool = LitBool {
+- value,
+- span: ident.span(),
+- };
+- return Ok((Lit::Bool(lit_bool), rest));
++
++ if let Some((ident, rest)) = cursor.ident() {
++ let value = ident == "true";
++ if value || ident == "false" {
++ let lit_bool = LitBool {
++ value,
++ span: ident.span(),
++ };
++ return Ok((Lit::Bool(lit_bool), rest));
++ }
+ }
++
++ if let Some((punct, rest)) = cursor.punct() {
++ if punct.as_char() == '-' {
++ if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
++ return Ok((lit, rest));
++ }
++ }
++ }
++
+ Err(cursor.error("expected literal"))
+ })
+ }
+ }
+
++ fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
++ let (lit, rest) = cursor.literal()?;
++
++ let mut span = neg.span();
++ span = span.join(lit.span()).unwrap_or(span);
++
++ let mut repr = lit.to_string();
++ repr.insert(0, '-');
++
++ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
++ if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
++ if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
++ token.set_span(span);
++ return Some((
++ Lit::Int(LitInt {
++ repr: Box::new(LitIntRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ));
++ }
++ }
++ }
++
++ let (digits, suffix) = value::parse_lit_float(&repr)?;
++ let mut token = value::to_literal(&repr, &digits, &suffix)?;
++ token.set_span(span);
++ Some((
++ Lit::Float(LitFloat {
++ repr: Box::new(LitFloatRepr {
++ token,
++ digits,
++ suffix,
++ }),
++ }),
++ rest,
++ ))
++ }
++
+ impl Parse for LitStr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let head = input.fork();
+@@ -803,19 +848,19 @@ mod printing {
+
+ impl ToTokens for LitByteStr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitByte {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for LitChar {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+- self.token.to_tokens(tokens);
++ self.repr.token.to_tokens(tokens);
+ }
+ }
+
+@@ -855,20 +900,29 @@ mod value {
+ b'"' | b'r' => {
+ let (_, suffix) = parse_lit_str(&repr);
+ return Lit::Str(LitStr {
+- repr: Box::new(LitStrRepr { token, suffix }),
++ repr: Box::new(LitRepr { token, suffix }),
+ });
+ }
+ b'b' => match byte(&repr, 1) {
+ b'"' | b'r' => {
+- return Lit::ByteStr(LitByteStr { token });
++ let (_, suffix) = parse_lit_byte_str(&repr);
++ return Lit::ByteStr(LitByteStr {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'\'' => {
+- return Lit::Byte(LitByte { token });
++ let (_, suffix) = parse_lit_byte(&repr);
++ return Lit::Byte(LitByte {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ _ => {}
+ },
+ b'\'' => {
+- return Lit::Char(LitChar { token });
++ let (_, suffix) = parse_lit_char(&repr);
++ return Lit::Char(LitChar {
++ repr: Box::new(LitRepr { token, suffix }),
++ });
+ }
+ b'0'..=b'9' | b'-' => {
+ if !(repr.ends_with("f32") || repr.ends_with("f64")) {
+@@ -905,6 +959,44 @@ mod value {
+
+ panic!("Unrecognized literal: `{}`", repr);
+ }
++
++ pub fn suffix(&self) -> &str {
++ match self {
++ Lit::Str(lit) => lit.suffix(),
++ Lit::ByteStr(lit) => lit.suffix(),
++ Lit::Byte(lit) => lit.suffix(),
++ Lit::Char(lit) => lit.suffix(),
++ Lit::Int(lit) => lit.suffix(),
++ Lit::Float(lit) => lit.suffix(),
++ Lit::Bool(_) | Lit::Verbatim(_) => "",
++ }
++ }
++
++ pub fn span(&self) -> Span {
++ match self {
++ Lit::Str(lit) => lit.span(),
++ Lit::ByteStr(lit) => lit.span(),
++ Lit::Byte(lit) => lit.span(),
++ Lit::Char(lit) => lit.span(),
++ Lit::Int(lit) => lit.span(),
++ Lit::Float(lit) => lit.span(),
++ Lit::Bool(lit) => lit.span,
++ Lit::Verbatim(lit) => lit.span(),
++ }
++ }
++
++ pub fn set_span(&mut self, span: Span) {
++ match self {
++ Lit::Str(lit) => lit.set_span(span),
++ Lit::ByteStr(lit) => lit.set_span(span),
++ Lit::Byte(lit) => lit.set_span(span),
++ Lit::Char(lit) => lit.set_span(span),
++ Lit::Int(lit) => lit.set_span(span),
++ Lit::Float(lit) => lit.set_span(span),
++ Lit::Bool(lit) => lit.span = span,
++ Lit::Verbatim(lit) => lit.set_span(span),
++ }
++ }
+ }
+
+ /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
+@@ -1004,19 +1096,18 @@ mod value {
+ pounds += 1;
+ }
+ assert_eq!(byte(s, pounds), b'"');
+- assert_eq!(byte(s, s.len() - pounds - 1), b'"');
+- for end in s[s.len() - pounds..].bytes() {
++ let close = s.rfind('"').unwrap();
++ for end in s[close + 1..close + 1 + pounds].bytes() {
+ assert_eq!(end, b'#');
+ }
+
+- let content = s[pounds + 1..s.len() - pounds - 1]
+- .to_owned()
+- .into_boxed_str();
+- let suffix = Box::<str>::default(); // todo
++ let content = s[pounds + 1..close].to_owned().into_boxed_str();
++ let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
+ (content, suffix)
+ }
+
+- pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
++ // Returns (content, suffix).
++ pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ match byte(s, 1) {
+ b'"' => parse_lit_byte_str_cooked(s),
+@@ -1028,25 +1119,25 @@ mod value {
+ // Clippy false positive
+ // https://github.com/rust-lang-nursery/rust-clippy/issues/2329
+ #[allow(clippy::needless_continue)]
+- fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'"');
+ s = &s[2..];
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s.as_bytes();
++ let mut v = s.as_bytes();
+
+ let mut out = Vec::new();
+ 'outer: loop {
+- let byte = match byte(s, 0) {
++ let byte = match byte(v, 0) {
+ b'"' => break,
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1057,10 +1148,10 @@ mod value {
+ b'\'' => b'\'',
+ b'"' => b'"',
+ b'\r' | b'\n' => loop {
+- let byte = byte(s, 0);
++ let byte = byte(v, 0);
+ let ch = char::from_u32(u32::from(byte)).unwrap();
+ if ch.is_whitespace() {
+- s = &s[1..];
++ v = &v[1..];
+ } else {
+ continue 'outer;
+ }
+@@ -1069,42 +1160,45 @@ mod value {
+ }
+ }
+ b'\r' => {
+- assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
+- s = &s[2..];
++ assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
++ v = &v[2..];
+ b'\n'
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+ out.push(byte);
+ }
+
+- assert_eq!(s, b"\"");
+- out
++ assert_eq!(byte(v, 0), b'"');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (out, suffix)
+ }
+
+- fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
++ fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+- String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
++ let (value, suffix) = parse_lit_str_raw(&s[1..]);
++ (String::from(value).into_bytes(), suffix)
+ }
+
+- pub fn parse_lit_byte(s: &str) -> u8 {
++ // Returns (value, suffix).
++ pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
+ assert_eq!(byte(s, 0), b'b');
+ assert_eq!(byte(s, 1), b'\'');
+
+ // We're going to want to have slices which don't respect codepoint boundaries.
+- let mut s = s[2..].as_bytes();
++ let mut v = s[2..].as_bytes();
+
+- let b = match byte(s, 0) {
++ let b = match byte(v, 0) {
+ b'\\' => {
+- let b = byte(s, 1);
+- s = &s[2..];
++ let b = byte(v, 1);
++ v = &v[2..];
+ match b {
+ b'x' => {
+- let (b, rest) = backslash_x(s);
+- s = rest;
++ let (b, rest) = backslash_x(v);
++ v = rest;
+ b
+ }
+ b'n' => b'\n',
+@@ -1118,16 +1212,18 @@ mod value {
+ }
+ }
+ b => {
+- s = &s[1..];
++ v = &v[1..];
+ b
+ }
+ };
+
+- assert_eq!(byte(s, 0), b'\'');
+- b
++ assert_eq!(byte(v, 0), b'\'');
++ let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
++ (b, suffix)
+ }
+
+- pub fn parse_lit_char(mut s: &str) -> char {
++ // Returns (value, suffix).
++ pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
+ assert_eq!(byte(s, 0), b'\'');
+ s = &s[1..];
+
+@@ -1163,8 +1259,9 @@ mod value {
+ ch
+ }
+ };
+- assert_eq!(s, "\'", "Expected end of char literal");
+- ch
++ assert_eq!(byte(s, 0), b'\'');
++ let suffix = s[1..].to_owned().into_boxed_str();
++ (ch, suffix)
+ }
+
+ fn backslash_x<S>(s: &S) -> (u8, &S)
+@@ -1334,7 +1431,11 @@ mod value {
+ }
+ b'e' | b'E' => {
+ if has_e {
+- return None;
++ if has_exponent {
++ break;
++ } else {
++ return None;
++ }
+ }
+ has_e = true;
+ bytes[write] = b'e';
+@@ -1372,11 +1473,33 @@ mod value {
+ }
+ }
+
+- pub fn to_literal(s: &str) -> Literal {
+- let stream = s.parse::<TokenStream>().unwrap();
+- match stream.into_iter().next().unwrap() {
+- TokenTree::Literal(l) => l,
+- _ => unreachable!(),
++ pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
++ if repr.starts_with('-') {
++ if suffix == "f64" {
++ digits.parse().ok().map(Literal::f64_suffixed)
++ } else if suffix == "f32" {
++ digits.parse().ok().map(Literal::f32_suffixed)
++ } else if suffix == "i64" {
++ digits.parse().ok().map(Literal::i64_suffixed)
++ } else if suffix == "i32" {
++ digits.parse().ok().map(Literal::i32_suffixed)
++ } else if suffix == "i16" {
++ digits.parse().ok().map(Literal::i16_suffixed)
++ } else if suffix == "i8" {
++ digits.parse().ok().map(Literal::i8_suffixed)
++ } else if !suffix.is_empty() {
++ None
++ } else if digits.contains('.') {
++ digits.parse().ok().map(Literal::f64_unsuffixed)
++ } else {
++ digits.parse().ok().map(Literal::i64_unsuffixed)
++ }
++ } else {
++ let stream = repr.parse::<TokenStream>().unwrap();
++ match stream.into_iter().next().unwrap() {
++ TokenTree::Literal(l) => Some(l),
++ _ => unreachable!(),
++ }
+ }
+ }
+ }
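A minimal sketch of the caller-visible additions from the lit.rs changes above: every literal kind now exposes its suffix, and the `Lit` parser accepts negative numeric literals (illustration only, assuming syn's default features):

```rust
// Illustration only, assuming syn's default features.
fn main() -> syn::Result<()> {
    // Every literal kind now exposes its suffix, if any.
    let lit: syn::Lit = syn::parse_str("1.5f32")?;
    assert_eq!(lit.suffix(), "f32");

    // Negative numeric literals are now accepted by the Lit parser.
    let neg: syn::LitInt = syn::parse_str("-7i64")?;
    assert_eq!(neg.base10_parse::<i64>()?, -7);
    assert_eq!(neg.suffix(), "i64");
    Ok(())
}
```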
+diff --git a/third_party/rust/syn/src/mac.rs b/third_party/rust/syn/src/mac.rs
+index 6c3dcae92a..de288a34e1 100644
+--- third_party/rust/syn/src/mac.rs
++++ third_party/rust/syn/src/mac.rs
+@@ -2,21 +2,17 @@ use super::*;
+ use crate::token::{Brace, Bracket, Paren};
+ use proc_macro2::TokenStream;
+ #[cfg(feature = "parsing")]
+-use proc_macro2::{Delimiter, Span, TokenTree};
++use proc_macro2::{Delimiter, Group, Span, TokenTree};
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseStream, Parser, Result};
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// A macro invocation: `println!("{}", mac)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Macro #manual_extra_traits {
++ pub struct Macro {
+ pub path: Path,
+ pub bang_token: Token![!],
+ pub delimiter: MacroDelimiter,
+@@ -27,7 +23,7 @@ ast_struct! {
+ ast_enum! {
+ /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum MacroDelimiter {
+ Paren(Paren),
+@@ -36,39 +32,20 @@ ast_enum! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Macro {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Macro {
+- fn eq(&self, other: &Self) -> bool {
+- self.path == other.path
+- && self.bang_token == other.bang_token
+- && self.delimiter == other.delimiter
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Macro {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.path.hash(state);
+- self.bang_token.hash(state);
+- self.delimiter.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
+- match delimiter {
++fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
++ let delimiter = match macro_delimiter {
++ MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
++ MacroDelimiter::Brace(_) => Delimiter::Brace,
++ MacroDelimiter::Bracket(_) => Delimiter::Bracket,
++ };
++ let mut group = Group::new(delimiter, TokenStream::new());
++ group.set_span(match macro_delimiter {
+ MacroDelimiter::Paren(token) => token.span,
+ MacroDelimiter::Brace(token) => token.span,
+ MacroDelimiter::Bracket(token) => token.span,
+- }
++ });
++ group.span_close()
+ }
+
+ impl Macro {
+@@ -163,9 +140,7 @@ impl Macro {
+ /// given parser.
+ #[cfg(feature = "parsing")]
+ pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+- // TODO: see if we can get a group.span_close() span in here as the
+- // scope, rather than the span of the whole group.
+- let scope = delimiter_span(&self.delimiter);
++ let scope = delimiter_span_close(&self.delimiter);
+ crate::parse::parse_scoped(parser, scope, self.tokens.clone())
+ }
+ }
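A brief sketch of `Macro::parse_body_with`, whose end-of-body errors are now reported on the closing delimiter via `delimiter_span_close` (illustration only, assuming syn's default features):

```rust
use syn::punctuated::Punctuated;
use syn::{Expr, Macro, Token};

fn main() -> syn::Result<()> {
    let mac: Macro = syn::parse_str("assert_eq!(1 + 1, 2)")?;
    // Errors produced while parsing the body (e.g. a missing argument) are
    // now spanned on the closing `)` rather than on the whole group.
    let args = mac.parse_body_with(Punctuated::<Expr, Token![,]>::parse_terminated)?;
    assert_eq!(args.len(), 2);
    Ok(())
}
```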
+diff --git a/third_party/rust/syn/src/macros.rs b/third_party/rust/syn/src/macros.rs
+index 9cac5c15df..8060224381 100644
+--- third_party/rust/syn/src/macros.rs
++++ third_party/rust/syn/src/macros.rs
+@@ -4,15 +4,11 @@ macro_rules! ast_struct {
+ struct $name:ident #full $($rest:tt)*
+ ) => {
+ #[cfg(feature = "full")]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+
+ #[cfg(not(feature = "full"))]
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name {
+- _noconstruct: (),
++ _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
+ }
+
+ #[cfg(all(not(feature = "full"), feature = "printing"))]
+@@ -23,29 +19,10 @@ macro_rules! ast_struct {
+ }
+ };
+
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+- (
+- [$($attrs_pub:tt)*]
+- struct $name:ident #manual_extra_traits_debug $($rest:tt)*
+- ) => {
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* struct $name $($rest)*
+- };
+-
+ (
+ [$($attrs_pub:tt)*]
+ struct $name:ident $($rest:tt)*
+ ) => {
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* struct $name $($rest)*
+ };
+
+@@ -63,21 +40,10 @@ macro_rules! ast_enum {
+ ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
+ );
+
+- (
+- [$($attrs_pub:tt)*]
+- enum $name:ident #manual_extra_traits $($rest:tt)*
+- ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+- $($attrs_pub)* enum $name $($rest)*
+- );
+-
+ (
+ [$($attrs_pub:tt)*]
+ enum $name:ident $($rest:tt)*
+ ) => (
+- #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
+- #[cfg_attr(feature = "clone-impls", derive(Clone))]
+ $($attrs_pub)* enum $name $($rest)*
+ );
+
+@@ -120,15 +86,9 @@ macro_rules! ast_enum_of_structs_impl {
+ check_keyword_matches!(pub $pub);
+ check_keyword_matches!(enum $enum);
+
+- $(
+- $(
+- impl From<$member> for $name {
+- fn from(e: $member) -> $name {
+- $name::$variant(e)
+- }
+- }
+- )*
+- )*
++ $($(
++ ast_enum_from_struct!($name::$variant, $member);
++ )*)*
+
+ #[cfg(feature = "printing")]
+ generate_to_tokens! {
+@@ -140,6 +100,19 @@ macro_rules! ast_enum_of_structs_impl {
+ };
+ }
+
++macro_rules! ast_enum_from_struct {
++ // No From<TokenStream> for verbatim variants.
++ ($name:ident::Verbatim, $member:ident) => {};
++
++ ($name:ident::$variant:ident, $member:ident) => {
++ impl From<$member> for $name {
++ fn from(e: $member) -> $name {
++ $name::$variant(e)
++ }
++ }
++ };
++}
++
+ #[cfg(feature = "printing")]
+ macro_rules! generate_to_tokens {
+ (do_not_generate_to_tokens $($foo:tt)*) => ();
+diff --git a/third_party/rust/syn/src/op.rs b/third_party/rust/syn/src/op.rs
+index 49fb853c79..d254673b40 100644
+--- third_party/rust/syn/src/op.rs
++++ third_party/rust/syn/src/op.rs
+@@ -1,9 +1,8 @@
+ ast_enum! {
+ /// A binary operator: `+`, `+=`, `&`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum BinOp {
+ /// The `+` operator (addition)
+ Add(Token![+]),
+@@ -67,9 +66,8 @@ ast_enum! {
+ ast_enum! {
+ /// A unary operator: `*`, `!`, `-`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum UnOp {
+ /// The `*` operator for dereferencing
+ Deref(Token![*]),
+diff --git a/third_party/rust/syn/src/parse.rs b/third_party/rust/syn/src/parse.rs
+index 7c7b194308..abb4c4c14f 100644
+--- third_party/rust/syn/src/parse.rs
++++ third_party/rust/syn/src/parse.rs
+@@ -26,8 +26,8 @@
+ //! [`parse_macro_input!`]: ../macro.parse_macro_input.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
+ //! use syn::parse::{Parse, ParseStream};
+@@ -109,9 +109,7 @@
+ //! # Ok(())
+ //! # }
+ //! #
+-//! # fn main() {
+-//! # run_parser().unwrap();
+-//! # }
++//! # run_parser().unwrap();
+ //! ```
+ //!
+ //! The [`parse_quote!`] macro also uses this approach.
+@@ -155,8 +153,8 @@
+ //! [`Parser`]: trait.Parser.html
+ //!
+ //! ```
+-//! extern crate proc_macro;
+-//!
++//! # extern crate proc_macro;
++//! #
+ //! use proc_macro::TokenStream;
+ //! use syn::parse::Parser;
+ //! use syn::punctuated::Punctuated;
+@@ -186,7 +184,7 @@
+ //!
+ //! ---
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ #[path = "discouraged.rs"]
+ pub mod discouraged;
+@@ -217,6 +215,11 @@ pub use crate::lookahead::{Lookahead1, Peek};
+
+ /// Parsing interface implemented by all types that can be parsed in a default
+ /// way from a token stream.
++///
++/// Refer to the [module documentation] for details about implementing and using
++/// the `Parse` trait.
++///
++/// [module documentation]: self
+ pub trait Parse: Sized {
+ fn parse(input: ParseStream) -> Result<Self>;
+ }
+@@ -263,13 +266,16 @@ pub struct ParseBuffer<'a> {
+ // the cell.
+ cell: Cell<Cursor<'static>>,
+ marker: PhantomData<Cursor<'a>>,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
+ }
+
+ impl<'a> Drop for ParseBuffer<'a> {
+ fn drop(&mut self) {
+- if !self.is_empty() && self.unexpected.get().is_none() {
+- self.unexpected.set(Some(self.cursor().span()));
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
++ let (inner, old_span) = inner_unexpected(self);
++ if old_span.is_none() {
++ inner.set(Unexpected::Some(unexpected_span));
++ }
+ }
+ }
+ }
+@@ -324,15 +330,12 @@ impl<'a> Debug for ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+-/// # fn main() {
+-/// # use syn::parse::Parser;
+-/// # let remainder = remainder_after_skipping_past_next_at
+-/// # .parse_str("a @ b c")
+-/// # .unwrap();
+-/// # assert_eq!(remainder.to_string(), "b c");
+-/// # }
++/// # use syn::parse::Parser;
++/// # let remainder = remainder_after_skipping_past_next_at
++/// # .parse_str("a @ b c")
++/// # .unwrap();
++/// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+-#[derive(Copy, Clone)]
+ pub struct StepCursor<'c, 'a> {
+ scope: Span,
+ // This field is covariant in 'c.
+@@ -356,6 +359,14 @@ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
+ }
+ }
+
++impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
++
++impl<'c, 'a> Clone for StepCursor<'c, 'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
+ impl<'c, 'a> StepCursor<'c, 'a> {
+ /// Triggers an error at the current position of the parse stream.
+ ///
+@@ -375,36 +386,81 @@ pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<
+ unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
+ }
+
+-fn skip(input: ParseStream) -> bool {
+- input
+- .step(|cursor| {
+- if let Some((_lifetime, rest)) = cursor.lifetime() {
+- Ok((true, rest))
+- } else if let Some((_token, rest)) = cursor.token_tree() {
+- Ok((true, rest))
+- } else {
+- Ok((false, *cursor))
+- }
+- })
+- .unwrap()
+-}
+-
+ pub(crate) fn new_parse_buffer(
+ scope: Span,
+ cursor: Cursor,
+- unexpected: Rc<Cell<Option<Span>>>,
++ unexpected: Rc<Cell<Unexpected>>,
+ ) -> ParseBuffer {
+ ParseBuffer {
+ scope,
+ // See comment on `cell` in the struct definition.
+ cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
+ marker: PhantomData,
+- unexpected,
++ unexpected: Cell::new(Some(unexpected)),
++ }
++}
++
++pub(crate) enum Unexpected {
++ None,
++ Some(Span),
++ Chain(Rc<Cell<Unexpected>>),
++}
++
++impl Default for Unexpected {
++ fn default() -> Self {
++ Unexpected::None
+ }
+ }
+
+-pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
+- buffer.unexpected.clone()
++impl Clone for Unexpected {
++ fn clone(&self) -> Self {
++ match self {
++ Unexpected::None => Unexpected::None,
++ Unexpected::Some(span) => Unexpected::Some(*span),
++ Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
++ }
++ }
++}
++
++// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
++// swapping in a None is cheap.
++fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
++ let prev = cell.take();
++ let ret = prev.clone();
++ cell.set(prev);
++ ret
++}
++
++fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
++ let mut unexpected = get_unexpected(buffer);
++ loop {
++ match cell_clone(&unexpected) {
++ Unexpected::None => return (unexpected, None),
++ Unexpected::Some(span) => return (unexpected, Some(span)),
++ Unexpected::Chain(next) => unexpected = next,
++ }
++ }
++}
++
++pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
++ cell_clone(&buffer.unexpected).unwrap()
++}
++
++fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
++ if cursor.eof() {
++ return None;
++ }
++ while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
++ if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
++ return Some(unexpected);
++ }
++ cursor = rest;
++ }
++ if cursor.eof() {
++ None
++ } else {
++ Some(cursor.span())
++ }
+ }
+
+ impl<'a> ParseBuffer<'a> {
+@@ -566,14 +622,17 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// ```
+ pub fn peek2<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor().skip().map_or(false, T::Token::peek)
+ }
+
+ /// Looks at the third-next token in the parse stream.
+ pub fn peek3<T: Peek>(&self, token: T) -> bool {
+- let ahead = self.fork();
+- skip(&ahead) && skip(&ahead) && ahead.peek(token)
++ let _ = token;
++ self.cursor()
++ .skip()
++ .and_then(Cursor::skip)
++ .map_or(false, T::Token::peek)
+ }
+
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+@@ -615,12 +674,10 @@ impl<'a> ParseBuffer<'a> {
+ /// }
+ /// }
+ /// #
+- /// # fn main() {
+- /// # let input = quote! {
+- /// # struct S(A, B);
+- /// # };
+- /// # syn::parse2::<TupleStruct>(input).unwrap();
+- /// # }
++ /// # let input = quote! {
++ /// # struct S(A, B);
++ /// # };
++ /// # syn::parse2::<TupleStruct>(input).unwrap();
+ /// ```
+ pub fn parse_terminated<T, P: Parse>(
+ &self,
+@@ -847,8 +904,8 @@ impl<'a> ParseBuffer<'a> {
+ cell: self.cell.clone(),
+ marker: PhantomData,
+ // Not the parent's unexpected. Nothing cares whether the clone
+- // parses all the way.
+- unexpected: Rc::new(Cell::new(None)),
++ // parses all the way unless we `advance_to`.
++ unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
+ }
+ }
+
+@@ -923,13 +980,11 @@ impl<'a> ParseBuffer<'a> {
+ /// # input.parse()
+ /// # }
+ /// #
+- /// # fn main() {
+- /// # use syn::parse::Parser;
+- /// # let remainder = remainder_after_skipping_past_next_at
+- /// # .parse_str("a @ b c")
+- /// # .unwrap();
+- /// # assert_eq!(remainder.to_string(), "b c");
+- /// # }
++ /// # use syn::parse::Parser;
++ /// # let remainder = remainder_after_skipping_past_next_at
++ /// # .parse_str("a @ b c")
++ /// # .unwrap();
++ /// # assert_eq!(remainder.to_string(), "b c");
+ /// ```
+ pub fn step<F, R>(&self, function: F) -> Result<R>
+ where
+@@ -961,6 +1016,18 @@ impl<'a> ParseBuffer<'a> {
+ Ok(node)
+ }
+
++ /// Returns the `Span` of the next token in the parse stream, or
++ /// `Span::call_site()` if this parse stream has completely exhausted its
++ /// input `TokenStream`.
++ pub fn span(&self) -> Span {
++ let cursor = self.cursor();
++ if cursor.eof() {
++ self.scope
++ } else {
++ crate::buffer::open_span_of_group(cursor)
++ }
++ }
++
+ /// Provides low-level access to the token representation underlying this
+ /// parse stream.
+ ///
+@@ -971,7 +1038,7 @@ impl<'a> ParseBuffer<'a> {
+ }
+
+ fn check_unexpected(&self) -> Result<()> {
+- match self.unexpected.get() {
++ match inner_unexpected(self).1 {
+ Some(span) => Err(Error::new(span, "unexpected token")),
+ None => Ok(()),
+ }
+@@ -1048,7 +1115,7 @@ impl Parse for Literal {
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait Parser: Sized {
+ type Output;
+
+@@ -1063,7 +1130,7 @@ pub trait Parser: Sized {
+ /// This function will check that the input is fully parsed. If there are
+ /// any unparsed tokens at the end of the stream, an error is returned.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+@@ -1088,6 +1155,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let _ = scope;
+ self.parse2(tokens)
+@@ -1095,6 +1163,7 @@ pub trait Parser: Sized {
+
+ // Not public API.
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ input.parse().and_then(|tokens| self.parse2(tokens))
+ }
+@@ -1103,7 +1172,7 @@ pub trait Parser: Sized {
+ fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
+ let scope = Span::call_site();
+ let cursor = tokens.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ new_parse_buffer(scope, cursor, unexpected)
+ }
+
+@@ -1118,38 +1187,42 @@ where
+ let state = tokens_to_parse_buffer(&buf);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
+ let buf = TokenBuffer::new2(tokens);
+ let cursor = buf.begin();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let state = new_parse_buffer(scope, cursor, unexpected);
+ let node = self(&state)?;
+ state.check_unexpected()?;
+- if state.is_empty() {
+- Ok(node)
++ if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
++ Err(Error::new(unexpected_span, "unexpected token"))
+ } else {
+- Err(state.error("unexpected token"))
++ Ok(node)
+ }
+ }
+
+ #[doc(hidden)]
++ #[cfg(any(feature = "full", feature = "derive"))]
+ fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
+ self(input)
+ }
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
+ f.__parse_scoped(scope, tokens)
+ }
+
++#[cfg(any(feature = "full", feature = "derive"))]
+ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
+ f.__parse_stream(input)
+ }
+@@ -1160,8 +1233,8 @@ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Out
+ /// provided any attribute args.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::parse_macro_input;
+ /// use syn::parse::Nothing;
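A short sketch of the parse-stream changes above: `peek2`/`peek3` now look ahead on the token cursor directly, and the new `span()` method reports the position of the next token for error messages (illustration only; the `KeyValue` type is made up for the example, assuming syn with the `"parsing"` feature):

```rust
use syn::parse::{Parse, ParseStream};
use syn::{Ident, Result, Token};

struct KeyValue {
    key: Ident,
    value: Ident,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> Result<Self> {
        // peek2 looks at the second token without forking the stream.
        if !input.peek2(Token![=]) {
            // span() points at the next token, or at the surrounding scope
            // once the stream is exhausted.
            return Err(syn::Error::new(input.span(), "expected `key = value`"));
        }
        let key: Ident = input.parse()?;
        let _eq: Token![=] = input.parse()?;
        let value: Ident = input.parse()?;
        Ok(KeyValue { key, value })
    }
}

fn main() {
    let kv: KeyValue = syn::parse_str("foo = bar").unwrap();
    assert_eq!(kv.key.to_string(), "foo");
    assert_eq!(kv.value.to_string(), "bar");
}
```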
+diff --git a/third_party/rust/syn/src/parse_macro_input.rs b/third_party/rust/syn/src/parse_macro_input.rs
+index d6e0725c17..c8fc1cea37 100644
+--- third_party/rust/syn/src/parse_macro_input.rs
++++ third_party/rust/syn/src/parse_macro_input.rs
+@@ -16,8 +16,8 @@
+ /// #\[proc_macro_attribute\] attribute.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, Result};
+ /// use syn::parse::{Parse, ParseStream};
+@@ -43,7 +43,31 @@
+ /// # "".parse().unwrap()
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++///
++/// <br>
++///
++/// # Expansion
++///
++/// `parse_macro_input!($variable as $Type)` expands to something like:
++///
++/// ```no_run
++/// # extern crate proc_macro;
++/// #
++/// # macro_rules! doc_test {
++/// # ($variable:ident as $Type:ty) => {
++/// match syn::parse::<$Type>($variable) {
++/// Ok(syntax_tree) => syntax_tree,
++/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
++/// }
++/// # };
++/// # }
++/// #
++/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
++/// # let _ = doc_test!(input as syn::Ident);
++/// # proc_macro::TokenStream::new()
++/// # }
++/// ```
++#[macro_export]
+ macro_rules! parse_macro_input {
+ ($tokenstream:ident as $ty:ty) => {
+ match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
+@@ -54,7 +78,7 @@ macro_rules! parse_macro_input {
+ }
+ };
+ ($tokenstream:ident) => {
+- parse_macro_input!($tokenstream as _)
++ $crate::parse_macro_input!($tokenstream as _)
+ };
+ }
+
+diff --git a/third_party/rust/syn/src/parse_quote.rs b/third_party/rust/syn/src/parse_quote.rs
+index 18a47b95c7..66aa818cd0 100644
+--- third_party/rust/syn/src/parse_quote.rs
++++ third_party/rust/syn/src/parse_quote.rs
+@@ -24,7 +24,7 @@
+ /// }
+ /// ```
+ ///
+-/// *This macro is available if Syn is built with the `"parsing"` feature,
++/// *This macro is available only if Syn is built with the `"parsing"` feature,
+ /// although interpolation of syntax tree nodes into the quoted tokens is only
+ /// supported if Syn is built with the `"printing"` feature as well.*
+ ///
+@@ -56,8 +56,10 @@
+ /// or inner like `#![...]`
+ /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
+ /// `P` with optional trailing punctuation
++/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
+ ///
+ /// [`Punctuated<T, P>`]: punctuated::Punctuated
++/// [`Vec<Stmt>`]: Block::parse_within
+ ///
+ /// # Panics
+ ///
+@@ -67,7 +69,7 @@
+ //
+ // TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
+ // https://github.com/rust-lang/rust/issues/62834
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! parse_quote {
+ ($($tt:tt)*) => {
+ $crate::parse_quote::parse(
+@@ -112,6 +114,8 @@ impl<T: Parse> ParseQuote for T {
+ use crate::punctuated::Punctuated;
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use crate::{attr, Attribute};
++#[cfg(feature = "full")]
++use crate::{Block, Stmt};
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl ParseQuote for Attribute {
+@@ -129,3 +133,10 @@ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
+ Self::parse_terminated(input)
+ }
+ }
++
++#[cfg(feature = "full")]
++impl ParseQuote for Vec<Stmt> {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Block::parse_within(input)
++ }
++}
+diff --git a/third_party/rust/syn/src/pat.rs b/third_party/rust/syn/src/pat.rs
+index 9371e05493..e9576a2361 100644
+--- third_party/rust/syn/src/pat.rs
++++ third_party/rust/syn/src/pat.rs
+@@ -1,16 +1,12 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// A pattern in a local binding, function signature, match expression, or
+ /// various other places.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Pat #manual_extra_traits {
++ pub enum Pat {
+ /// A box pattern: `box v`.
+ Box(PatBox),
+
+@@ -86,7 +82,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A box pattern: `box v`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatBox {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+@@ -97,7 +93,10 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// It may also be a unit struct or struct variant (e.g. `None`), or a
++ /// constant; these cannot be distinguished syntactically.
++ ///
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatIdent {
+ pub attrs: Vec<Attribute>,
+ pub by_ref: Option<Token![ref]>,
+@@ -113,7 +112,7 @@ ast_struct! {
+ /// This holds an `Expr` rather than a `Lit` because negative numbers
+ /// are represented as an `Expr::Unary`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatLit {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+@@ -123,7 +122,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in pattern position.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatMacro {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+@@ -133,7 +132,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any one of a set of cases.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatOr {
+ pub attrs: Vec<Attribute>,
+ pub leading_vert: Option<Token![|]>,
+@@ -150,7 +149,7 @@ ast_struct! {
+ /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
+ /// associated constants.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+@@ -161,7 +160,7 @@ ast_struct! {
+ ast_struct! {
+ /// A range pattern: `1..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRange {
+ pub attrs: Vec<Attribute>,
+ pub lo: Box<Expr>,
+@@ -173,7 +172,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference pattern: `&mut var`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatReference {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+@@ -185,7 +184,7 @@ ast_struct! {
+ ast_struct! {
+ /// The dots in a tuple or slice pattern: `[0, 1, ..]`
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatRest {
+ pub attrs: Vec<Attribute>,
+ pub dot2_token: Token![..],
+@@ -195,7 +194,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatSlice {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+@@ -206,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A struct or struct variant pattern: `Variant { x, y, .. }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -219,7 +218,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple pattern: `(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTuple {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+@@ -230,7 +229,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatTupleStruct {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+@@ -241,7 +240,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type ascription pattern: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatType {
+ pub attrs: Vec<Attribute>,
+ pub pat: Box<Pat>,
+@@ -253,7 +252,7 @@ ast_struct! {
+ ast_struct! {
+ /// A pattern that matches any value: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct PatWild {
+ pub attrs: Vec<Attribute>,
+ pub underscore_token: Token![_],
+@@ -266,7 +265,7 @@ ast_struct! {
+ /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
+ /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldPat {
+ pub attrs: Vec<Attribute>,
+ pub member: Member,
+@@ -275,122 +274,17 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Pat {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Pat {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Pat::Box(this), Pat::Box(other)) => this == other,
+- (Pat::Ident(this), Pat::Ident(other)) => this == other,
+- (Pat::Lit(this), Pat::Lit(other)) => this == other,
+- (Pat::Macro(this), Pat::Macro(other)) => this == other,
+- (Pat::Or(this), Pat::Or(other)) => this == other,
+- (Pat::Path(this), Pat::Path(other)) => this == other,
+- (Pat::Range(this), Pat::Range(other)) => this == other,
+- (Pat::Reference(this), Pat::Reference(other)) => this == other,
+- (Pat::Rest(this), Pat::Rest(other)) => this == other,
+- (Pat::Slice(this), Pat::Slice(other)) => this == other,
+- (Pat::Struct(this), Pat::Struct(other)) => this == other,
+- (Pat::Tuple(this), Pat::Tuple(other)) => this == other,
+- (Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
+- (Pat::Type(this), Pat::Type(other)) => this == other,
+- (Pat::Verbatim(this), Pat::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Pat::Wild(this), Pat::Wild(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Pat {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Pat::Box(pat) => {
+- hash.write_u8(0);
+- pat.hash(hash);
+- }
+- Pat::Ident(pat) => {
+- hash.write_u8(1);
+- pat.hash(hash);
+- }
+- Pat::Lit(pat) => {
+- hash.write_u8(2);
+- pat.hash(hash);
+- }
+- Pat::Macro(pat) => {
+- hash.write_u8(3);
+- pat.hash(hash);
+- }
+- Pat::Or(pat) => {
+- hash.write_u8(4);
+- pat.hash(hash);
+- }
+- Pat::Path(pat) => {
+- hash.write_u8(5);
+- pat.hash(hash);
+- }
+- Pat::Range(pat) => {
+- hash.write_u8(6);
+- pat.hash(hash);
+- }
+- Pat::Reference(pat) => {
+- hash.write_u8(7);
+- pat.hash(hash);
+- }
+- Pat::Rest(pat) => {
+- hash.write_u8(8);
+- pat.hash(hash);
+- }
+- Pat::Slice(pat) => {
+- hash.write_u8(9);
+- pat.hash(hash);
+- }
+- Pat::Struct(pat) => {
+- hash.write_u8(10);
+- pat.hash(hash);
+- }
+- Pat::Tuple(pat) => {
+- hash.write_u8(11);
+- pat.hash(hash);
+- }
+- Pat::TupleStruct(pat) => {
+- hash.write_u8(12);
+- pat.hash(hash);
+- }
+- Pat::Type(pat) => {
+- hash.write_u8(13);
+- pat.hash(hash);
+- }
+- Pat::Verbatim(pat) => {
+- hash.write_u8(14);
+- TokenStreamHelper(pat).hash(hash);
+- }
+- Pat::Wild(pat) => {
+- hash.write_u8(15);
+- pat.hash(hash);
+- }
+- Pat::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ #[cfg(feature = "parsing")]
+-mod parsing {
++pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
+- use crate::parse::{Parse, ParseStream, Result};
++ use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
+ use crate::path;
+
+ impl Parse for Pat {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let begin = input.fork();
+ let lookahead = input.lookahead1();
+ if lookahead.peek(Ident)
+ && ({
+@@ -411,7 +305,6 @@ mod parsing {
+ || lookahead.peek(Token![<])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ pat_path_or_macro_or_struct_or_range(input)
+@@ -434,7 +327,7 @@ mod parsing {
+ } else if lookahead.peek(token::Bracket) {
+ input.call(pat_slice).map(Pat::Slice)
+ } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
+- input.call(pat_rest).map(Pat::Rest)
++ pat_range_half_open(input, begin)
+ } else {
+ Err(lookahead.error())
+ }
+@@ -442,10 +335,11 @@ mod parsing {
+ }
+
+ fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
++ let begin = input.fork();
+ let (qself, path) = path::parsing::qpath(input, true)?;
+
+ if input.peek(Token![..]) {
+- return pat_range(input, qself, path).map(Pat::Range);
++ return pat_range(input, begin, qself, path);
+ }
+
+ if qself.is_some() {
+@@ -487,7 +381,7 @@ mod parsing {
+ } else if input.peek(token::Paren) {
+ pat_tuple_struct(input, path).map(Pat::TupleStruct)
+ } else if input.peek(Token![..]) {
+- pat_range(input, qself, path).map(Pat::Range)
++ pat_range(input, begin, qself, path)
+ } else {
+ Ok(Pat::Path(PatPath {
+ attrs: Vec::new(),
+@@ -546,7 +440,7 @@ mod parsing {
+ while !content.is_empty() && !content.peek(Token![..]) {
+ let value = content.call(field_pat)?;
+ fields.push_value(value);
+- if !content.peek(Token![,]) {
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+@@ -578,6 +472,7 @@ mod parsing {
+ }
+
+ fn field_pat(input: ParseStream) -> Result<FieldPat> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let boxed: Option<Token![box]> = input.parse()?;
+ let by_ref: Option<Token![ref]> = input.parse()?;
+ let mutability: Option<Token![mut]> = input.parse()?;
+@@ -587,10 +482,10 @@ mod parsing {
+ || member.is_unnamed()
+ {
+ return Ok(FieldPat {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token: input.parse()?,
+- pat: input.parse()?,
++ pat: Box::new(multi_pat(input)?),
+ });
+ }
+
+@@ -610,30 +505,57 @@ mod parsing {
+ if let Some(boxed) = boxed {
+ pat = Pat::Box(PatBox {
+ attrs: Vec::new(),
+- pat: Box::new(pat),
+ box_token: boxed,
++ pat: Box::new(pat),
+ });
+ }
+
+ Ok(FieldPat {
++ attrs,
+ member: Member::Named(ident),
+- pat: Box::new(pat),
+- attrs: Vec::new(),
+ colon_token: None,
++ pat: Box::new(pat),
+ })
+ }
+
+- fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
+- Ok(PatRange {
+- attrs: Vec::new(),
+- lo: Box::new(Expr::Path(ExprPath {
++ fn pat_range(
++ input: ParseStream,
++ begin: ParseBuffer,
++ qself: Option<QSelf>,
++ path: Path,
++ ) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
+ attrs: Vec::new(),
+- qself,
+- path,
+- })),
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- })
++ lo: Box::new(Expr::Path(ExprPath {
++ attrs: Vec::new(),
++ qself,
++ path,
++ })),
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
++ }
++
++ fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if hi.is_some() {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ } else {
++ match limits {
++ RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
++ attrs: Vec::new(),
++ dot2_token,
++ })),
++ RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
++ }
++ }
+ }
+
+ fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
+@@ -642,7 +564,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -668,14 +590,21 @@ mod parsing {
+ }
+
+ fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
+- let lo = input.call(pat_lit_expr)?;
++ let begin = input.fork();
++ let lo = input.call(pat_lit_expr)?.unwrap();
+ if input.peek(Token![..]) {
+- Ok(Pat::Range(PatRange {
+- attrs: Vec::new(),
+- lo,
+- limits: input.parse()?,
+- hi: input.call(pat_lit_expr)?,
+- }))
++ let limits: RangeLimits = input.parse()?;
++ let hi = input.call(pat_lit_expr)?;
++ if let Some(hi) = hi {
++ Ok(Pat::Range(PatRange {
++ attrs: Vec::new(),
++ lo,
++ limits,
++ hi,
++ }))
++ } else {
++ Ok(Pat::Verbatim(verbatim::between(begin, input)))
++ }
+ } else {
+ Ok(Pat::Lit(PatLit {
+ attrs: Vec::new(),
+@@ -684,7 +613,17 @@ mod parsing {
+ }
+ }
+
+- fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
++ fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
++ if input.is_empty()
++ || input.peek(Token![|])
++ || input.peek(Token![=>])
++ || input.peek(Token![:]) && !input.peek(Token![::])
++ || input.peek(Token![,])
++ || input.peek(Token![;])
++ {
++ return Ok(None);
++ }
++
+ let neg: Option<Token![-]> = input.parse()?;
+
+ let lookahead = input.lookahead1();
+@@ -696,7 +635,6 @@ mod parsing {
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![Self])
+ || lookahead.peek(Token![super])
+- || lookahead.peek(Token![extern])
+ || lookahead.peek(Token![crate])
+ {
+ Expr::Path(input.parse()?)
+@@ -704,7 +642,7 @@ mod parsing {
+ return Err(lookahead.error());
+ };
+
+- Ok(Box::new(if let Some(neg) = neg {
++ Ok(Some(Box::new(if let Some(neg) = neg {
+ Expr::Unary(ExprUnary {
+ attrs: Vec::new(),
+ op: UnOp::Neg(neg),
+@@ -712,7 +650,7 @@ mod parsing {
+ })
+ } else {
+ expr
+- }))
++ })))
+ }
+
+ fn pat_slice(input: ParseStream) -> Result<PatSlice> {
+@@ -721,7 +659,7 @@ mod parsing {
+
+ let mut elems = Punctuated::new();
+ while !content.is_empty() {
+- let value: Pat = content.parse()?;
++ let value = multi_pat(&content)?;
+ elems.push_value(value);
+ if content.is_empty() {
+ break;
+@@ -737,11 +675,35 @@ mod parsing {
+ })
+ }
+
+- fn pat_rest(input: ParseStream) -> Result<PatRest> {
+- Ok(PatRest {
+- attrs: Vec::new(),
+- dot2_token: input.parse()?,
+- })
++ pub fn multi_pat(input: ParseStream) -> Result<Pat> {
++ multi_pat_impl(input, None)
++ }
++
++ pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
++ let leading_vert: Option<Token![|]> = input.parse()?;
++ multi_pat_impl(input, leading_vert)
++ }
++
++ fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
++ let mut pat: Pat = input.parse()?;
++ if leading_vert.is_some()
++ || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
++ {
++ let mut cases = Punctuated::new();
++ cases.push_value(pat);
++ while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
++ let punct = input.parse()?;
++ cases.push_punct(punct);
++ let pat: Pat = input.parse()?;
++ cases.push_value(pat);
++ }
++ pat = Pat::Or(PatOr {
++ attrs: Vec::new(),
++ leading_vert,
++ cases,
++ });
++ }
++ Ok(pat)
+ }
+ }
+
+@@ -756,12 +718,14 @@ mod printing {
+
+ impl ToTokens for PatWild {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.underscore_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatIdent {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.by_ref.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.ident.to_tokens(tokens);
+@@ -774,6 +738,7 @@ mod printing {
+
+ impl ToTokens for PatStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.brace_token.surround(tokens, |tokens| {
+ self.fields.to_tokens(tokens);
+@@ -788,6 +753,7 @@ mod printing {
+
+ impl ToTokens for PatTupleStruct {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.path.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -804,12 +770,14 @@ mod printing {
+
+ impl ToTokens for PatPath {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ private::print_path(tokens, &self.qself, &self.path);
+ }
+ }
+
+ impl ToTokens for PatTuple {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.paren_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -818,6 +786,7 @@ mod printing {
+
+ impl ToTokens for PatBox {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.box_token.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+ }
+@@ -825,6 +794,7 @@ mod printing {
+
+ impl ToTokens for PatReference {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.and_token.to_tokens(tokens);
+ self.mutability.to_tokens(tokens);
+ self.pat.to_tokens(tokens);
+@@ -833,18 +803,21 @@ mod printing {
+
+ impl ToTokens for PatRest {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.dot2_token.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatLit {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.expr.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatRange {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.lo.to_tokens(tokens);
+ match &self.limits {
+ RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
+@@ -856,6 +829,7 @@ mod printing {
+
+ impl ToTokens for PatSlice {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.bracket_token.surround(tokens, |tokens| {
+ self.elems.to_tokens(tokens);
+ });
+@@ -864,12 +838,14 @@ mod printing {
+
+ impl ToTokens for PatMacro {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.mac.to_tokens(tokens);
+ }
+ }
+
+ impl ToTokens for PatOr {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ self.leading_vert.to_tokens(tokens);
+ self.cases.to_tokens(tokens);
+ }
+@@ -877,6 +853,7 @@ mod printing {
+
+ impl ToTokens for FieldPat {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
++ tokens.append_all(self.attrs.outer());
+ if let Some(colon_token) = &self.colon_token {
+ self.member.to_tokens(tokens);
+ colon_token.to_tokens(tokens);
+diff --git a/third_party/rust/syn/src/path.rs b/third_party/rust/syn/src/path.rs
+index 8dda43ee67..15c0fcc664 100644
+--- third_party/rust/syn/src/path.rs
++++ third_party/rust/syn/src/path.rs
+@@ -2,9 +2,9 @@ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+- /// A path at which a named item is exported: `std::collections::HashMap`.
++ /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Path {
+ pub leading_colon: Option<Token![::]>,
+@@ -29,7 +29,7 @@ where
+ ast_struct! {
+ /// A segment of a path together with any path arguments on that segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct PathSegment {
+ pub ident: Ident,
+@@ -52,7 +52,7 @@ where
+ ast_enum! {
+ /// Angle bracketed or parenthesized arguments of a path segment.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Angle bracketed
+@@ -98,7 +98,7 @@ impl PathArguments {
+ ast_enum! {
+ /// An individual generic argument, like `'a`, `T`, or `Item = T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum GenericArgument {
+ /// A lifetime argument.
+@@ -122,7 +122,7 @@ ast_struct! {
+ /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
+ /// V>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct AngleBracketedGenericArguments {
+ pub colon2_token: Option<Token![::]>,
+@@ -135,7 +135,7 @@ ast_struct! {
+ ast_struct! {
+ /// A binding (equality constraint) on an associated type: `Item = u8`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Binding {
+ pub ident: Ident,
+@@ -147,7 +147,7 @@ ast_struct! {
+ ast_struct! {
+ /// An associated type bound: `Iterator<Item: Display>`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Constraint {
+ pub ident: Ident,
+@@ -160,7 +160,7 @@ ast_struct! {
+ /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
+ /// C`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct ParenthesizedGenericArguments {
+ pub paren_token: token::Paren,
+@@ -189,7 +189,7 @@ ast_struct! {
+ /// ty position = 0
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct QSelf {
+ pub lt_token: Token![<],
+@@ -291,11 +291,7 @@ pub mod parsing {
+
+ impl PathSegment {
+ fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
+- if input.peek(Token![super])
+- || input.peek(Token![self])
+- || input.peek(Token![crate])
+- || input.peek(Token![extern])
+- {
++ if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
+ let ident = input.call(Ident::parse_any)?;
+ return Ok(PathSegment::from(ident));
+ }
+@@ -358,7 +354,7 @@ pub mod parsing {
+ impl Path {
+ /// Parse a `Path` containing no path arguments on any of its segments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -400,7 +396,6 @@ pub mod parsing {
+ && !input.peek(Token![self])
+ && !input.peek(Token![Self])
+ && !input.peek(Token![crate])
+- && !input.peek(Token![extern])
+ {
+ break;
+ }
+@@ -433,7 +428,7 @@ pub mod parsing {
+ /// path arguments, and
+ /// - the ident of the first path segment is equal to the given one.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -472,7 +467,7 @@ pub mod parsing {
+ /// - the first path segment has no angle bracketed or parenthesized
+ /// path arguments.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ pub fn get_ident(&self) -> Option<&Ident> {
+ if self.leading_colon.is_none()
+diff --git a/third_party/rust/syn/src/punctuated.rs b/third_party/rust/syn/src/punctuated.rs
+index 38c7bf4e82..46c82a65b1 100644
+--- third_party/rust/syn/src/punctuated.rs
++++ third_party/rust/syn/src/punctuated.rs
+@@ -22,6 +22,8 @@
+
+ #[cfg(feature = "extra-traits")]
+ use std::fmt::{self, Debug};
++#[cfg(feature = "extra-traits")]
++use std::hash::{Hash, Hasher};
+ #[cfg(any(feature = "full", feature = "derive"))]
+ use std::iter;
+ use std::iter::FromIterator;
+@@ -41,8 +43,6 @@ use crate::token::Token;
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub struct Punctuated<T, P> {
+ inner: Vec<(T, P)>,
+ last: Option<Box<T>>,
+@@ -76,22 +76,19 @@ impl<T, P> Punctuated<T, P> {
+ self.iter().next()
+ }
+
++ /// Mutably borrows the first element in this sequence.
++ pub fn first_mut(&mut self) -> Option<&mut T> {
++ self.iter_mut().next()
++ }
++
+ /// Borrows the last element in this sequence.
+ pub fn last(&self) -> Option<&T> {
+- if self.last.is_some() {
+- self.last.as_ref().map(Box::as_ref)
+- } else {
+- self.inner.last().map(|pair| &pair.0)
+- }
++ self.iter().next_back()
+ }
+
+ /// Mutably borrows the last element in this sequence.
+ pub fn last_mut(&mut self) -> Option<&mut T> {
+- if self.last.is_some() {
+- self.last.as_mut().map(Box::as_mut)
+- } else {
+- self.inner.last_mut().map(|pair| &mut pair.0)
+- }
++ self.iter_mut().next_back()
+ }
+
+ /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
+@@ -230,13 +227,19 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++ /// Clears the sequence of all values and punctuation, making it empty.
++ pub fn clear(&mut self) {
++ self.inner.clear();
++ self.last = None;
++ }
++
+ /// Parses zero or more occurrences of `T` separated by punctuation of type
+ /// `P`, with optional trailing punctuation.
+ ///
+ /// Parsing continues until the end of this parse stream. The entire content
+ /// of this parse stream must consist of `T` and `P`.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated(input: ParseStream) -> Result<Self>
+@@ -256,7 +259,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_terminated`]: Punctuated::parse_terminated
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_terminated_with(
+@@ -292,7 +295,7 @@ impl<T, P> Punctuated<T, P> {
+ /// is not followed by a `P`, even if there are remaining tokens in the
+ /// stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
+@@ -312,7 +315,7 @@ impl<T, P> Punctuated<T, P> {
+ ///
+ /// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_separated_nonempty_with(
+@@ -338,6 +341,53 @@ impl<T, P> Punctuated<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Punctuated<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ Punctuated {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Eq for Punctuated<T, P>
++where
++ T: Eq,
++ P: Eq,
++{
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> PartialEq for Punctuated<T, P>
++where
++ T: PartialEq,
++ P: PartialEq,
++{
++ fn eq(&self, other: &Self) -> bool {
++ let Punctuated { inner, last } = self;
++ *inner == other.inner && *last == other.last
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl<T, P> Hash for Punctuated<T, P>
++where
++ T: Hash,
++ P: Hash,
++{
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ let Punctuated { inner, last } = self;
++ inner.hash(state);
++ last.hash(state);
++ }
++}
++
+ #[cfg(feature = "extra-traits")]
+ impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -536,7 +586,6 @@ impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoPairs<T, P> {
+ inner: vec::IntoIter<(T, P)>,
+ last: option::IntoIter<T>,
+@@ -572,12 +621,24 @@ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+ }
+ }
+
++impl<T, P> Clone for IntoPairs<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoPairs {
++ inner: self.inner.clone(),
++ last: self.last.clone(),
++ }
++ }
++}
++
+ /// An iterator over owned values of type `T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[derive(Clone)]
+ pub struct IntoIter<T> {
+ inner: vec::IntoIter<T>,
+ }
+@@ -606,6 +667,17 @@ impl<T> ExactSizeIterator for IntoIter<T> {
+ }
+ }
+
++impl<T> Clone for IntoIter<T>
++where
++ T: Clone,
++{
++ fn clone(&self) -> Self {
++ IntoIter {
++ inner: self.inner.clone(),
++ }
++ }
++}
++
+ /// An iterator over borrowed values of type `&T`.
+ ///
+ /// Refer to the [module documentation] for details about punctuated sequences.
+@@ -799,7 +871,6 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
+ /// Refer to the [module documentation] for details about punctuated sequences.
+ ///
+ /// [module documentation]: self
+-#[cfg_attr(feature = "clone-impls", derive(Clone))]
+ pub enum Pair<T, P> {
+ Punctuated(T, P),
+ End(T),
+@@ -856,6 +927,20 @@ impl<T, P> Pair<T, P> {
+ }
+ }
+
++#[cfg(feature = "clone-impls")]
++impl<T, P> Clone for Pair<T, P>
++where
++ T: Clone,
++ P: Clone,
++{
++ fn clone(&self) -> Self {
++ match self {
++ Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
++ Pair::End(t) => Pair::End(t.clone()),
++ }
++ }
++}
++
+ impl<T, P> Index<usize> for Punctuated<T, P> {
+ type Output = T;
+
+diff --git a/third_party/rust/syn/src/reserved.rs b/third_party/rust/syn/src/reserved.rs
+new file mode 100644
+index 0000000000..ccfb8b5ad0
+--- /dev/null
++++ third_party/rust/syn/src/reserved.rs
+@@ -0,0 +1,42 @@
++// Type for a syntax tree node that is reserved for future use.
++//
++// For example ExprReference contains a field `raw` of type Reserved. If `&raw
++// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
++// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
++// the possibility of breaking any code.
++
++use proc_macro2::Span;
++use std::marker::PhantomData;
++
++#[cfg(feature = "extra-traits")]
++use std::fmt::{self, Debug};
++
++ast_struct! {
++ pub struct Reserved {
++ _private: PhantomData<Span>,
++ }
++}
++
++impl Default for Reserved {
++ fn default() -> Self {
++ Reserved {
++ _private: PhantomData,
++ }
++ }
++}
++
++#[cfg(feature = "clone-impls")]
++impl Clone for Reserved {
++ fn clone(&self) -> Self {
++ Reserved {
++ _private: self._private,
++ }
++ }
++}
++
++#[cfg(feature = "extra-traits")]
++impl Debug for Reserved {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ formatter.debug_struct("Reserved").finish()
++ }
++}
+diff --git a/third_party/rust/syn/src/spanned.rs b/third_party/rust/syn/src/spanned.rs
+index 71ffe26b81..01591cedcb 100644
+--- third_party/rust/syn/src/spanned.rs
++++ third_party/rust/syn/src/spanned.rs
+@@ -1,7 +1,7 @@
+ //! A trait that can provide the `Span` of the complete contents of a syntax
+ //! tree node.
+ //!
+-//! *This module is available if Syn is built with both the `"parsing"` and
++//! *This module is available only if Syn is built with both the `"parsing"` and
+ //! `"printing"` features.*
+ //!
+ //! <br>
+@@ -97,7 +97,7 @@ use quote::spanned::Spanned as ToTokens;
+ ///
+ /// [module documentation]: self
+ ///
+-/// *This trait is available if Syn is built with both the `"parsing"` and
++/// *This trait is available only if Syn is built with both the `"parsing"` and
+ /// `"printing"` features.*
+ pub trait Spanned {
+ /// Returns a `Span` covering the complete contents of this syntax tree
+diff --git a/third_party/rust/syn/src/stmt.rs b/third_party/rust/syn/src/stmt.rs
+index e4277fdbaa..b06e843d75 100644
+--- third_party/rust/syn/src/stmt.rs
++++ third_party/rust/syn/src/stmt.rs
+@@ -3,7 +3,7 @@ use super::*;
+ ast_struct! {
+ /// A braced block containing Rust statements.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Block {
+ pub brace_token: token::Brace,
+ /// Statements in a block
+@@ -14,7 +14,7 @@ ast_struct! {
+ ast_enum! {
+ /// A statement, usually ending in a semicolon.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum Stmt {
+ /// A local (let) binding.
+ Local(Local),
+@@ -33,7 +33,7 @@ ast_enum! {
+ ast_struct! {
+ /// A local `let` binding: `let x: u64 = s.parse()?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Local {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+@@ -47,14 +47,15 @@ ast_struct! {
+ pub mod parsing {
+ use super::*;
+
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+- use crate::punctuated::Punctuated;
++ use proc_macro2::TokenStream;
+
+ impl Block {
+ /// Parse the body of a block as zero or more statements, possibly
+ /// including one trailing expression.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ ///
+ /// # Example
+@@ -106,8 +107,8 @@ pub mod parsing {
+ pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
+ let mut stmts = Vec::new();
+ loop {
+- while input.peek(Token![;]) {
+- input.parse::<Token![;]>()?;
++ while let Some(semi) = input.parse::<Option<Token![;]>>()? {
++ stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
+ }
+ if input.is_empty() {
+ break;
+@@ -146,55 +147,55 @@ pub mod parsing {
+ }
+
+ fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- // TODO: optimize using advance_to
++ let mut attrs = input.call(Attribute::parse_outer)?;
++
++ // brace-style macros; paren and bracket macros get parsed as
++ // expression statements.
+ let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
++ if let Ok(path) = ahead.call(Path::parse_mod_style) {
++ if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
++ input.advance_to(&ahead);
++ return stmt_mac(input, attrs, path);
++ }
++ }
+
+- if {
+- let ahead = ahead.fork();
+- // Only parse braces here; paren and bracket will get parsed as
+- // expression statements
+- ahead.call(Path::parse_mod_style).is_ok()
+- && ahead.parse::<Token![!]>().is_ok()
+- && (ahead.peek(token::Brace) || ahead.peek(Ident))
+- } {
+- stmt_mac(input)
+- } else if ahead.peek(Token![let]) {
+- stmt_local(input).map(Stmt::Local)
+- } else if ahead.peek(Token![pub])
+- || ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
+- || ahead.peek(Token![use])
+- || ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
+- || ahead.peek(Token![const])
+- || ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
+- || ahead.peek(Token![async])
+- && (ahead.peek2(Token![unsafe])
+- || ahead.peek2(Token![extern])
+- || ahead.peek2(Token![fn]))
+- || ahead.peek(Token![fn])
+- || ahead.peek(Token![mod])
+- || ahead.peek(Token![type])
+- || ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
+- || ahead.peek(Token![struct])
+- || ahead.peek(Token![enum])
+- || ahead.peek(Token![union]) && ahead.peek2(Ident)
+- || ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+- || ahead.peek(Token![trait])
+- || ahead.peek(Token![default])
+- && (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
+- || ahead.peek(Token![impl])
+- || ahead.peek(Token![macro])
++ if input.peek(Token![let]) {
++ stmt_local(input, attrs).map(Stmt::Local)
++ } else if input.peek(Token![pub])
++ || input.peek(Token![crate]) && !input.peek2(Token![::])
++ || input.peek(Token![extern])
++ || input.peek(Token![use])
++ || input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
++ || input.peek(Token![const])
++ || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
++ || input.peek(Token![async])
++ && (input.peek2(Token![unsafe])
++ || input.peek2(Token![extern])
++ || input.peek2(Token![fn]))
++ || input.peek(Token![fn])
++ || input.peek(Token![mod])
++ || input.peek(Token![type])
++ || input.peek(item::parsing::existential) && input.peek2(Token![type])
++ || input.peek(Token![struct])
++ || input.peek(Token![enum])
++ || input.peek(Token![union]) && input.peek2(Ident)
++ || input.peek(Token![auto]) && input.peek2(Token![trait])
++ || input.peek(Token![trait])
++ || input.peek(Token![default])
++ && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
++ || input.peek(Token![impl])
++ || input.peek(Token![macro])
+ {
+- input.parse().map(Stmt::Item)
++ let mut item: Item = input.parse()?;
++ attrs.extend(item.replace_attrs(Vec::new()));
++ item.replace_attrs(attrs);
++ Ok(Stmt::Item(item))
+ } else {
+- stmt_expr(input, allow_nosemi)
++ stmt_expr(input, allow_nosemi, attrs)
+ }
+ }
+
+- fn stmt_mac(input: ParseStream) -> Result<Stmt> {
+- let attrs = input.call(Attribute::parse_outer)?;
+- let path = input.call(Path::parse_mod_style)?;
++ fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
+ let bang_token: Token![!] = input.parse()?;
+ let ident: Option<Ident> = input.parse()?;
+ let (delimiter, tokens) = mac::parse_delimiter(input)?;
+@@ -213,33 +214,12 @@ pub mod parsing {
+ })))
+ }
+
+- fn stmt_local(input: ParseStream) -> Result<Local> {
++ fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
+ Ok(Local {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ let_token: input.parse()?,
+ pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+ if input.peek(Token![:]) {
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+@@ -265,12 +245,19 @@ pub mod parsing {
+ })
+ }
+
+- fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ fn stmt_expr(
++ input: ParseStream,
++ allow_nosemi: bool,
++ mut attrs: Vec<Attribute>,
++ ) -> Result<Stmt> {
+ let mut e = expr::parsing::expr_early(input)?;
+
+- attrs.extend(e.replace_attrs(Vec::new()));
+- e.replace_attrs(attrs);
++ let mut attr_target = &mut e;
++ while let Expr::Binary(e) = attr_target {
++ attr_target = &mut e.left;
++ }
++ attrs.extend(attr_target.replace_attrs(Vec::new()));
++ attr_target.replace_attrs(attrs);
+
+ if input.peek(Token![;]) {
+ return Ok(Stmt::Semi(e, input.parse()?));
+diff --git a/third_party/rust/syn/src/token.rs b/third_party/rust/syn/src/token.rs
+index 0b8c18192f..8539378c5e 100644
+--- third_party/rust/syn/src/token.rs
++++ third_party/rust/syn/src/token.rs
+@@ -88,7 +88,6 @@
+ //! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
+ //! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
+
+-use std;
+ #[cfg(feature = "extra-traits")]
+ use std::cmp;
+ #[cfg(feature = "extra-traits")]
+@@ -97,13 +96,13 @@ use std::fmt::{self, Debug};
+ use std::hash::{Hash, Hasher};
+ use std::ops::{Deref, DerefMut};
+
+-#[cfg(feature = "parsing")]
+-use proc_macro2::Delimiter;
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ use proc_macro2::Ident;
+ use proc_macro2::Span;
+ #[cfg(feature = "printing")]
+ use proc_macro2::TokenStream;
++#[cfg(feature = "parsing")]
++use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
+ #[cfg(feature = "printing")]
+ use quote::{ToTokens, TokenStreamExt};
+
+@@ -112,10 +111,8 @@ use self::private::WithSpan;
+ use crate::buffer::Cursor;
+ #[cfg(feature = "parsing")]
+ use crate::error::Result;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lifetime::Lifetime;
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
+ #[cfg(feature = "parsing")]
+@@ -155,21 +152,20 @@ mod private {
+ #[cfg(feature = "parsing")]
+ impl private::Sealed for Ident {}
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ #[cfg(feature = "parsing")]
+ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
++ use crate::parse::Unexpected;
+ use std::cell::Cell;
+ use std::rc::Rc;
+
+ let scope = Span::call_site();
+- let unexpected = Rc::new(Cell::new(None));
++ let unexpected = Rc::new(Cell::new(Unexpected::None));
+ let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
+ peek(&buffer)
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+ macro_rules! impl_token {
+- ($name:ident $display:expr) => {
++ ($display:tt $name:ty) => {
+ #[cfg(feature = "parsing")]
+ impl Token for $name {
+ fn peek(cursor: Cursor) -> bool {
+@@ -189,24 +185,38 @@ macro_rules! impl_token {
+ };
+ }
+
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lifetime "lifetime");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(Lit "literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitStr "string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByteStr "byte string literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitByte "byte literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitChar "character literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitInt "integer literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitFloat "floating point literal");
+-#[cfg(any(feature = "full", feature = "derive"))]
+-impl_token!(LitBool "boolean literal");
++impl_token!("lifetime" Lifetime);
++impl_token!("literal" Lit);
++impl_token!("string literal" LitStr);
++impl_token!("byte string literal" LitByteStr);
++impl_token!("byte literal" LitByte);
++impl_token!("character literal" LitChar);
++impl_token!("integer literal" LitInt);
++impl_token!("floating point literal" LitFloat);
++impl_token!("boolean literal" LitBool);
++impl_token!("group token" proc_macro2::Group);
++
++macro_rules! impl_low_level_token {
++ ($display:tt $ty:ident $get:ident) => {
++ #[cfg(feature = "parsing")]
++ impl Token for $ty {
++ fn peek(cursor: Cursor) -> bool {
++ cursor.$get().is_some()
++ }
++
++ fn display() -> &'static str {
++ $display
++ }
++ }
++
++ #[cfg(feature = "parsing")]
++ impl private::Sealed for $ty {}
++ };
++}
++
++impl_low_level_token!("punctuation token" Punct punct);
++impl_low_level_token!("literal" Literal literal);
++impl_low_level_token!("token" TokenTree token_tree);
+
+ // Not public API.
+ #[doc(hidden)]
+@@ -233,7 +243,6 @@ impl<T: CustomToken> Token for T {
+ macro_rules! define_keywords {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ ///
+ /// Don't try to remember the name of this type &mdash; use the
+@@ -260,6 +269,16 @@ macro_rules! define_keywords {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -338,7 +357,6 @@ macro_rules! impl_deref_if_len_is_1 {
+ macro_rules! define_punctuation_structs {
+ ($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[repr(C)]
+ #[$doc]
+ ///
+@@ -366,6 +384,16 @@ macro_rules! define_punctuation_structs {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -436,7 +464,6 @@ macro_rules! define_punctuation {
+ macro_rules! define_delimiters {
+ ($($token:tt pub struct $name:ident #[$doc:meta])*) => {
+ $(
+- #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
+ #[$doc]
+ pub struct $name {
+ pub span: Span,
+@@ -458,6 +485,16 @@ macro_rules! define_delimiters {
+ }
+ }
+
++ #[cfg(feature = "clone-impls")]
++ impl Copy for $name {}
++
++ #[cfg(feature = "clone-impls")]
++ impl Clone for $name {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
+ #[cfg(feature = "extra-traits")]
+ impl Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+@@ -855,7 +892,7 @@ pub mod parsing {
+ }
+
+ pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
+- let mut spans = [input.cursor().span(); 3];
++ let mut spans = [input.span(); 3];
+ punct_helper(input, token, &mut spans)?;
+ Ok(S::from_spans(&spans))
+ }
+diff --git a/third_party/rust/syn/src/tt.rs b/third_party/rust/syn/src/tt.rs
+index f860eebb4f..8dba0627cd 100644
+--- third_party/rust/syn/src/tt.rs
++++ third_party/rust/syn/src/tt.rs
+@@ -18,8 +18,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
+ _ => return false,
+ }
+
+- let s1 = g1.stream().clone().into_iter();
+- let mut s2 = g2.stream().clone().into_iter();
++ let s1 = g1.stream().into_iter();
++ let mut s2 = g2.stream().into_iter();
+
+ for item1 in s1 {
+ let item2 = match s2.next() {
+@@ -60,7 +60,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
+ Delimiter::None => 3u8.hash(h),
+ }
+
+- for item in g.stream().clone() {
++ for item in g.stream() {
+ TokenTreeHelper(&item).hash(h);
+ }
+ 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
+diff --git a/third_party/rust/syn/src/ty.rs b/third_party/rust/syn/src/ty.rs
+index 4ee59bda2a..fd7c97eab7 100644
+--- third_party/rust/syn/src/ty.rs
++++ third_party/rust/syn/src/ty.rs
+@@ -1,15 +1,11 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+ use proc_macro2::TokenStream;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_enum_of_structs! {
+ /// The possible types that a Rust value could have.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+@@ -20,7 +16,7 @@ ast_enum_of_structs! {
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+ // blocked on https://github.com/rust-lang/rust/issues/62833
+- pub enum Type #manual_extra_traits {
++ pub enum Type {
+ /// A fixed size array type: `[T; n]`.
+ Array(TypeArray),
+
+@@ -77,7 +73,7 @@ ast_enum_of_structs! {
+ ast_struct! {
+ /// A fixed size array type: `[T; n]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeArray {
+ pub bracket_token: token::Bracket,
+@@ -90,7 +86,7 @@ ast_struct! {
+ ast_struct! {
+ /// A bare function type: `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeBareFn {
+ pub lifetimes: Option<BoundLifetimes>,
+@@ -107,7 +103,7 @@ ast_struct! {
+ ast_struct! {
+ /// A type contained within invisible delimiters.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeGroup {
+ pub group_token: token::Group,
+@@ -119,7 +115,7 @@ ast_struct! {
+ /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
+ /// a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeImplTrait {
+ pub impl_token: Token![impl],
+@@ -130,7 +126,7 @@ ast_struct! {
+ ast_struct! {
+ /// Indication that a type should be inferred by the compiler: `_`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeInfer {
+ pub underscore_token: Token![_],
+@@ -140,7 +136,7 @@ ast_struct! {
+ ast_struct! {
+ /// A macro in the type position.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeMacro {
+ pub mac: Macro,
+@@ -150,7 +146,7 @@ ast_struct! {
+ ast_struct! {
+ /// The never type: `!`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeNever {
+ pub bang_token: Token![!],
+@@ -160,7 +156,7 @@ ast_struct! {
+ ast_struct! {
+ /// A parenthesized type equivalent to the inner type.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeParen {
+ pub paren_token: token::Paren,
+@@ -172,7 +168,7 @@ ast_struct! {
+ /// A path like `std::slice::Iter`, optionally qualified with a
+ /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePath {
+ pub qself: Option<QSelf>,
+@@ -183,7 +179,7 @@ ast_struct! {
+ ast_struct! {
+ /// A raw pointer type: `*const T` or `*mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypePtr {
+ pub star_token: Token![*],
+@@ -196,7 +192,7 @@ ast_struct! {
+ ast_struct! {
+ /// A reference type: `&'a T` or `&'a mut T`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeReference {
+ pub and_token: Token![&],
+@@ -209,7 +205,7 @@ ast_struct! {
+ ast_struct! {
+ /// A dynamically sized slice type: `[T]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeSlice {
+ pub bracket_token: token::Bracket,
+@@ -221,7 +217,7 @@ ast_struct! {
+ /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
+ /// trait or a lifetime.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTraitObject {
+ pub dyn_token: Option<Token![dyn]>,
+@@ -232,7 +228,7 @@ ast_struct! {
+ ast_struct! {
+ /// A tuple type: `(A, B, C, String)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct TypeTuple {
+ pub paren_token: token::Paren,
+@@ -240,111 +236,10 @@ ast_struct! {
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Type {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Type {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Type::Array(this), Type::Array(other)) => this == other,
+- (Type::BareFn(this), Type::BareFn(other)) => this == other,
+- (Type::Group(this), Type::Group(other)) => this == other,
+- (Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
+- (Type::Infer(this), Type::Infer(other)) => this == other,
+- (Type::Macro(this), Type::Macro(other)) => this == other,
+- (Type::Never(this), Type::Never(other)) => this == other,
+- (Type::Paren(this), Type::Paren(other)) => this == other,
+- (Type::Path(this), Type::Path(other)) => this == other,
+- (Type::Ptr(this), Type::Ptr(other)) => this == other,
+- (Type::Reference(this), Type::Reference(other)) => this == other,
+- (Type::Slice(this), Type::Slice(other)) => this == other,
+- (Type::TraitObject(this), Type::TraitObject(other)) => this == other,
+- (Type::Tuple(this), Type::Tuple(other)) => this == other,
+- (Type::Verbatim(this), Type::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Type {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Type::Array(ty) => {
+- hash.write_u8(0);
+- ty.hash(hash);
+- }
+- Type::BareFn(ty) => {
+- hash.write_u8(1);
+- ty.hash(hash);
+- }
+- Type::Group(ty) => {
+- hash.write_u8(2);
+- ty.hash(hash);
+- }
+- Type::ImplTrait(ty) => {
+- hash.write_u8(3);
+- ty.hash(hash);
+- }
+- Type::Infer(ty) => {
+- hash.write_u8(4);
+- ty.hash(hash);
+- }
+- Type::Macro(ty) => {
+- hash.write_u8(5);
+- ty.hash(hash);
+- }
+- Type::Never(ty) => {
+- hash.write_u8(6);
+- ty.hash(hash);
+- }
+- Type::Paren(ty) => {
+- hash.write_u8(7);
+- ty.hash(hash);
+- }
+- Type::Path(ty) => {
+- hash.write_u8(8);
+- ty.hash(hash);
+- }
+- Type::Ptr(ty) => {
+- hash.write_u8(9);
+- ty.hash(hash);
+- }
+- Type::Reference(ty) => {
+- hash.write_u8(10);
+- ty.hash(hash);
+- }
+- Type::Slice(ty) => {
+- hash.write_u8(11);
+- ty.hash(hash);
+- }
+- Type::TraitObject(ty) => {
+- hash.write_u8(12);
+- ty.hash(hash);
+- }
+- Type::Tuple(ty) => {
+- hash.write_u8(13);
+- ty.hash(hash);
+- }
+- Type::Verbatim(ty) => {
+- hash.write_u8(14);
+- TokenStreamHelper(ty).hash(hash);
+- }
+- Type::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ ast_struct! {
+ /// The binary interface of a function: `extern "C"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Abi {
+ pub extern_token: Token![extern],
+@@ -355,7 +250,7 @@ ast_struct! {
+ ast_struct! {
+ /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct BareFnArg {
+ pub attrs: Vec<Attribute>,
+@@ -377,7 +272,7 @@ ast_struct! {
+ /// }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variadic {
+ pub attrs: Vec<Attribute>,
+@@ -388,7 +283,7 @@ ast_struct! {
+ ast_enum! {
+ /// Return type of a function signature.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum ReturnType {
+ /// Return type is not specified.
+@@ -407,10 +302,13 @@ pub mod parsing {
+ use crate::ext::IdentExt;
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use proc_macro2::{Punct, Spacing, TokenTree};
++ use std::iter::FromIterator;
+
+ impl Parse for Type {
+ fn parse(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, true)
++ let allow_plus = true;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+@@ -421,15 +319,17 @@ pub mod parsing {
+ ///
+ /// This parser does not allow a `+`, while the default parser does.
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- ambig_ty(input, false)
++ let allow_plus = false;
++ ambig_ty(input, allow_plus)
+ }
+ }
+
+ fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
+ return input.parse().map(Type::Group);
+ }
+
++ let begin = input.fork();
+ let mut lifetimes = None::<BoundLifetimes>;
+ let mut lookahead = input.lookahead1();
+ if lookahead.peek(Token![for]) {
+@@ -524,7 +424,7 @@ pub mod parsing {
+ ..trait_bound
+ })
+ }
+- other => other,
++ other @ TypeParamBound::Lifetime(_) => other,
+ }
+ }
+ _ => break,
+@@ -549,17 +449,20 @@ pub mod parsing {
+ }))
+ } else if lookahead.peek(Token![fn])
+ || lookahead.peek(Token![unsafe])
+- || lookahead.peek(Token![extern]) && !input.peek2(Token![::])
++ || lookahead.peek(Token![extern])
+ {
+- let mut bare_fn: TypeBareFn = input.parse()?;
+- bare_fn.lifetimes = lifetimes;
+- Ok(Type::BareFn(bare_fn))
++ let allow_mut_self = true;
++ if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
++ bare_fn.lifetimes = lifetimes;
++ Ok(Type::BareFn(bare_fn))
++ } else {
++ Ok(Type::Verbatim(verbatim::between(begin, input)))
++ }
+ } else if lookahead.peek(Ident)
+ || input.peek(Token![super])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![crate])
+- || input.peek(Token![extern])
+ || lookahead.peek(Token![::])
+ || lookahead.peek(Token![<])
+ {
+@@ -722,38 +625,58 @@ pub mod parsing {
+
+ impl Parse for TypeBareFn {
+ fn parse(input: ParseStream) -> Result<Self> {
+- let args;
+- let allow_variadic;
+- Ok(TypeBareFn {
+- lifetimes: input.parse()?,
+- unsafety: input.parse()?,
+- abi: input.parse()?,
+- fn_token: input.parse()?,
+- paren_token: parenthesized!(args in input),
+- inputs: {
+- let mut inputs = Punctuated::new();
+- while !args.is_empty() && !args.peek(Token![...]) {
+- inputs.push_value(args.parse()?);
+- if args.is_empty() {
+- break;
+- }
+- inputs.push_punct(args.parse()?);
+- }
+- allow_variadic = inputs.empty_or_trailing();
+- inputs
+- },
+- variadic: {
+- if allow_variadic && args.peek(Token![...]) {
+- Some(Variadic {
+- attrs: Vec::new(),
++ let allow_mut_self = false;
++ parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
++ let args;
++ let mut variadic = None;
++ let mut has_mut_self = false;
++
++ let bare_fn = TypeBareFn {
++ lifetimes: input.parse()?,
++ unsafety: input.parse()?,
++ abi: input.parse()?,
++ fn_token: input.parse()?,
++ paren_token: parenthesized!(args in input),
++ inputs: {
++ let mut inputs = Punctuated::new();
++
++ while !args.is_empty() {
++ let attrs = args.call(Attribute::parse_outer)?;
++
++ if inputs.empty_or_trailing() && args.peek(Token![...]) {
++ variadic = Some(Variadic {
++ attrs,
+ dots: args.parse()?,
+- })
++ });
++ break;
++ }
++
++ if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
++ inputs.push_value(BareFnArg { attrs, ..arg });
+ } else {
+- None
++ has_mut_self = true;
+ }
+- },
+- output: input.call(ReturnType::without_plus)?,
+- })
++ if args.is_empty() {
++ break;
++ }
++
++ inputs.push_punct(args.parse()?);
++ }
++
++ inputs
++ },
++ variadic,
++ output: input.call(ReturnType::without_plus)?,
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(bare_fn))
+ }
+ }
+
+@@ -776,9 +699,27 @@ pub mod parsing {
+ impl Parse for TypeTuple {
+ fn parse(input: ParseStream) -> Result<Self> {
+ let content;
++ let paren_token = parenthesized!(content in input);
++
++ if content.is_empty() {
++ return Ok(TypeTuple {
++ paren_token,
++ elems: Punctuated::new(),
++ });
++ }
++
++ let first: Type = content.parse()?;
+ Ok(TypeTuple {
+- paren_token: parenthesized!(content in input),
+- elems: content.parse_terminated(Type::parse)?,
++ paren_token,
++ elems: {
++ let mut elems = Punctuated::new();
++ elems.push_value(first);
++ elems.push_punct(content.parse()?);
++ let rest: Punctuated<Type, Token![,]> =
++ content.parse_terminated(Parse::parse)?;
++ elems.extend(rest);
++ elems
++ },
+ })
+ }
+ }
+@@ -807,9 +748,11 @@ pub mod parsing {
+
+ impl ReturnType {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ if input.peek(Token![->]) {
+ let arrow = input.parse()?;
+@@ -844,10 +787,12 @@ pub mod parsing {
+
+ impl TypeTraitObject {
+ pub fn without_plus(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+
+ // Only allow multiple trait references if allow_plus is true.
++ #[doc(hidden)]
+ pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
+ Ok(TypeTraitObject {
+ dyn_token: input.parse()?,
+@@ -910,7 +855,8 @@ pub mod parsing {
+
+ impl Parse for TypeParen {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Self::parse(input, false)
++ let allow_plus = false;
++ Self::parse(input, allow_plus)
+ }
+ }
+
+@@ -926,22 +872,72 @@ pub mod parsing {
+
+ impl Parse for BareFnArg {
+ fn parse(input: ParseStream) -> Result<Self> {
+- Ok(BareFnArg {
+- attrs: input.call(Attribute::parse_outer)?,
+- name: {
+- if (input.peek(Ident) || input.peek(Token![_]))
+- && input.peek2(Token![:])
+- && !input.peek2(Token![::])
+- {
+- let name = input.call(Ident::parse_any)?;
+- let colon: Token![:] = input.parse()?;
+- Some((name, colon))
+- } else {
+- None
+- }
+- },
+- ty: input.parse()?,
+- })
++ let allow_mut_self = false;
++ parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
++ }
++ }
++
++ fn parse_bare_fn_arg(
++ input: ParseStream,
++ mut allow_mut_self: bool,
++ ) -> Result<Option<BareFnArg>> {
++ let mut has_mut_self = false;
++ let arg = BareFnArg {
++ attrs: input.call(Attribute::parse_outer)?,
++ name: {
++ if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
++ && input.peek2(Token![:])
++ && !input.peek2(Token![::])
++ {
++ let name = input.call(Ident::parse_any)?;
++ let colon: Token![:] = input.parse()?;
++ Some((name, colon))
++ } else if allow_mut_self
++ && input.peek(Token![mut])
++ && input.peek2(Token![self])
++ && input.peek3(Token![:])
++ && !input.peek3(Token![::])
++ {
++ has_mut_self = true;
++ allow_mut_self = false;
++ input.parse::<Token![mut]>()?;
++ input.parse::<Token![self]>()?;
++ input.parse::<Token![:]>()?;
++ None
++ } else {
++ None
++ }
++ },
++ ty: if !has_mut_self && input.peek(Token![...]) {
++ let dot3 = input.parse::<Token![...]>()?;
++ let args = vec![
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('.', Spacing::Alone)),
++ ];
++ let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
++ |(mut arg, span)| {
++ arg.set_span(*span);
++ arg
++ },
++ ));
++ Type::Verbatim(tokens)
++ } else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
++ has_mut_self = true;
++ input.parse::<Token![mut]>()?;
++ Type::Path(TypePath {
++ qself: None,
++ path: input.parse::<Token![self]>()?.into(),
++ })
++ } else {
++ input.parse()?
++ },
++ };
++
++ if has_mut_self {
++ Ok(None)
++ } else {
++ Ok(Some(arg))
+ }
+ }
+
+diff --git a/third_party/rust/syn/src/verbatim.rs b/third_party/rust/syn/src/verbatim.rs
+new file mode 100644
+index 0000000000..0686352f7a
+--- /dev/null
++++ third_party/rust/syn/src/verbatim.rs
+@@ -0,0 +1,15 @@
++use crate::parse::{ParseBuffer, ParseStream};
++use proc_macro2::TokenStream;
++use std::iter;
++
++pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
++ let end = end.cursor();
++ let mut cursor = begin.cursor();
++ let mut tokens = TokenStream::new();
++ while cursor != end {
++ let (tt, next) = cursor.token_tree().unwrap();
++ tokens.extend(iter::once(tt));
++ cursor = next;
++ }
++ tokens
++}
+diff --git a/third_party/rust/syn/src/whitespace.rs b/third_party/rust/syn/src/whitespace.rs
+new file mode 100644
+index 0000000000..7be082e1a2
+--- /dev/null
++++ third_party/rust/syn/src/whitespace.rs
+@@ -0,0 +1,65 @@
++pub fn skip(mut s: &str) -> &str {
++ 'skip: while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ if let Some(i) = s.find('\n') {
++ s = &s[i + 1..];
++ continue;
++ } else {
++ return "";
++ }
++ } else if s.starts_with("/**/") {
++ s = &s[4..];
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ let mut depth = 0;
++ let bytes = s.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ s = &s[i + 2..];
++ continue 'skip;
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++ return s;
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = &s[1..];
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = &s[ch.len_utf8()..];
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
+diff --git a/third_party/rust/syn/tests/clone.sh b/third_party/rust/syn/tests/clone.sh
+deleted file mode 100755
+index 8e0863cba6..0000000000
+--- third_party/rust/syn/tests/clone.sh
++++ /dev/null
+@@ -1,16 +0,0 @@
+-#!/bin/bash
+-
+-REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
+-
+-set -euo pipefail
+-cd "$(dirname "${BASH_SOURCE[0]}")"
+-mkdir -p rust
+-touch rust/COMMIT
+-
+-if [ "$(cat rust/COMMIT)" != "$REV" ]; then
+- rm -rf rust
+- mkdir rust
+- curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
+- | tar xz --directory rust --strip-components 1
+- echo "$REV" > rust/COMMIT
+-fi
+diff --git a/third_party/rust/syn/tests/common/eq.rs b/third_party/rust/syn/tests/common/eq.rs
+index 13a6c36ae5..7589a07573 100644
+--- third_party/rust/syn/tests/common/eq.rs
++++ third_party/rust/syn/tests/common/eq.rs
+@@ -1,36 +1,35 @@
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
++extern crate rustc_span;
+ extern crate rustc_target;
+-extern crate syntax;
+-extern crate syntax_pos;
+
+ use std::mem;
+
+-use self::rustc_data_structures::sync::Lrc;
+-use self::rustc_data_structures::thin_vec::ThinVec;
+-use self::rustc_target::abi::FloatTy;
+-use self::rustc_target::spec::abi::Abi;
+-use self::syntax::ast::{
+- AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
+- AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
+- CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
+- FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
+- GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
+- ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
+- ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
+- MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
+- PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
+- StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
+- TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
+- UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
+- WhereEqPredicate, WherePredicate, WhereRegionPredicate,
++use rustc_ast::ast::{
++ AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
++ AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
++ BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
++ Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
++ FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
++ GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
++ InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
++ ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
++ LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
++ MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
++ Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
++ StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
++ TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
++ VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
++ WhereRegionPredicate,
+ };
+-use self::syntax::parse::lexer::comments;
+-use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::Spanned;
+-use self::syntax::symbol::{sym, Symbol};
+-use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
+-use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
++use rustc_ast::ptr::P;
++use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
++use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
++use rustc_data_structures::sync::Lrc;
++use rustc_data_structures::thin_vec::ThinVec;
++use rustc_span::source_map::Spanned;
++use rustc_span::symbol::Ident;
++use rustc_span::{Span, Symbol, SyntaxContext};
+
+ pub trait SpanlessEq {
+ fn eq(&self, other: &Self) -> bool;
+@@ -86,14 +85,6 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
+ }
+ }
+
+-impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
+- fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&self.0, &other.0)
+- && SpanlessEq::eq(&self.1, &other.1)
+- && SpanlessEq::eq(&self.2, &other.2)
+- }
+-}
+-
+ macro_rules! spanless_eq_true {
+ ($name:ident) => {
+ impl SpanlessEq for $name {
+@@ -126,17 +117,19 @@ spanless_eq_partial_eq!(u16);
+ spanless_eq_partial_eq!(u128);
+ spanless_eq_partial_eq!(usize);
+ spanless_eq_partial_eq!(char);
++spanless_eq_partial_eq!(String);
+ spanless_eq_partial_eq!(Symbol);
+-spanless_eq_partial_eq!(Abi);
++spanless_eq_partial_eq!(CommentKind);
+ spanless_eq_partial_eq!(DelimToken);
++spanless_eq_partial_eq!(InlineAsmOptions);
+
+ macro_rules! spanless_eq_struct {
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ } => {
+- impl SpanlessEq for $name {
++ impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
+ fn eq(&self, other: &Self) -> bool {
+ let $name { $($field,)* $($ignore: _,)* } = self;
+ let $name { $($field: $other,)* $($ignore: _,)* } = other;
+@@ -146,14 +139,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $next:ident
+ $($rest:ident)*
+ $(!$ignore:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ [$next other]
+ $($rest)*
+@@ -162,14 +155,14 @@ macro_rules! spanless_eq_struct {
+ };
+
+ {
+- $name:ident;
++ $name:ident $(<$param:ident>)?;
+ $([$field:ident $other:ident])*
+ $(![$ignore:ident])*
+ !$next:ident
+ $(!$rest:ident)*
+ } => {
+ spanless_eq_struct! {
+- $name;
++ $name $(<$param>)*;
+ $([$field $other])*
+ $(![$ignore])*
+ ![$next]
+@@ -263,119 +256,131 @@ macro_rules! spanless_eq_enum {
+ };
+ }
+
+-spanless_eq_struct!(AngleBracketedArgs; span args constraints);
++spanless_eq_struct!(AngleBracketedArgs; span args);
+ spanless_eq_struct!(AnonConst; id value);
+-spanless_eq_struct!(Arg; attrs ty pat id span);
+-spanless_eq_struct!(Arm; attrs pats guard body span id);
++spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
+ spanless_eq_struct!(AssocTyConstraint; id ident kind span);
+-spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
+-spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
++spanless_eq_struct!(AttrItem; path args);
++spanless_eq_struct!(Attribute; kind id style span);
++spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
+ spanless_eq_struct!(Block; stmts id rules span);
+-spanless_eq_struct!(Crate; module attrs span);
++spanless_eq_struct!(Crate; module attrs span proc_macros);
+ spanless_eq_struct!(EnumDef; variants);
+-spanless_eq_struct!(Expr; id node span attrs);
+-spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
+-spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
+-spanless_eq_struct!(FnDecl; inputs output c_variadic);
+-spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
+-spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
++spanless_eq_struct!(Expr; id kind span attrs !tokens);
++spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
++spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
++spanless_eq_struct!(FnDecl; inputs output);
++spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
++spanless_eq_struct!(FnSig; header decl span);
+ spanless_eq_struct!(ForeignMod; abi items);
+-spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
++spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
+ spanless_eq_struct!(Generics; params where_clause span);
+ spanless_eq_struct!(GlobalAsm; asm);
+-spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
+-spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
+-spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
+-spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
++spanless_eq_struct!(InlineAsm; template operands options line_spans);
++spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
+ spanless_eq_struct!(Label; ident);
+ spanless_eq_struct!(Lifetime; id ident);
+-spanless_eq_struct!(Lit; token node span);
++spanless_eq_struct!(Lit; token kind span);
++spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
++spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
+ spanless_eq_struct!(Local; pat ty init id span attrs);
+-spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
+-spanless_eq_struct!(MacroDef; tokens legacy);
+-spanless_eq_struct!(MethodSig; header decl);
++spanless_eq_struct!(MacCall; path args prior_type_ascription);
++spanless_eq_struct!(MacCallStmt; mac style attrs);
++spanless_eq_struct!(MacroDef; body macro_rules);
+ spanless_eq_struct!(Mod; inner items inline);
+ spanless_eq_struct!(MutTy; ty mutbl);
++spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
+ spanless_eq_struct!(ParenthesizedArgs; span inputs output);
+-spanless_eq_struct!(Pat; id node span);
++spanless_eq_struct!(Pat; id kind span tokens);
+ spanless_eq_struct!(Path; span segments);
+ spanless_eq_struct!(PathSegment; ident id args);
+ spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
+ spanless_eq_struct!(QSelf; ty path_span position);
+-spanless_eq_struct!(Stmt; id node span);
+-spanless_eq_struct!(StructField; span ident vis id ty attrs);
++spanless_eq_struct!(Stmt; id kind span);
++spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
++spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
+ spanless_eq_struct!(Token; kind span);
+-spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
+ spanless_eq_struct!(TraitRef; path ref_id);
+-spanless_eq_struct!(Ty; id node span);
++spanless_eq_struct!(Ty; id kind span);
+ spanless_eq_struct!(UseTree; prefix kind span);
+-spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
++spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
+ spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
+-spanless_eq_struct!(WhereClause; predicates span);
++spanless_eq_struct!(WhereClause; has_where_token predicates span);
+ spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
+ spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
+-spanless_eq_enum!(AsmDialect; Att Intel);
++spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
++spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
++spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
++spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
+ spanless_eq_enum!(AttrStyle; Outer Inner);
+ spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
+ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
+ spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
++spanless_eq_enum!(BorrowKind; Ref Raw);
+ spanless_eq_enum!(CaptureBy; Value Ref);
+-spanless_eq_enum!(Constness; Const NotConst);
++spanless_eq_enum!(Const; Yes(0) No);
+ spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
+-spanless_eq_enum!(Defaultness; Default Final);
++spanless_eq_enum!(Defaultness; Default(0) Final);
++spanless_eq_enum!(Extern; None Implicit Explicit(0));
+ spanless_eq_enum!(FloatTy; F32 F64);
+-spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
+-spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
++spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
++spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
+ spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
+ spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
+ spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+-spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
+-spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
+-spanless_eq_enum!(ImplPolarity; Positive Negative);
++spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
++spanless_eq_enum!(ImplPolarity; Positive Negative(0));
++spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
++spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
+ spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
+-spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
+ spanless_eq_enum!(IsAuto; Yes No);
++spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
+ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
++spanless_eq_enum!(LlvmAsmDialect; Att Intel);
++spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
+ spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
+ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
+ spanless_eq_enum!(Movability; Static Movable);
+-spanless_eq_enum!(Mutability; Mutable Immutable);
++spanless_eq_enum!(Mutability; Mut Not);
+ spanless_eq_enum!(RangeEnd; Included(0) Excluded);
+ spanless_eq_enum!(RangeLimits; HalfOpen Closed);
+-spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
++spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
+ spanless_eq_enum!(StrStyle; Cooked Raw(0));
+ spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
+-spanless_eq_enum!(TraitBoundModifier; None Maybe);
+-spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
++spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
+ spanless_eq_enum!(TraitObjectSyntax; Dyn None);
+ spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
+ spanless_eq_enum!(UnOp; Deref Not Neg);
++spanless_eq_enum!(Unsafe; Yes(0) No);
+ spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
+-spanless_eq_enum!(Unsafety; Unsafe Normal);
+ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
+ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
+ spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
+ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
+-spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
++spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
+ Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
+ While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
+- Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
+- Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
+- Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
+- Try(0) Yield(0) Err);
+-spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
+- Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
+- Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
+- Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
++ Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
++ Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
++ Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
++ Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
++spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
++ InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
++ Sym(expr));
++spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
++ Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
++ Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
++ Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
++ MacCall(0) MacroDef(0));
+ spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
+- Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
++ Float(0 1) Bool(0) Err(0));
+ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
+ Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
+- Paren(0) Mac(0));
++ Paren(0) MacCall(0));
+ spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
+ Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
+- ImplicitSelf Mac(0) Err CVarArgs);
++ ImplicitSelf MacCall(0) Err CVarArgs);
+
+ impl SpanlessEq for Ident {
+ fn eq(&self, other: &Self) -> bool {
+@@ -414,44 +419,20 @@ impl SpanlessEq for TokenKind {
+
+ impl SpanlessEq for TokenStream {
+ fn eq(&self, other: &Self) -> bool {
+- SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
+- }
+-}
+-
+-fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
+- let mut tokens = Vec::new();
+- for tt in tts.clone().into_trees() {
+- let c = match tt {
+- TokenTree::Token(Token {
+- kind: TokenKind::DocComment(c),
+- ..
+- }) => c,
+- _ => {
+- tokens.push(tt);
+- continue;
++ let mut this = self.clone().into_trees();
++ let mut other = other.clone().into_trees();
++ loop {
++ let this = match this.next() {
++ None => return other.next().is_none(),
++ Some(val) => val,
++ };
++ let other = match other.next() {
++ None => return false,
++ Some(val) => val,
++ };
++ if !SpanlessEq::eq(&this, &other) {
++ return false;
+ }
+- };
+- let contents = comments::strip_doc_comment_decoration(&c.as_str());
+- let style = comments::doc_comment_style(&c.as_str());
+- tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
+- if style == AttrStyle::Inner {
+- tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
+ }
+- let lit = token::Lit {
+- kind: token::LitKind::Str,
+- symbol: Symbol::intern(&contents),
+- suffix: None,
+- };
+- let tts = vec![
+- TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
+- TokenTree::token(TokenKind::Eq, DUMMY_SP),
+- TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
+- ];
+- tokens.push(TokenTree::Delimited(
+- DelimSpan::dummy(),
+- DelimToken::Bracket,
+- tts.into_iter().collect::<TokenStream>().into(),
+- ));
+ }
+- tokens
+ }
+diff --git a/third_party/rust/syn/tests/common/mod.rs b/third_party/rust/syn/tests/common/mod.rs
+index 8b784beed7..a1cc80a16f 100644
+--- third_party/rust/syn/tests/common/mod.rs
++++ third_party/rust/syn/tests/common/mod.rs
+@@ -1,5 +1,6 @@
+ #![allow(dead_code)]
+
++use rayon::ThreadPoolBuilder;
+ use std::env;
+
+ pub mod eq;
+@@ -12,3 +13,15 @@ pub fn abort_after() -> usize {
+ Err(_) => usize::max_value(),
+ }
+ }
++
++/// Configure Rayon threadpool.
++pub fn rayon_init() {
++ let stack_size = match env::var("RUST_MIN_STACK") {
++ Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
++ Err(_) => 20 * 1024 * 1024,
++ };
++ ThreadPoolBuilder::new()
++ .stack_size(stack_size)
++ .build_global()
++ .unwrap();
++}
+diff --git a/third_party/rust/syn/tests/common/parse.rs b/third_party/rust/syn/tests/common/parse.rs
+index 41d192f6fb..192828fedd 100644
+--- third_party/rust/syn/tests/common/parse.rs
++++ third_party/rust/syn/tests/common/parse.rs
+@@ -1,20 +1,20 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
++extern crate rustc_ast;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+-use self::syntax::ast;
+-use self::syntax::parse::{self, ParseSess};
+-use self::syntax::ptr::P;
+-use self::syntax::source_map::FilePathMapping;
+-use self::syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+
+ use std::panic;
+
+-pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
++pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
+ match panic::catch_unwind(|| {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- sess.span_diagnostic.set_continue_after_error(false);
+ let e = parse::new_parser_from_source_str(
+ &sess,
+ FileName::Custom("test_precedence".to_string()),
+@@ -32,7 +32,7 @@ pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
+ Ok(Some(e)) => Some(e),
+ Ok(None) => None,
+ Err(_) => {
+- errorf!("libsyntax panicked\n");
++ errorf!("librustc panicked\n");
+ None
+ }
+ }
+diff --git a/third_party/rust/syn/tests/debug/gen.rs b/third_party/rust/syn/tests/debug/gen.rs
+index 8450c09ecf..85a1a39079 100644
+--- third_party/rust/syn/tests/debug/gen.rs
++++ third_party/rust/syn/tests/debug/gen.rs
+@@ -2,7 +2,7 @@
+ // It is not intended for manual editing.
+
+ use super::{Lite, RefCast};
+-use std::fmt::{self, Debug};
++use std::fmt::{self, Debug, Display};
+ impl Debug for Lite<syn::Abi> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ let _val = &self.value;
+@@ -1039,9 +1039,9 @@ impl Debug for Lite<syn::Expr> {
+ }
+ syn::Expr::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Expr::While(_val) => {
+@@ -2116,9 +2116,9 @@ impl Debug for Lite<syn::ForeignItem> {
+ }
+ syn::ForeignItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2432,9 +2432,9 @@ impl Debug for Lite<syn::ImplItem> {
+ }
+ syn::ImplItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -2940,9 +2940,9 @@ impl Debug for Lite<syn::Item> {
+ }
+ syn::Item::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -3437,9 +3437,9 @@ impl Debug for Lite<syn::Lit> {
+ }
+ syn::Lit::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ }
+@@ -3878,9 +3878,9 @@ impl Debug for Lite<syn::Pat> {
+ }
+ syn::Pat::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ syn::Pat::Wild(_val) => {
+@@ -4674,9 +4674,9 @@ impl Debug for Lite<syn::TraitItem> {
+ }
+ syn::TraitItem::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+@@ -5040,9 +5040,9 @@ impl Debug for Lite<syn::Type> {
+ }
+ syn::Type::Verbatim(_val) => {
+ formatter.write_str("Verbatim")?;
+- formatter.write_str("(")?;
+- Debug::fmt(Lite(_val), formatter)?;
+- formatter.write_str(")")?;
++ formatter.write_str("(`")?;
++ Display::fmt(_val, formatter)?;
++ formatter.write_str("`)")?;
+ Ok(())
+ }
+ _ => unreachable!(),
+diff --git a/third_party/rust/syn/tests/debug/mod.rs b/third_party/rust/syn/tests/debug/mod.rs
+index c1180532ec..cefebacef7 100644
+--- third_party/rust/syn/tests/debug/mod.rs
++++ third_party/rust/syn/tests/debug/mod.rs
+@@ -1,10 +1,7 @@
+-extern crate proc_macro2;
+-extern crate ref_cast;
+-
+ mod gen;
+
+-use self::proc_macro2::{Ident, Literal, TokenStream};
+-use self::ref_cast::RefCast;
++use proc_macro2::{Ident, Literal, TokenStream};
++use ref_cast::RefCast;
+ use std::fmt::{self, Debug};
+ use std::ops::Deref;
+ use syn::punctuated::Punctuated;
+@@ -66,7 +63,15 @@ impl Debug for Lite<Literal> {
+
+ impl Debug for Lite<TokenStream> {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- write!(formatter, "`{}`", self.value)
++ let string = self.value.to_string();
++ if string.len() <= 80 {
++ write!(formatter, "TokenStream(`{}`)", self.value)
++ } else {
++ formatter
++ .debug_tuple("TokenStream")
++ .field(&format_args!("`{}`", string))
++ .finish()
++ }
+ }
+ }
+
+diff --git a/third_party/rust/syn/tests/features/error.rs b/third_party/rust/syn/tests/features/error.rs
+deleted file mode 100644
+index 10ac88965d..0000000000
+--- third_party/rust/syn/tests/features/error.rs
++++ /dev/null
+@@ -1 +0,0 @@
+-"Hello! You want: cargo test --release --all-features"
+diff --git a/third_party/rust/syn/tests/features/mod.rs b/third_party/rust/syn/tests/features/mod.rs
+deleted file mode 100644
+index 83fbe13e7e..0000000000
+--- third_party/rust/syn/tests/features/mod.rs
++++ /dev/null
+@@ -1,22 +0,0 @@
+-#[allow(unused_macros)]
+-macro_rules! hide_from_rustfmt {
+- ($mod:item) => {
+- $mod
+- };
+-}
+-
+-#[cfg(not(all(
+- feature = "derive",
+- feature = "full",
+- feature = "parsing",
+- feature = "printing",
+- feature = "visit",
+- feature = "visit-mut",
+- feature = "fold",
+- feature = "clone-impls",
+- feature = "extra-traits",
+- feature = "proc-macro",
+-)))]
+-hide_from_rustfmt! {
+- mod error;
+-}
+diff --git a/third_party/rust/syn/tests/macros/mod.rs b/third_party/rust/syn/tests/macros/mod.rs
+index c72fd01058..3994615fc4 100644
+--- third_party/rust/syn/tests/macros/mod.rs
++++ third_party/rust/syn/tests/macros/mod.rs
+@@ -1,5 +1,3 @@
+-extern crate proc_macro2;
+-
+ #[path = "../debug/mod.rs"]
+ pub mod debug;
+
+@@ -42,18 +40,18 @@ macro_rules! snapshot_impl {
+ (($expr:ident) as $t:ty, @$snapshot:literal) => {
+ let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
+ let debug = crate::macros::debug::Lite(&$expr);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ };
+ (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
+ let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) , @$snapshot:literal) => {{
+ let syntax_tree = $($expr)*;
+ let debug = crate::macros::debug::Lite(&syntax_tree);
+- insta::assert_debug_snapshot_matches!(debug, @$snapshot);
++ insta::assert_debug_snapshot!(debug, @$snapshot);
+ syntax_tree
+ }};
+ (($($expr:tt)*) $next:tt $($rest:tt)*) => {
+diff --git a/third_party/rust/syn/tests/repo/mod.rs b/third_party/rust/syn/tests/repo/mod.rs
+index c22cb03758..1d3e1f0e74 100644
+--- third_party/rust/syn/tests/repo/mod.rs
++++ third_party/rust/syn/tests/repo/mod.rs
+@@ -1,8 +1,37 @@
+-extern crate walkdir;
++mod progress;
+
+-use std::process::Command;
++use self::progress::Progress;
++use anyhow::Result;
++use flate2::read::GzDecoder;
++use std::fs;
++use std::path::Path;
++use tar::Archive;
++use walkdir::DirEntry;
+
+-use self::walkdir::DirEntry;
++const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
++
++#[rustfmt::skip]
++static EXCLUDE: &[&str] = &[
++ // Compile-fail expr parameter in const generic position: f::<1 + 2>()
++ "test/ui/const-generics/const-expression-parameter.rs",
++
++ // Deprecated anonymous parameter syntax in traits
++ "test/ui/issues/issue-13105.rs",
++ "test/ui/issues/issue-13775.rs",
++ "test/ui/issues/issue-34074.rs",
++ "test/ui/proc-macro/trait-fn-args-2015.rs",
++
++ // Not actually test cases
++ "test/rustdoc-ui/test-compile-fail2.rs",
++ "test/rustdoc-ui/test-compile-fail3.rs",
++ "test/ui/include-single-expr-helper.rs",
++ "test/ui/include-single-expr-helper-1.rs",
++ "test/ui/issues/auxiliary/issue-21146-inc.rs",
++ "test/ui/json-bom-plus-crlf-multifile-aux.rs",
++ "test/ui/lint/expansion-time-include.rs",
++ "test/ui/macros/auxiliary/macro-comma-support.rs",
++ "test/ui/macros/auxiliary/macro-include-items-expr.rs",
++];
+
+ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ let path = entry.path();
+@@ -12,49 +41,95 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
+ if path.extension().map(|e| e != "rs").unwrap_or(true) {
+ return false;
+ }
+- let path_string = path.to_string_lossy();
+- let path_string = if cfg!(windows) {
+- path_string.replace('\\', "/").into()
++
++ let mut path_string = path.to_string_lossy();
++ if cfg!(windows) {
++ path_string = path_string.replace('\\', "/").into();
++ }
++ let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
++ path
++ } else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
++ path
+ } else {
+- path_string
++ panic!("unexpected path in Rust dist: {}", path_string);
+ };
++
+ // TODO assert that parsing fails on the parse-fail cases
+- if path_string.starts_with("tests/rust/src/test/parse-fail")
+- || path_string.starts_with("tests/rust/src/test/compile-fail")
+- || path_string.starts_with("tests/rust/src/test/rustfix")
++ if path.starts_with("test/parse-fail")
++ || path.starts_with("test/compile-fail")
++ || path.starts_with("test/rustfix")
+ {
+ return false;
+ }
+
+- if path_string.starts_with("tests/rust/src/test/ui") {
+- let stderr_path = path.with_extension("stderr");
++ if path.starts_with("test/ui") {
++ let stderr_path = entry.path().with_extension("stderr");
+ if stderr_path.exists() {
+ // Expected to fail in some way
+ return false;
+ }
+ }
+
+- match path_string.as_ref() {
+- // Deprecated placement syntax
+- "tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
+- // Deprecated anonymous parameter syntax in traits
+- "tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
+- "tests/rust/src/test/ui/issues/issue-13105.rs" |
+- "tests/rust/src/test/ui/issues/issue-13775.rs" |
+- "tests/rust/src/test/ui/issues/issue-34074.rs" |
+- // Deprecated await macro syntax
+- "tests/rust/src/test/ui/async-await/await-macro.rs" |
+- // 2015-style dyn that libsyntax rejects
+- "tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
+- // not actually test cases
+- "tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
+- "tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
+- "tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
+- _ => true,
++ !EXCLUDE.contains(&path)
++}
++
++#[allow(dead_code)]
++pub fn edition(path: &Path) -> &'static str {
++ if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
++ "2015"
++ } else {
++ "2018"
+ }
+ }
+
+ pub fn clone_rust() {
+- let result = Command::new("tests/clone.sh").status().unwrap();
+- assert!(result.success());
++ let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
++ Err(_) => true,
++ Ok(contents) => contents.trim() != REVISION,
++ };
++ if needs_clone {
++ download_and_unpack().unwrap();
++ }
++ let mut missing = String::new();
++ let test_src = Path::new("tests/rust/src");
++ for exclude in EXCLUDE {
++ if !test_src.join(exclude).exists() {
++ missing += "\ntests/rust/src/";
++ missing += exclude;
++ }
++ }
++ if !missing.is_empty() {
++ panic!("excluded test file does not exist:{}\n", missing);
++ }
++}
++
++fn download_and_unpack() -> Result<()> {
++ let url = format!(
++ "https://github.com/rust-lang/rust/archive/{}.tar.gz",
++ REVISION
++ );
++ let response = reqwest::blocking::get(&url)?.error_for_status()?;
++ let progress = Progress::new(response);
++ let decoder = GzDecoder::new(progress);
++ let mut archive = Archive::new(decoder);
++ let prefix = format!("rust-{}", REVISION);
++
++ let tests_rust = Path::new("tests/rust");
++ if tests_rust.exists() {
++ fs::remove_dir_all(tests_rust)?;
++ }
++
++ for entry in archive.entries()? {
++ let mut entry = entry?;
++ let path = entry.path()?;
++ if path == Path::new("pax_global_header") {
++ continue;
++ }
++ let relative = path.strip_prefix(&prefix)?;
++ let out = tests_rust.join(relative);
++ entry.unpack(&out)?;
++ }
++
++ fs::write("tests/rust/COMMIT", REVISION)?;
++ Ok(())
+ }
+diff --git a/third_party/rust/syn/tests/repo/progress.rs b/third_party/rust/syn/tests/repo/progress.rs
+new file mode 100644
+index 0000000000..28c8a44b12
+--- /dev/null
++++ third_party/rust/syn/tests/repo/progress.rs
+@@ -0,0 +1,37 @@
++use std::io::{Read, Result};
++use std::time::{Duration, Instant};
++
++pub struct Progress<R> {
++ bytes: usize,
++ tick: Instant,
++ stream: R,
++}
++
++impl<R> Progress<R> {
++ pub fn new(stream: R) -> Self {
++ Progress {
++ bytes: 0,
++ tick: Instant::now() + Duration::from_millis(2000),
++ stream,
++ }
++ }
++}
++
++impl<R: Read> Read for Progress<R> {
++ fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
++ let num = self.stream.read(buf)?;
++ self.bytes += num;
++ let now = Instant::now();
++ if now > self.tick {
++ self.tick = now + Duration::from_millis(500);
++ errorf!("downloading... {} bytes\n", self.bytes);
++ }
++ Ok(num)
++ }
++}
++
++impl<R> Drop for Progress<R> {
++ fn drop(&mut self) {
++ errorf!("done ({} bytes)\n", self.bytes);
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_asyncness.rs b/third_party/rust/syn/tests/test_asyncness.rs
+index f868fbcc20..0efef5976f 100644
+--- third_party/rust/syn/tests/test_asyncness.rs
++++ third_party/rust/syn/tests/test_asyncness.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,16 +8,16 @@ fn test_async_fn() {
+ let input = "async fn process() {}";
+
+ snapshot!(input as Item, @r###"
+- ⋮Item::Fn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ asyncness: Some,
+- ⋮ ident: "process",
+- ⋮ generics: Generics,
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ asyncness: Some,
++ ident: "process",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+ }
+
+@@ -30,12 +26,12 @@ fn test_async_closure() {
+ let input = "async || {}";
+
+ snapshot!(input as Expr, @r###"
+- ⋮Expr::Closure {
+- ⋮ asyncness: Some,
+- ⋮ output: Default,
+- ⋮ body: Expr::Block {
+- ⋮ block: Block,
+- ⋮ },
+- ⋮}
++ Expr::Closure {
++ asyncness: Some,
++ output: Default,
++ body: Expr::Block {
++ block: Block,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_attribute.rs b/third_party/rust/syn/tests/test_attribute.rs
+index aff6294fc3..c26bd090ec 100644
+--- third_party/rust/syn/tests/test_attribute.rs
++++ third_party/rust/syn/tests/test_attribute.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -13,14 +9,14 @@ fn test_meta_item_word() {
+ let meta = test("#[foo]");
+
+ snapshot!(meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,17 +25,17 @@ fn test_meta_item_name_value() {
+ let meta = test("#[foo = 5]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+ }
+
+@@ -48,37 +44,37 @@ fn test_meta_item_bool_value() {
+ let meta = test("#[foo = true]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }
+ "###);
+
+ let meta = test("#[foo = false]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: false,
+- ⋮ },
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: false,
++ },
++ }
+ "###);
+ }
+
+@@ -87,19 +83,19 @@ fn test_meta_item_list_lit() {
+ let meta = test("#[foo(5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+ }
+
+@@ -108,26 +104,26 @@ fn test_meta_item_list_word() {
+ let meta = test("#[foo(bar)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -136,29 +132,29 @@ fn test_meta_item_list_name_value() {
+ let meta = test("#[foo(bar = 5)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -167,31 +163,31 @@ fn test_meta_item_list_bool_value() {
+ let meta = test("#[foo(bar = true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "bar",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: Lit::Bool {
+- ⋮ value: true,
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "bar",
++ arguments: None,
++ },
++ ],
++ },
++ lit: Lit::Bool {
++ value: true,
++ },
++ }),
++ ],
++ }
+ "###);
+ }
+
+@@ -200,68 +196,68 @@ fn test_meta_item_multiple() {
+ let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -270,21 +266,63 @@ fn test_bool_lit() {
+ let meta = test("#[foo(true)]");
+
+ snapshot!(meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(Lit::Bool {
+- ⋮ value: true,
+- ⋮ }),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(Lit::Bool {
++ value: true,
++ }),
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_negative_lit() {
++ let meta = test("#[form(min = -1, max = 200)]");
++
++ snapshot!(meta, @r###"
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "form",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "min",
++ arguments: None,
++ },
++ ],
++ },
++ lit: -1,
++ }),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "max",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 200,
++ }),
++ ],
++ }
+ "###);
+ }
+
+diff --git a/third_party/rust/syn/tests/test_derive_input.rs b/third_party/rust/syn/tests/test_derive_input.rs
+index de68240166..bf1ebdb67d 100644
+--- third_party/rust/syn/tests/test_derive_input.rs
++++ third_party/rust/syn/tests/test_derive_input.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,15 +11,15 @@ fn test_unit() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "Unit",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "Unit",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -39,105 +34,105 @@ fn test_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `( Debug , Clone )`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Item",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("ident"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Ident",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("attrs"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Vec",
+- ⋮ arguments: PathArguments::AngleBracketed {
+- ⋮ args: [
+- ⋮ Type(Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Attribute",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(Debug , Clone)`),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Item",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Visibility::Public,
++ ident: Some("ident"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Ident",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("attrs"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Vec",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Attribute",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "derive",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Clone",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "derive",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "Clone",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+ }
+
+@@ -151,46 +146,46 @@ fn test_union() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "MaybeUninit",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Union {
+- ⋮ fields: FieldsNamed {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("uninit"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Tuple,
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("value"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "MaybeUninit",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Union {
++ fields: FieldsNamed {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("uninit"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ Field {
++ vis: Inherited,
++ ident: Some("value"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+ }
+
+@@ -212,118 +207,118 @@ fn test_enum() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `= r" See the std::result module documentation for details."`,
+- ⋮ },
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: "Result",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "T",
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ ident: "E",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ },
+- ⋮ data: Data::Enum {
+- ⋮ variants: [
+- ⋮ Variant {
+- ⋮ ident: "Ok",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Err",
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "E",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "Surprise",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Lit {
+- ⋮ lit: 0isize,
+- ⋮ }),
+- ⋮ },
+- ⋮ Variant {
+- ⋮ ident: "ProcMacroHack",
+- ⋮ fields: Unit,
+- ⋮ discriminant: Some(Expr::Field {
+- ⋮ base: Expr::Tuple {
+- ⋮ elems: [
+- ⋮ Expr::Lit {
+- ⋮ lit: 0,
+- ⋮ },
+- ⋮ Expr::Lit {
+- ⋮ lit: "data",
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ member: Unnamed(Index {
+- ⋮ index: 0,
+- ⋮ }),
+- ⋮ }),
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`= r" See the std::result module documentation for details."`),
++ },
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Visibility::Public,
++ ident: "Result",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "T",
++ }),
++ Type(TypeParam {
++ ident: "E",
++ }),
++ ],
++ gt_token: Some,
++ },
++ data: Data::Enum {
++ variants: [
++ Variant {
++ ident: "Ok",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Err",
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "E",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ Variant {
++ ident: "Surprise",
++ fields: Unit,
++ discriminant: Some(Expr::Lit {
++ lit: 0isize,
++ }),
++ },
++ Variant {
++ ident: "ProcMacroHack",
++ fields: Unit,
++ discriminant: Some(Expr::Field {
++ base: Expr::Tuple {
++ elems: [
++ Expr::Lit {
++ lit: 0,
++ },
++ Expr::Lit {
++ lit: "data",
++ },
++ ],
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }),
++ },
++ ],
++ },
++ }
+ "###);
+
+ let meta_items: Vec<_> = input
+@@ -333,27 +328,27 @@ fn test_enum() {
+ .collect();
+
+ snapshot!(meta_items, @r###"
+- ⋮[
+- ⋮ Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "doc",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: " See the std::result module documentation for details.",
+- ⋮ },
+- ⋮ Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "must_use",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮]
++ [
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "doc",
++ arguments: None,
++ },
++ ],
++ },
++ lit: " See the std::result module documentation for details.",
++ },
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "must_use",
++ arguments: None,
++ },
++ ],
++ }),
++ ]
+ "###);
+ }
+
+@@ -366,34 +361,34 @@ fn test_attr_with_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ leading_colon: Some,
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "attr_args",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "identity",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `fn main ( ) { assert_eq ! ( foo ( ) , "Hello, world!" ) ; }`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "Dummy",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ leading_colon: Some,
++ segments: [
++ PathSegment {
++ ident: "attr_args",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "identity",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
++ },
++ ],
++ vis: Inherited,
++ ident: "Dummy",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -407,29 +402,29 @@ fn test_attr_with_non_mod_style_path() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "inert",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: `< T >`,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inert",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`< T >`),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ assert!(input.attrs[0].parse_meta().is_err());
+@@ -443,48 +438,48 @@ fn test_attr_with_mod_style_path_with_self() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "self",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "self",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -496,55 +491,55 @@ fn test_pub_restricted() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "Z",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "m",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "n",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "u8",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "Z",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "n",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "u8",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -555,15 +550,15 @@ fn test_vis_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Crate,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Crate,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -574,24 +569,24 @@ fn test_pub_restricted_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -602,24 +597,24 @@ fn test_pub_restricted_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -630,25 +625,25 @@ fn test_pub_restricted_in_super() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Visibility::Restricted {
+- ⋮ in_token: Some,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "super",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Visibility::Restricted {
++ in_token: Some,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "super",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+
+@@ -659,15 +654,15 @@ fn test_fields_on_unit_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -688,47 +683,47 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Named {
+- ⋮ named: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -737,38 +732,38 @@ fn test_fields_on_named_struct() {
+ };
+
+ snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ident: Some("foo"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ident: Some("bar"),
+- ⋮ colon_token: Some,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ident: Some("foo"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ident: Some("bar"),
++ colon_token: Some,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -779,44 +774,44 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let data = match input.data {
+@@ -825,34 +820,34 @@ fn test_fields_on_tuple_struct() {
+ };
+
+ snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
+- ⋮[
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ Field {
+- ⋮ vis: Visibility::Public,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "String",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮]
++ [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ Field {
++ vis: Visibility::Public,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "String",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ]
+ "###);
+ }
+
+@@ -864,34 +859,34 @@ fn test_ambiguous_crate() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics,
+- ⋮ data: Data::Struct {
+- ⋮ fields: Fields::Unnamed {
+- ⋮ unnamed: [
+- ⋮ Field {
+- ⋮ vis: Inherited,
+- ⋮ ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "crate",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ PathSegment {
+- ⋮ ident: "X",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Unnamed {
++ unnamed: [
++ Field {
++ vis: Inherited,
++ ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "crate",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "X",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ ],
++ },
++ semi_token: Some,
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_expr.rs b/third_party/rust/syn/tests/test_expr.rs
+index c8a11cec2c..b2b65a254f 100644
+--- third_party/rust/syn/tests/test_expr.rs
++++ third_party/rust/syn/tests/test_expr.rs
+@@ -1,40 +1,302 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+ #[macro_use]
+ mod macros;
+
+-use std::str::FromStr;
+-
+-use proc_macro2::TokenStream;
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
+ use syn::{Expr, ExprRange};
+
+ #[test]
+ fn test_expr_parse() {
+- let code = "..100u32";
+- let tt = TokenStream::from_str(code).unwrap();
+- let expr: Expr = syn::parse2(tt.clone()).unwrap();
+- let expr_range: ExprRange = syn::parse2(tt).unwrap();
+- assert_eq!(expr, Expr::Range(expr_range));
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as Expr, @r###"
++ Expr::Range {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
++
++ let tokens = quote!(..100u32);
++ snapshot!(tokens as ExprRange, @r###"
++ ExprRange {
++ limits: HalfOpen,
++ to: Some(Expr::Lit {
++ lit: 100u32,
++ }),
++ }
++ "###);
+ }
+
+ #[test]
+ fn test_await() {
+ // Must not parse as Expr::Field.
+- let expr = syn::parse_str::<Expr>("fut.await").unwrap();
+-
+- snapshot!(expr, @r###"
+- ⋮Expr::Await {
+- ⋮ base: Expr::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "fut",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮}
++ let tokens = quote!(fut.await);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Await {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "fut",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
++
++#[rustfmt::skip]
++#[test]
++fn test_tuple_multi_index() {
++ for &input in &[
++ "tuple.0.0",
++ "tuple .0.0",
++ "tuple. 0.0",
++ "tuple.0 .0",
++ "tuple.0. 0",
++ "tuple . 0 . 0",
++ ] {
++ snapshot!(input as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++
++ for tokens in vec![
++ quote!(tuple.0.0),
++ quote!(tuple .0.0),
++ quote!(tuple. 0.0),
++ quote!(tuple.0 .0),
++ quote!(tuple.0. 0),
++ quote!(tuple . 0 . 0),
++ ] {
++ snapshot!(tokens as Expr, @r###"
++ Expr::Field {
++ base: Expr::Field {
++ base: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "tuple",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ },
++ member: Unnamed(Index {
++ index: 0,
++ }),
++ }
++ "###);
++ }
++}
++
++#[test]
++fn test_macro_variable_func() {
++ // mimics the token stream corresponding to `$fn()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ func: Expr::Group {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('#', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Call {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "outside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ func: Expr::Group {
++ expr: Expr::Path {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "inside",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "f",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_macro() {
++ // mimics the token stream corresponding to `$macro!()`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
++ TokenTree::Punct(Punct::new('!', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Macro {
++ mac: Macro {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "m",
++ arguments: None,
++ },
++ ],
++ },
++ delimiter: Paren,
++ tokens: TokenStream(``),
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_struct() {
++ // mimics the token stream corresponding to `$struct {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Struct {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "S",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
++
++#[test]
++fn test_macro_variable_match_arm() {
++ // mimics the token stream corresponding to `match v { _ => $expr }`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("match", Span::call_site())),
++ TokenTree::Ident(Ident::new("v", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('_', Spacing::Alone)),
++ TokenTree::Punct(Punct::new('=', Spacing::Joint)),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as Expr, @r###"
++ Expr::Match {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "v",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ arms: [
++ Arm {
++ pat: Pat::Wild,
++ body: Expr::Group {
++ expr: Expr::Tuple {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "a",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ },
++ },
++ },
++ ],
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_generics.rs b/third_party/rust/syn/tests/test_generics.rs
+index 55c79e066b..b29434a147 100644
+--- third_party/rust/syn/tests/test_generics.rs
++++ third_party/rust/syn/tests/test_generics.rs
+@@ -1,8 +1,3 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -16,90 +11,90 @@ fn test_split_for_impl() {
+ };
+
+ snapshot!(input as DeriveInput, @r###"
+- ⋮DeriveInput {
+- ⋮ vis: Inherited,
+- ⋮ ident: "S",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ }),
+- ⋮ Lifetime(LifetimeDef {
+- ⋮ lifetime: Lifetime {
+- ⋮ ident: "b",
+- ⋮ },
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime {
+- ⋮ ident: "a",
+- ⋮ },
+- ⋮ ],
+- ⋮ }),
+- ⋮ Type(TypeParam {
+- ⋮ attrs: [
+- ⋮ Attribute {
+- ⋮ style: Outer,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "may_dangle",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ tokens: ``,
+- ⋮ },
+- ⋮ ],
+- ⋮ ident: "T",
+- ⋮ colon_token: Some,
+- ⋮ bounds: [
+- ⋮ Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮ }),
+- ⋮ ],
+- ⋮ eq_token: Some,
+- ⋮ default: Some(Type::Tuple),
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "T",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ data: Data::Struct {
+- ⋮ fields: Unit,
+- ⋮ semi_token: Some,
+- ⋮ },
+- ⋮}
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "a",
++ },
++ }),
++ Lifetime(LifetimeDef {
++ lifetime: Lifetime {
++ ident: "b",
++ },
++ colon_token: Some,
++ bounds: [
++ Lifetime {
++ ident: "a",
++ },
++ ],
++ }),
++ Type(TypeParam {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "may_dangle",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ ident: "T",
++ colon_token: Some,
++ bounds: [
++ Lifetime(Lifetime {
++ ident: "a",
++ }),
++ ],
++ eq_token: Some,
++ default: Some(Type::Tuple),
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ data: Data::Struct {
++ fields: Unit,
++ semi_token: Some,
++ },
++ }
+ "###);
+
+ let generics = input.generics;
+@@ -131,46 +126,46 @@ fn test_split_for_impl() {
+ fn test_ty_param_bound() {
+ let tokens = quote!('a);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "a",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "a",
++ })
+ "###);
+
+ let tokens = quote!('_);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Lifetime(Lifetime {
+- ⋮ ident: "_",
+- ⋮})
++ Lifetime(Lifetime {
++ ident: "_",
++ })
+ "###);
+
+ let tokens = quote!(Debug);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Debug",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Debug",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+
+ let tokens = quote!(?Sized);
+ snapshot!(tokens as TypeParamBound, @r###"
+- ⋮Trait(TraitBound {
+- ⋮ modifier: Maybe,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Sized",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮})
++ Trait(TraitBound {
++ modifier: Maybe,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Sized",
++ arguments: None,
++ },
++ ],
++ },
++ })
+ "###);
+ }
+
+@@ -187,76 +182,76 @@ fn test_fn_precedence_in_where_clause() {
+ };
+
+ snapshot!(input as ItemFn, @r###"
+- ⋮ItemFn {
+- ⋮ vis: Inherited,
+- ⋮ sig: Signature {
+- ⋮ ident: "f",
+- ⋮ generics: Generics {
+- ⋮ lt_token: Some,
+- ⋮ params: [
+- ⋮ Type(TypeParam {
+- ⋮ ident: "G",
+- ⋮ }),
+- ⋮ ],
+- ⋮ gt_token: Some,
+- ⋮ where_clause: Some(WhereClause {
+- ⋮ predicates: [
+- ⋮ Type(PredicateType {
+- ⋮ bounded_ty: Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "G",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ bounds: [
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "FnOnce",
+- ⋮ arguments: PathArguments::Parenthesized {
+- ⋮ output: Type(
+- ⋮ Type::Path {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "i32",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ },
+- ⋮ ),
+- ⋮ },
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ Trait(TraitBound {
+- ⋮ modifier: None,
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "Send",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ },
+- ⋮ output: Default,
+- ⋮ },
+- ⋮ block: Block,
+- ⋮}
++ ItemFn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics {
++ lt_token: Some,
++ params: [
++ Type(TypeParam {
++ ident: "G",
++ }),
++ ],
++ gt_token: Some,
++ where_clause: Some(WhereClause {
++ predicates: [
++ Type(PredicateType {
++ bounded_ty: Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "G",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ bounds: [
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "FnOnce",
++ arguments: PathArguments::Parenthesized {
++ output: Type(
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "i32",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ ),
++ },
++ },
++ ],
++ },
++ }),
++ Trait(TraitBound {
++ modifier: None,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "Send",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ }),
++ ],
++ }),
++ },
++ output: Default,
++ },
++ block: Block,
++ }
+ "###);
+
+ let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
+@@ -270,7 +265,7 @@ fn test_fn_precedence_in_where_clause() {
+ assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
+
+ let first_bound = &predicate.bounds[0];
+- assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
++ assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
+
+ let second_bound = &predicate.bounds[1];
+ assert_eq!(quote!(#second_bound).to_string(), "Send");
+diff --git a/third_party/rust/syn/tests/test_grouping.rs b/third_party/rust/syn/tests/test_grouping.rs
+index 1558a47b4b..a0fe716390 100644
+--- third_party/rust/syn/tests/test_grouping.rs
++++ third_party/rust/syn/tests/test_grouping.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -28,31 +23,31 @@ fn test_grouping() {
+ TokenTree::Literal(Literal::i32_suffixed(4)),
+ ]);
+
+- assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
++ assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
+
+ snapshot!(tokens as Expr, @r###"
+- ⋮Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 1i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Binary {
+- ⋮ left: Expr::Group {
+- ⋮ expr: Expr::Binary {
+- ⋮ left: Expr::Lit {
+- ⋮ lit: 2i32,
+- ⋮ },
+- ⋮ op: Add,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 3i32,
+- ⋮ },
+- ⋮ },
+- ⋮ },
+- ⋮ op: Mul,
+- ⋮ right: Expr::Lit {
+- ⋮ lit: 4i32,
+- ⋮ },
+- ⋮ },
+- ⋮}
++ Expr::Binary {
++ left: Expr::Lit {
++ lit: 1i32,
++ },
++ op: Add,
++ right: Expr::Binary {
++ left: Expr::Group {
++ expr: Expr::Binary {
++ left: Expr::Lit {
++ lit: 2i32,
++ },
++ op: Add,
++ right: Expr::Lit {
++ lit: 3i32,
++ },
++ },
++ },
++ op: Mul,
++ right: Expr::Lit {
++ lit: 4i32,
++ },
++ },
++ }
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_ident.rs b/third_party/rust/syn/tests/test_ident.rs
+index bec00a70c9..ee01bfcc9f 100644
+--- third_party/rust/syn/tests/test_ident.rs
++++ third_party/rust/syn/tests/test_ident.rs
+@@ -1,8 +1,3 @@
+-extern crate proc_macro2;
+-extern crate syn;
+-
+-mod features;
+-
+ use proc_macro2::{Ident, Span, TokenStream};
+ use std::str::FromStr;
+ use syn::Result;
+diff --git a/third_party/rust/syn/tests/test_item.rs b/third_party/rust/syn/tests/test_item.rs
+new file mode 100644
+index 0000000000..74ac4baec6
+--- /dev/null
++++ third_party/rust/syn/tests/test_item.rs
+@@ -0,0 +1,45 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Item;
++
++#[test]
++fn test_macro_variable_attr() {
++ // mimics the token stream corresponding to `$attr fn f() {}`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
++ TokenTree::Ident(Ident::new("fn", Span::call_site())),
++ TokenTree::Ident(Ident::new("f", Span::call_site())),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
++ ]);
++
++ snapshot!(tokens as Item, @r###"
++ Item::Fn {
++ attrs: [
++ Attribute {
++ style: Outer,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "test",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(``),
++ },
++ ],
++ vis: Inherited,
++ sig: Signature {
++ ident: "f",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_iterators.rs b/third_party/rust/syn/tests/test_iterators.rs
+index 1cf7157e6f..2c8359c157 100644
+--- third_party/rust/syn/tests/test_iterators.rs
++++ third_party/rust/syn/tests/test_iterators.rs
+@@ -1,10 +1,5 @@
+ use syn::punctuated::{Pair, Punctuated};
+-
+-extern crate quote;
+-#[macro_use]
+-extern crate syn;
+-
+-mod features;
++use syn::Token;
+
+ #[macro_use]
+ mod macros;
+diff --git a/third_party/rust/syn/tests/test_lit.rs b/third_party/rust/syn/tests/test_lit.rs
+index 1e8f49d19b..e995f2287f 100644
+--- third_party/rust/syn/tests/test_lit.rs
++++ third_party/rust/syn/tests/test_lit.rs
+@@ -1,13 +1,11 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
++#[macro_use]
++mod macros;
+
+-mod features;
+-
+-use proc_macro2::{TokenStream, TokenTree};
++use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
+ use quote::ToTokens;
++use std::iter::FromIterator;
+ use std::str::FromStr;
+-use syn::Lit;
++use syn::{Lit, LitFloat, LitInt};
+
+ fn lit(s: &str) -> Lit {
+ match TokenStream::from_str(s)
+@@ -50,6 +48,9 @@ fn strings() {
+ "contains\nnewlinesescaped newlines",
+ );
+ test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
++ test_string("\"...\"q", "...");
++ test_string("r\"...\"q", "...");
++ test_string("r##\"...\"##q", "...");
+ }
+
+ #[test]
+@@ -79,6 +80,9 @@ fn byte_strings() {
+ b"contains\nnewlinesescaped newlines",
+ );
+ test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
++ test_byte_string("b\"...\"q", b"...");
++ test_byte_string("br\"...\"q", b"...");
++ test_byte_string("br##\"...\"##q", b"...");
+ }
+
+ #[test]
+@@ -100,6 +104,7 @@ fn bytes() {
+ test_byte("b'\\t'", b'\t');
+ test_byte("b'\\''", b'\'');
+ test_byte("b'\"'", b'"');
++ test_byte("b'a'q", b'a');
+ }
+
+ #[test]
+@@ -125,6 +130,7 @@ fn chars() {
+ test_char("'\\''", '\'');
+ test_char("'\"'", '"');
+ test_char("'\\u{1F415}'", '\u{1F415}');
++ test_char("'a'q", 'a');
+ }
+
+ #[test]
+@@ -185,4 +191,59 @@ fn floats() {
+ test_float("5.5e12", 5.5e12, "");
+ test_float("1.0__3e-12", 1.03e-12, "");
+ test_float("1.03e+12", 1.03e12, "");
++ test_float("9e99e99", 9e99, "e99");
++}
++
++#[test]
++fn negative() {
++ let span = Span::call_site();
++ assert_eq!("-1", LitInt::new("-1", span).to_string());
++ assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
++ assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
++ assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
++ assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
++ assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
++ assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
++ assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
++}
++
++#[test]
++fn suffix() {
++ fn get_suffix(token: &str) -> String {
++ let lit = syn::parse_str::<Lit>(token).unwrap();
++ match lit {
++ Lit::Str(lit) => lit.suffix().to_owned(),
++ Lit::ByteStr(lit) => lit.suffix().to_owned(),
++ Lit::Byte(lit) => lit.suffix().to_owned(),
++ Lit::Char(lit) => lit.suffix().to_owned(),
++ Lit::Int(lit) => lit.suffix().to_owned(),
++ Lit::Float(lit) => lit.suffix().to_owned(),
++ _ => unimplemented!(),
++ }
++ }
++
++ assert_eq!(get_suffix("\"\"s"), "s");
++ assert_eq!(get_suffix("r\"\"r"), "r");
++ assert_eq!(get_suffix("b\"\"b"), "b");
++ assert_eq!(get_suffix("br\"\"br"), "br");
++ assert_eq!(get_suffix("r#\"\"#r"), "r");
++ assert_eq!(get_suffix("'c'c"), "c");
++ assert_eq!(get_suffix("b'b'b"), "b");
++ assert_eq!(get_suffix("1i32"), "i32");
++ assert_eq!(get_suffix("1_i32"), "i32");
++ assert_eq!(get_suffix("1.0f32"), "f32");
++ assert_eq!(get_suffix("1.0_f32"), "f32");
++}
++
++#[test]
++fn test_deep_group_empty() {
++ let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
++ ))]),
++ ))]);
++
++ snapshot!(tokens as Lit, @r#""hi""# );
+ }
+diff --git a/third_party/rust/syn/tests/test_meta.rs b/third_party/rust/syn/tests/test_meta.rs
+index 547472d6f4..d37dda948a 100644
+--- third_party/rust/syn/tests/test_meta.rs
++++ third_party/rust/syn/tests/test_meta.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -12,14 +8,14 @@ fn test_parse_meta_item_word() {
+ let input = "hello";
+
+ snapshot!(input as Meta, @r###"
+- ⋮Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "hello",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮})
++ Path(Path {
++ segments: [
++ PathSegment {
++ ident: "hello",
++ arguments: None,
++ },
++ ],
++ })
+ "###);
+ }
+
+@@ -29,31 +25,31 @@ fn test_parse_meta_name_value() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -65,31 +61,31 @@ fn test_parse_meta_name_value_with_keyword() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "static",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "static",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -101,31 +97,31 @@ fn test_parse_meta_name_value_with_bool() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaNameValue, @r###"
+- ⋮MetaNameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ MetaNameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "true",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮}
++ Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "true",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -137,35 +133,35 @@ fn test_parse_meta_item_list_lit() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Lit(5),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Lit(5),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -177,133 +173,133 @@ fn test_parse_meta_item_multiple() {
+ let (inner, meta) = (input, input);
+
+ snapshot!(inner as MetaList, @r###"
+- ⋮MetaList {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ MetaList {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ snapshot!(meta as Meta, @r###"
+- ⋮Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "foo",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 5,
+- ⋮ }),
+- ⋮ Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮ }),
+- ⋮ Meta(Path(Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "word2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ })),
+- ⋮ ],
+- ⋮}
++ Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "foo",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word",
++ arguments: None,
++ },
++ ],
++ })),
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 5,
++ }),
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ }),
++ Meta(Path(Path {
++ segments: [
++ PathSegment {
++ ident: "word2",
++ arguments: None,
++ },
++ ],
++ })),
++ ],
++ }
+ "###);
+
+ assert_eq!(meta, inner.into());
+@@ -316,28 +312,28 @@ fn test_parse_nested_meta() {
+
+ let input = "list(name2 = 6)";
+ snapshot!(input as NestedMeta, @r###"
+- ⋮Meta(Meta::List {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "list",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ nested: [
+- ⋮ Meta(Meta::NameValue {
+- ⋮ path: Path {
+- ⋮ segments: [
+- ⋮ PathSegment {
+- ⋮ ident: "name2",
+- ⋮ arguments: None,
+- ⋮ },
+- ⋮ ],
+- ⋮ },
+- ⋮ lit: 6,
+- ⋮ }),
+- ⋮ ],
+- ⋮})
++ Meta(Meta::List {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "list",
++ arguments: None,
++ },
++ ],
++ },
++ nested: [
++ Meta(Meta::NameValue {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "name2",
++ arguments: None,
++ },
++ ],
++ },
++ lit: 6,
++ }),
++ ],
++ })
+ "###);
+ }
+diff --git a/third_party/rust/syn/tests/test_parse_buffer.rs b/third_party/rust/syn/tests/test_parse_buffer.rs
+index f09495187f..57a3c7c38c 100644
+--- third_party/rust/syn/tests/test_parse_buffer.rs
++++ third_party/rust/syn/tests/test_parse_buffer.rs
+@@ -1,7 +1,7 @@
+-#[macro_use]
+-extern crate syn;
+-
++use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
++use std::iter::FromIterator;
+ use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
++use syn::{parenthesized, Token};
+
+ #[test]
+ #[should_panic(expected = "Fork was not derived from the advancing parse stream")]
+@@ -53,3 +53,38 @@ fn smuggled_speculative_cursor_into_brackets() {
+
+ syn::parse_str::<BreakRules>("()").unwrap();
+ }
++
++#[test]
++fn trailing_empty_none_group() {
++ fn parse(input: ParseStream) -> Result<()> {
++ input.parse::<Token![+]>()?;
++
++ let content;
++ parenthesized!(content in input);
++ content.parse::<Token![+]>()?;
++
++ Ok(())
++ }
++
++ // `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(
++ Delimiter::Parenthesis,
++ TokenStream::from_iter(vec![
++ TokenTree::Punct(Punct::new('+', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ ]),
++ )),
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::new(),
++ ))]),
++ )),
++ ]);
++
++ parse.parse2(tokens).unwrap();
++}
+diff --git a/third_party/rust/syn/tests/test_parse_stream.rs b/third_party/rust/syn/tests/test_parse_stream.rs
+new file mode 100644
+index 0000000000..76bd065777
+--- /dev/null
++++ third_party/rust/syn/tests/test_parse_stream.rs
+@@ -0,0 +1,12 @@
++use syn::ext::IdentExt;
++use syn::parse::ParseStream;
++use syn::{Ident, Token};
++
++#[test]
++fn test_peek() {
++ let _ = |input: ParseStream| {
++ let _ = input.peek(Ident);
++ let _ = input.peek(Ident::peek_any);
++ let _ = input.peek(Token![::]);
++ };
++}
+diff --git a/third_party/rust/syn/tests/test_pat.rs b/third_party/rust/syn/tests/test_pat.rs
+index 1343aa646f..73388dd79d 100644
+--- third_party/rust/syn/tests/test_pat.rs
++++ third_party/rust/syn/tests/test_pat.rs
+@@ -1,10 +1,5 @@
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ use quote::quote;
+-use syn::Pat;
++use syn::{Item, Pat, Stmt};
+
+ #[test]
+ fn test_pat_ident() {
+@@ -21,3 +16,23 @@ fn test_pat_path() {
+ value => panic!("expected PatPath, got {:?}", value),
+ }
+ }
++
++#[test]
++fn test_leading_vert() {
++ // https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
++
++ syn::parse_str::<Item>("fn f() {}").unwrap();
++ syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
++ syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
++
++ syn::parse_str::<Stmt>("let | () = ();").unwrap();
++ syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
++ syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
++ syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
++ syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
++ syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
++}
+diff --git a/third_party/rust/syn/tests/test_path.rs b/third_party/rust/syn/tests/test_path.rs
+new file mode 100644
+index 0000000000..2ce12066f5
+--- /dev/null
++++ third_party/rust/syn/tests/test_path.rs
+@@ -0,0 +1,52 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::{Expr, Type};
++
++#[test]
++fn parse_interpolated_leading_component() {
++ // mimics the token stream corresponding to `$mod::rest`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
++ TokenTree::Punct(Punct::new(':', Spacing::Joint)),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("rest", Span::call_site())),
++ ]);
++
++ snapshot!(tokens.clone() as Expr, @r###"
++ Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "first",
++ arguments: None,
++ },
++ PathSegment {
++ ident: "rest",
++ arguments: None,
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_precedence.rs b/third_party/rust/syn/tests/test_precedence.rs
+index 53ee66e372..a586b3fe48 100644
+--- third_party/rust/syn/tests/test_precedence.rs
++++ third_party/rust/syn/tests/test_precedence.rs
+@@ -4,35 +4,26 @@
+
+ //! The tests in this module do the following:
+ //!
+-//! 1. Parse a given expression in both `syn` and `libsyntax`.
++//! 1. Parse a given expression in both `syn` and `librustc`.
+ //! 2. Fold over the expression adding brackets around each subexpression (with
+-//! some complications - see the `syn_brackets` and `libsyntax_brackets`
++//! some complications - see the `syn_brackets` and `librustc_brackets`
+ //! methods).
+ //! 3. Serialize the `syn` expression back into a string, and re-parse it with
+-//! `libsyntax`.
++//! `librustc`.
+ //! 4. Respan all of the expressions, replacing the spans with the default
+ //! spans.
+ //! 5. Compare the expressions with one another, if they are not equal fail.
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate regex;
++extern crate rustc_ast;
+ extern crate rustc_data_structures;
+-extern crate smallvec;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+ use regex::Regex;
+-use smallvec::smallvec;
+-use syntax::ast;
+-use syntax::ptr::P;
+-use syntax_pos::edition::Edition;
++use rustc_ast::ast;
++use rustc_ast::ptr::P;
++use rustc_span::edition::Edition;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -73,7 +64,7 @@ fn test_simple_precedence() {
+ continue;
+ };
+
+- let pf = match test_expressions(vec![expr]) {
++ let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
+ (1, 0) => "passed",
+ (0, 1) => {
+ failed += 1;
+@@ -91,8 +82,8 @@ fn test_simple_precedence() {
+
+ /// Test expressions from rustc, like in `test_round_trip`.
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_rustc_precedence() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -118,15 +109,6 @@ fn test_rustc_precedence() {
+ return;
+ }
+
+- // Our version of `libsyntax` can't parse this tests
+- if path
+- .to_str()
+- .unwrap()
+- .ends_with("optional_comma_in_match_arm.rs")
+- {
+- return;
+- }
+-
+ let mut file = File::open(path).unwrap();
+ let mut content = String::new();
+ file.read_to_string(&mut content).unwrap();
+@@ -134,8 +116,9 @@ fn test_rustc_precedence() {
+
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
++ let edition = repo::edition(path).parse().unwrap();
+ let exprs = collect_exprs(file);
+- test_expressions(exprs)
++ test_expressions(edition, exprs)
+ }
+ Err(msg) => {
+ errorf!("syn failed to parse\n{:?}\n", msg);
+@@ -169,36 +152,36 @@ fn test_rustc_precedence() {
+ }
+ }
+
+-fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
++fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
+ let mut passed = 0;
+ let mut failed = 0;
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ for expr in exprs {
+ let raw = quote!(#expr).to_string();
+
+- let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
++ let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse raw\n");
++ errorf!("\nFAIL - librustc failed to parse raw\n");
+ continue;
+ };
+
+ let syn_expr = syn_brackets(expr);
+- let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
++ let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
+ e
+ } else {
+ failed += 1;
+- errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
++ errorf!("\nFAIL - librustc failed to parse bracketed\n");
+ continue;
+ };
+
+- if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
++ if SpanlessEq::eq(&syn_ast, &librustc_ast) {
+ passed += 1;
+ } else {
+ failed += 1;
+- errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
++ errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
+ }
+ }
+ });
+@@ -206,54 +189,106 @@ fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
+ (passed, failed)
+ }
+
+-fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
+- parse::libsyntax_expr(input).and_then(libsyntax_brackets)
++fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
++ parse::librustc_expr(input).and_then(librustc_brackets)
+ }
+
+ /// Wrap every expression which is not already wrapped in parens with parens, to
+ /// reveal the precidence of the parsed expressions, and produce a stringified
+ /// form of the resulting expression.
+ ///
+-/// This method operates on libsyntax objects.
+-fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++/// This method operates on librustc objects.
++fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
++ use rustc_ast::ast::{
++ Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
++ };
++ use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
++ use rustc_data_structures::map_in_place::MapInPlace;
+ use rustc_data_structures::thin_vec::ThinVec;
+- use smallvec::SmallVec;
++ use rustc_span::DUMMY_SP;
+ use std::mem;
+- use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
+- use syntax::mut_visit::{noop_visit_expr, MutVisitor};
+- use syntax_pos::DUMMY_SP;
+
+ struct BracketsVisitor {
+ failed: bool,
+ };
+
++ fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
++ if f.is_shorthand {
++ noop_visit_expr(&mut f.expr, vis);
++ } else {
++ vis.visit_expr(&mut f.expr);
++ }
++ vec![f]
++ }
++
++ fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
++ let kind = match stmt.kind {
++ // Don't wrap toplevel expressions in statements.
++ StmtKind::Expr(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Expr(e)
++ }
++ StmtKind::Semi(mut e) => {
++ noop_visit_expr(&mut e, vis);
++ StmtKind::Semi(e)
++ }
++ s => s,
++ };
++
++ vec![Stmt { kind, ..stmt }]
++ }
++
++ fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
++ use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
++ match &mut e.kind {
++ ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
++ ExprKind::Struct(path, fields, expr) => {
++ vis.visit_path(path);
++ fields.flat_map_in_place(|field| flat_map_field(field, vis));
++ visit_opt(expr, |expr| vis.visit_expr(expr));
++ vis.visit_id(&mut e.id);
++ vis.visit_span(&mut e.span);
++ visit_thin_attrs(&mut e.attrs, vis);
++ }
++ _ => noop_visit_expr(e, vis),
++ }
++ }
++
+ impl MutVisitor for BracketsVisitor {
+ fn visit_expr(&mut self, e: &mut P<Expr>) {
+ noop_visit_expr(e, self);
+- match e.node {
++ match e.kind {
+ ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
+ _ => {
+ let inner = mem::replace(
+ e,
+ P(Expr {
+ id: ast::DUMMY_NODE_ID,
+- node: ExprKind::Err,
++ kind: ExprKind::Err,
+ span: DUMMY_SP,
+ attrs: ThinVec::new(),
++ tokens: None,
+ }),
+ );
+- e.node = ExprKind::Paren(inner);
++ e.kind = ExprKind::Paren(inner);
+ }
+ }
+ }
+
+- fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
+- if f.is_shorthand {
+- noop_visit_expr(&mut f.expr, self);
+- } else {
+- self.visit_expr(&mut f.expr);
++ fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
++ _ => noop_visit_generic_arg(arg, self),
+ }
+- SmallVec::from([f])
++ }
++
++ fn visit_block(&mut self, block: &mut P<Block>) {
++ self.visit_id(&mut block.id);
++ block
++ .stmts
++ .flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
++ self.visit_span(&mut block.span);
+ }
+
+ // We don't want to look at expressions that might appear in patterns or
+@@ -267,25 +302,8 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ let _ = ty;
+ }
+
+- fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
+- let node = match stmt.node {
+- // Don't wrap toplevel expressions in statements.
+- StmtKind::Expr(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Expr(e)
+- }
+- StmtKind::Semi(mut e) => {
+- noop_visit_expr(&mut e, self);
+- StmtKind::Semi(e)
+- }
+- s => s,
+- };
+-
+- smallvec![Stmt { node, ..stmt }]
+- }
+-
+- fn visit_mac(&mut self, mac: &mut Mac) {
+- // By default when folding over macros, libsyntax panics. This is
++ fn visit_mac(&mut self, mac: &mut MacCall) {
++ // By default when folding over macros, librustc panics. This is
+ // because it's usually not what you want, you want to run after
+ // macro expansion. We do want to do that (syn doesn't do macro
+ // expansion), so we implement visit_mac to just return the macro
+@@ -295,11 +313,11 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
+ }
+
+ let mut folder = BracketsVisitor { failed: false };
+- folder.visit_expr(&mut libsyntax_expr);
++ folder.visit_expr(&mut librustc_expr);
+ if folder.failed {
+ None
+ } else {
+- Some(libsyntax_expr)
++ Some(librustc_expr)
+ }
+ }
+
+@@ -318,14 +336,33 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
+ Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
+ fold_expr(self, expr)
+ }
+- node => Expr::Paren(ExprParen {
++ _ => Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+- expr: Box::new(fold_expr(self, node)),
++ expr: Box::new(fold_expr(self, expr)),
+ paren_token: token::Paren::default(),
+ }),
+ }
+ }
+
++ fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_argument(self, arg),
++ }
++ }
++
++ fn fold_generic_method_argument(
++ &mut self,
++ arg: GenericMethodArgument,
++ ) -> GenericMethodArgument {
++ match arg {
++ // Don't wrap const generic arg as that's invalid syntax.
++ GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
++ _ => fold_generic_method_argument(self, arg),
++ }
++ }
++
+ fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
+ match stmt {
+ // Don't wrap toplevel expressions in statements.
+@@ -360,7 +397,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
+ struct CollectExprs(Vec<Expr>);
+ impl Fold for CollectExprs {
+ fn fold_expr(&mut self, expr: Expr) -> Expr {
+- self.0.push(expr);
++ match expr {
++ Expr::Verbatim(tokens) if tokens.is_empty() => {}
++ _ => self.0.push(expr),
++ }
+
+ Expr::Tuple(ExprTuple {
+ attrs: vec![],
+diff --git a/third_party/rust/syn/tests/test_receiver.rs b/third_party/rust/syn/tests/test_receiver.rs
+new file mode 100644
+index 0000000000..923df96ba9
+--- /dev/null
++++ third_party/rust/syn/tests/test_receiver.rs
+@@ -0,0 +1,127 @@
++use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
++
++#[test]
++fn test_by_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_value(self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_mut_value() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_mut(mut self: Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_ref() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_ref(self: &Self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_box() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_box(self: Box<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_by_pin() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn by_pin(self: Pin<Self>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_explicit_type() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn explicit_type(self: Pin<MyType>);
++ };
++ match sig.receiver() {
++ Some(FnArg::Typed(_)) => (),
++ value => panic!("expected FnArg::Typed, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn value_shorthand(self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_mut_value_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn mut_value_shorthand(mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: None,
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_shorthand(&self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: None,
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
++ }
++}
++
++#[test]
++fn test_ref_mut_shorthand() {
++ let TraitItemMethod { sig, .. } = parse_quote! {
++ fn ref_mut_shorthand(&mut self);
++ };
++ match sig.receiver() {
++ Some(FnArg::Receiver(Receiver {
++ reference: Some(_),
++ mutability: Some(_),
++ ..
++ })) => (),
++ value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
++ }
++}
+diff --git a/third_party/rust/syn/tests/test_round_trip.rs b/third_party/rust/syn/tests/test_round_trip.rs
+index 2fc9cecd86..260dd0c3d9 100644
+--- third_party/rust/syn/tests/test_round_trip.rs
++++ third_party/rust/syn/tests/test_round_trip.rs
+@@ -2,22 +2,20 @@
+ #![recursion_limit = "1024"]
+ #![feature(rustc_private)]
+
+-extern crate quote;
+-extern crate rayon;
+-extern crate syn;
+-extern crate syntax;
+-extern crate syntax_pos;
+-extern crate walkdir;
+-
+-mod features;
++extern crate rustc_ast;
++extern crate rustc_errors;
++extern crate rustc_expand;
++extern crate rustc_parse as parse;
++extern crate rustc_session;
++extern crate rustc_span;
+
+ use quote::quote;
+ use rayon::iter::{IntoParallelIterator, ParallelIterator};
+-use syntax::ast;
+-use syntax::parse::{self, PResult, ParseSess};
+-use syntax::source_map::FilePathMapping;
+-use syntax_pos::edition::Edition;
+-use syntax_pos::FileName;
++use rustc_ast::ast;
++use rustc_errors::PResult;
++use rustc_session::parse::ParseSess;
++use rustc_span::source_map::FilePathMapping;
++use rustc_span::FileName;
+ use walkdir::{DirEntry, WalkDir};
+
+ use std::fs::File;
+@@ -38,8 +36,8 @@ mod repo;
+ use common::eq::SpanlessEq;
+
+ #[test]
+-#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
+ fn test_round_trip() {
++ common::rayon_init();
+ repo::clone_rust();
+ let abort_after = common::abort_after();
+ if abort_after == 0 {
+@@ -78,11 +76,12 @@ fn test_round_trip() {
+ }
+ };
+ let back = quote!(#krate).to_string();
++ let edition = repo::edition(path).parse().unwrap();
+
+ let equal = panic::catch_unwind(|| {
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(edition, || {
+ let sess = ParseSess::new(FilePathMapping::empty());
+- let before = match libsyntax_parse(content, &sess) {
++ let before = match librustc_parse(content, &sess) {
+ Ok(before) => before,
+ Err(mut diagnostic) => {
+ diagnostic.cancel();
+@@ -93,7 +92,7 @@ fn test_round_trip() {
+ errorf!("=== {}: ignore\n", path.display());
+ } else {
+ errorf!(
+- "=== {}: ignore - libsyntax failed to parse original content: {}\n",
++ "=== {}: ignore - librustc failed to parse original content: {}\n",
+ path.display(),
+ diagnostic.message()
+ );
+@@ -101,10 +100,10 @@ fn test_round_trip() {
+ return true;
+ }
+ };
+- let after = match libsyntax_parse(back, &sess) {
++ let after = match librustc_parse(back, &sess) {
+ Ok(after) => after,
+ Err(mut diagnostic) => {
+- errorf!("=== {}: libsyntax failed to parse", path.display());
++ errorf!("=== {}: librustc failed to parse", path.display());
+ diagnostic.emit();
+ return false;
+ }
+@@ -130,7 +129,7 @@ fn test_round_trip() {
+ })
+ });
+ match equal {
+- Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
++ Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
+ Ok(true) => {}
+ Ok(false) => {
+ let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
+@@ -147,7 +146,7 @@ fn test_round_trip() {
+ }
+ }
+
+-fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
++fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
+ let name = FileName::Custom("test_round_trip".to_string());
+ parse::parse_crate_from_source_str(name, content, sess)
+ }
+diff --git a/third_party/rust/syn/tests/test_shebang.rs b/third_party/rust/syn/tests/test_shebang.rs
+new file mode 100644
+index 0000000000..dc26b9aab3
+--- /dev/null
++++ third_party/rust/syn/tests/test_shebang.rs
+@@ -0,0 +1,59 @@
++#[macro_use]
++mod macros;
++
++#[test]
++fn test_basic() {
++ let content = "#!/usr/bin/env rustx\nfn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ shebang: Some("#!/usr/bin/env rustx"),
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
++
++#[test]
++fn test_comment() {
++ let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
++ let file = syn::parse_file(content).unwrap();
++ snapshot!(file, @r###"
++ File {
++ attrs: [
++ Attribute {
++ style: Inner,
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "allow",
++ arguments: None,
++ },
++ ],
++ },
++ tokens: TokenStream(`(dead_code)`),
++ },
++ ],
++ items: [
++ Item::Fn {
++ vis: Inherited,
++ sig: Signature {
++ ident: "main",
++ generics: Generics,
++ output: Default,
++ },
++ block: Block,
++ },
++ ],
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_should_parse.rs b/third_party/rust/syn/tests/test_should_parse.rs
+index aadf42e3af..180d859916 100644
+--- third_party/rust/syn/tests/test_should_parse.rs
++++ third_party/rust/syn/tests/test_should_parse.rs
+@@ -1,7 +1,3 @@
+-extern crate syn;
+-
+-mod features;
+-
+ macro_rules! should_parse {
+ ($name:ident, { $($in:tt)* }) => {
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_size.rs b/third_party/rust/syn/tests/test_size.rs
+index 386d4df889..01e8401158 100644
+--- third_party/rust/syn/tests/test_size.rs
++++ third_party/rust/syn/tests/test_size.rs
+@@ -1,7 +1,5 @@
+ #![cfg(target_pointer_width = "64")]
+
+-mod features;
+-
+ use std::mem;
+ use syn::*;
+
+diff --git a/third_party/rust/syn/tests/test_stmt.rs b/third_party/rust/syn/tests/test_stmt.rs
+new file mode 100644
+index 0000000000..d68b47fd2f
+--- /dev/null
++++ third_party/rust/syn/tests/test_stmt.rs
+@@ -0,0 +1,44 @@
++#[macro_use]
++mod macros;
++
++use syn::Stmt;
++
++#[test]
++fn test_raw_operator() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Verbatim(`& raw const x`)),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_variable() {
++ let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
++
++ snapshot!(stmt, @r###"
++ Local(Local {
++ pat: Pat::Wild,
++ init: Some(Expr::Reference {
++ expr: Expr::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "raw",
++ arguments: None,
++ },
++ ],
++ },
++ },
++ }),
++ })
++ "###);
++}
++
++#[test]
++fn test_raw_invalid() {
++ assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
++}
+diff --git a/third_party/rust/syn/tests/test_token_trees.rs b/third_party/rust/syn/tests/test_token_trees.rs
+index 70a9a72aab..5b00448af8 100644
+--- third_party/rust/syn/tests/test_token_trees.rs
++++ third_party/rust/syn/tests/test_token_trees.rs
+@@ -1,9 +1,3 @@
+-extern crate proc_macro2;
+-extern crate quote;
+-extern crate syn;
+-
+-mod features;
+-
+ #[macro_use]
+ mod macros;
+
+@@ -21,7 +15,11 @@ fn test_struct() {
+ }
+ ";
+
+- snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
++ snapshot!(input as TokenStream, @r###"
++ TokenStream(
++ `# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
++ )
++ "###);
+ }
+
+ #[test]
+diff --git a/third_party/rust/syn/tests/test_ty.rs b/third_party/rust/syn/tests/test_ty.rs
+new file mode 100644
+index 0000000000..9cbdcd6b99
+--- /dev/null
++++ third_party/rust/syn/tests/test_ty.rs
+@@ -0,0 +1,53 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use quote::quote;
++use std::iter::FromIterator;
++use syn::Type;
++
++#[test]
++fn test_mut_self() {
++ syn::parse_str::<Type>("fn(mut self)").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ())").unwrap();
++ syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
++ syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
++}
++
++#[test]
++fn test_macro_variable_type() {
++ // mimics the token stream corresponding to `$ty<T>`
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
++ TokenTree::Punct(Punct::new('<', Spacing::Alone)),
++ TokenTree::Ident(Ident::new("T", Span::call_site())),
++ TokenTree::Punct(Punct::new('>', Spacing::Alone)),
++ ]);
++
++ snapshot!(tokens as Type, @r###"
++ Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "ty",
++ arguments: PathArguments::AngleBracketed {
++ args: [
++ Type(Type::Path {
++ path: Path {
++ segments: [
++ PathSegment {
++ ident: "T",
++ arguments: None,
++ },
++ ],
++ },
++ }),
++ ],
++ },
++ },
++ ],
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/test_visibility.rs b/third_party/rust/syn/tests/test_visibility.rs
+new file mode 100644
+index 0000000000..c3d0ac7a5b
+--- /dev/null
++++ third_party/rust/syn/tests/test_visibility.rs
+@@ -0,0 +1,145 @@
++#[macro_use]
++mod macros;
++
++use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
++use std::iter::FromIterator;
++use syn::parse::{Parse, ParseStream};
++use syn::{DeriveInput, Result, Visibility};
++
++#[derive(Debug)]
++struct VisRest {
++ vis: Visibility,
++ rest: TokenStream,
++}
++
++impl Parse for VisRest {
++ fn parse(input: ParseStream) -> Result<Self> {
++ Ok(VisRest {
++ vis: input.parse()?,
++ rest: input.parse()?,
++ })
++ }
++}
++
++macro_rules! assert_vis_parse {
++ ($input:expr, Ok($p:pat)) => {
++ assert_vis_parse!($input, Ok($p) + "");
++ };
++
++ ($input:expr, Ok($p:pat) + $rest:expr) => {
++ let expected = $rest.parse::<TokenStream>().unwrap();
++ let parse: VisRest = syn::parse_str($input).unwrap();
++
++ match parse.vis {
++ $p => {}
++ _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
++ }
++
++ // NOTE: Round-trips through `to_string` to avoid potential whitespace
++ // diffs.
++ assert_eq!(parse.rest.to_string(), expected.to_string());
++ };
++
++ ($input:expr, Err) => {
++ syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
++ };
++}
++
++#[test]
++fn test_pub() {
++ assert_vis_parse!("pub", Ok(Visibility::Public(_)));
++}
++
++#[test]
++fn test_crate() {
++ assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
++}
++
++#[test]
++fn test_inherited() {
++ assert_vis_parse!("", Ok(Visibility::Inherited));
++}
++
++#[test]
++fn test_in() {
++ assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_crate() {
++ assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_self() {
++ assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_pub_super() {
++ assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
++}
++
++#[test]
++fn test_missing_in() {
++ assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
++}
++
++#[test]
++fn test_missing_in_path() {
++ assert_vis_parse!("pub(in)", Err);
++}
++
++#[test]
++fn test_crate_path() {
++ assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
++}
++
++#[test]
++fn test_junk_after_in() {
++ assert_vis_parse!("pub(in some::path @@garbage)", Err);
++}
++
++#[test]
++fn test_empty_group_vis() {
++ // mimics `struct S { $vis $field: () }` where $vis is empty
++ let tokens = TokenStream::from_iter(vec![
++ TokenTree::Ident(Ident::new("struct", Span::call_site())),
++ TokenTree::Ident(Ident::new("S", Span::call_site())),
++ TokenTree::Group(Group::new(
++ Delimiter::Brace,
++ TokenStream::from_iter(vec![
++ TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
++ TokenTree::Group(Group::new(
++ Delimiter::None,
++ TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
++ "f",
++ Span::call_site(),
++ ))]),
++ )),
++ TokenTree::Punct(Punct::new(':', Spacing::Alone)),
++ TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
++ ]),
++ )),
++ ]);
++
++ snapshot!(tokens as DeriveInput, @r###"
++ DeriveInput {
++ vis: Inherited,
++ ident: "S",
++ generics: Generics,
++ data: Data::Struct {
++ fields: Fields::Named {
++ named: [
++ Field {
++ vis: Inherited,
++ ident: Some("f"),
++ colon_token: Some,
++ ty: Type::Tuple,
++ },
++ ],
++ },
++ },
++ }
++ "###);
++}
+diff --git a/third_party/rust/syn/tests/zzz_stable.rs b/third_party/rust/syn/tests/zzz_stable.rs
+index a81b3df4d0..a1a670d9ed 100644
+--- third_party/rust/syn/tests/zzz_stable.rs
++++ third_party/rust/syn/tests/zzz_stable.rs
+@@ -1,7 +1,5 @@
+ #![cfg(syn_disable_nightly_tests)]
+
+-extern crate termcolor;
+-
+ use std::io::{self, Write};
+ use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
+
+@@ -10,7 +8,7 @@ const MSG: &str = "\
+ ‖ WARNING:
+ ‖ This is not a nightly compiler so not all tests were able to
+ ‖ run. Syn includes tests that compare Syn's parser against the
+-‖ compiler's parser, which requires access to unstable libsyntax
++‖ compiler's parser, which requires access to unstable librustc
+ ‖ data structures and a nightly compiler.
+ ‖
+ ";
+--
+2.28.0
+
Property changes on: head/www/firefox-esr/files/patch-bug1663715
___________________________________________________________________
Added: fbsd:nokeywords
## -0,0 +1 ##
+yes
\ No newline at end of property
Added: svn:eol-style
## -0,0 +1 ##
+native
\ No newline at end of property
Added: svn:mime-type
## -0,0 +1 ##
+text/plain
\ No newline at end of property
Index: head/www/geckodriver/Makefile
===================================================================
--- head/www/geckodriver/Makefile (revision 552220)
+++ head/www/geckodriver/Makefile (revision 552221)
@@ -1,370 +1,370 @@
# $FreeBSD$
PORTNAME= geckodriver
DISTVERSION= 0.26.0
-PORTREVISION= 9
+PORTREVISION= 10
CATEGORIES= www
MASTER_SITES= https://hg.mozilla.org/mozilla-central/archive/${DISTNAME}.zip/testing/geckodriver/?dummy=/
DISTNAME= e9783a644016aa9b317887076618425586730d73
EXTRACT_SUFX= .zip
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= daniel@blodan.se
COMMENT= Proxy for using WebDriver clients with Gecko-based browsers
LICENSE= MPL20
USES= cargo
WRKSRC= ${WRKDIR}/mozilla-central-${DISTNAME}/testing/geckodriver
PLIST_FILES= bin/geckodriver
CARGO_CRATES= Inflector-0.11.4 \
adler32-1.0.4 \
aho-corasick-0.7.6 \
ansi_term-0.11.0 \
app_units-0.7.0 \
arrayref-0.3.5 \
arrayvec-0.4.11 \
atomic-0.4.5 \
atomic_refcell-0.1.0 \
atty-0.2.11 \
audio_thread_priority-0.19.1 \
authenticator-0.2.6 \
autocfg-0.1.6 \
backtrace-0.3.9 \
backtrace-sys-0.1.24 \
base64-0.10.1 \
binary-space-partition-0.1.2 \
bincode-1.0.0 \
bindgen-0.51.1 \
binjs_meta-0.5.2 \
bit-vec-0.5.1 \
bit_reverse-0.1.7 \
bitflags-1.0.4 \
bitreader-0.3.0 \
blake2b_simd-0.5.8 \
block-buffer-0.7.3 \
block-padding-0.1.2 \
boxfnonce-0.0.3 \
byte-tools-0.3.0 \
byteorder-1.3.1 \
bytes-0.4.9 \
cc-1.0.34 \
cexpr-0.3.3 \
cfg-if-0.1.6 \
chrono-0.4.6 \
clang-sys-0.28.1 \
clap-2.31.2 \
cloudabi-0.0.3 \
cmake-0.1.29 \
comedy-0.1.0 \
constant_time_eq-0.1.3 \
cookie-0.12.0 \
core-foundation-0.6.3 \
core-foundation-sys-0.6.2 \
core-graphics-0.17.1 \
core-text-13.0.0 \
coreaudio-sys-0.2.2 \
cose-0.1.4 \
cose-c-0.1.5 \
cranelift-bforest-0.44.0 \
cranelift-codegen-0.44.0 \
cranelift-codegen-meta-0.44.0 \
cranelift-codegen-shared-0.44.0 \
cranelift-entity-0.44.0 \
cranelift-frontend-0.44.0 \
cranelift-wasm-0.44.0 \
crc32fast-1.2.0 \
crossbeam-deque-0.7.1 \
crossbeam-epoch-0.7.2 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.5 \
cssparser-0.25.9 \
cssparser-macros-0.3.6 \
cstr-0.1.3 \
cstr-macros-0.1.6 \
cubeb-0.5.5 \
cubeb-backend-0.5.5 \
cubeb-core-0.5.5 \
cubeb-sys-0.5.5 \
darling-0.10.1 \
darling_core-0.10.1 \
darling_macro-0.10.1 \
dbus-0.6.4 \
deflate-0.7.19 \
derive_more-0.13.0 \
devd-rs-0.3.0 \
digest-0.8.0 \
dirs-1.0.5 \
dns-parser-0.8.0 \
dogear-0.4.0 \
dtoa-0.4.2 \
dtoa-short-0.3.1 \
dwrote-0.9.0 \
either-1.1.0 \
encoding_c-0.9.5 \
encoding_c_mem-0.2.4 \
encoding_rs-0.8.20 \
env_logger-0.6.2 \
error-chain-0.11.0 \
euclid-0.20.0 \
failure-0.1.3 \
failure_derive-0.1.3 \
fake-simd-0.1.2 \
filetime_win-0.1.0 \
flate2-1.0.11 \
fnv-1.0.6 \
foreign-types-0.3.0 \
freetype-0.4.0 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.23 \
futures-cpupool-0.1.8 \
fxhash-0.2.1 \
generic-array-0.12.0 \
gl_generator-0.11.0 \
gleam-0.6.17 \
glob-0.3.0 \
goblin-0.0.24 \
guid_win-0.1.0 \
h2-0.1.12 \
headers-0.2.1 \
headers-core-0.1.1 \
headers-derive-0.1.1 \
http-0.1.17 \
httparse-1.3.3 \
humantime-1.1.1 \
hyper-0.12.19 \
ident_case-1.0.0 \
idna-0.2.0 \
image-0.22.1 \
indexmap-1.1.0 \
inflate-0.4.5 \
iovec-0.1.2 \
itertools-0.8.0 \
itoa-0.4.1 \
kernel32-sys-0.2.2 \
khronos_api-3.1.0 \
lazy_static-1.2.0 \
lazycell-1.2.1 \
libc-0.2.60 \
libdbus-sys-0.1.5 \
libloading-0.5.0 \
libudev-0.2.0 \
libudev-sys-0.1.3 \
libz-sys-1.0.25 \
line-wrap-0.1.1 \
linked-hash-map-0.5.1 \
lmdb-rkv-0.12.3 \
lmdb-rkv-sys-0.9.5 \
lock_api-0.3.1 \
log-0.4.6 \
lzw-0.10.0 \
mach-0.3.2 \
malloc_size_of_derive-0.1.0 \
matches-0.1.6 \
memchr-2.2.0 \
memmap-0.7.0 \
memoffset-0.5.1 \
mime-0.3.13 \
mime_guess-2.0.1 \
miniz_oxide-0.3.2 \
mio-0.6.19 \
mio-extras-2.0.5 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.3 \
moz_cbor-0.1.1 \
mozdevice-0.1.0 \
mozprofile-0.6.0 \
mozrunner-0.10.0 \
mozversion-0.2.1 \
mp4parse-0.11.2 \
mp4parse_capi-0.11.2 \
mp4parse_fallible-0.0.1 \
msdos_time-0.1.6 \
murmurhash3-0.0.5 \
net2-0.2.33 \
new_debug_unreachable-1.0.1 \
nodrop-0.1.12 \
nom-4.1.1 \
num-derive-0.3.0 \
num-integer-0.1.39 \
num-iter-0.1.37 \
num-rational-0.2.1 \
num-traits-0.2.6 \
num_cpus-1.7.0 \
object-0.14.0 \
opaque-debug-0.2.1 \
ordered-float-1.0.1 \
owning_ref-0.4.0 \
packed_simd-0.3.3 \
parking_lot-0.9.0 \
parking_lot_core-0.6.2 \
peeking_take_while-0.1.2 \
percent-encoding-2.1.0 \
phf-0.7.24 \
phf_codegen-0.7.24 \
phf_generator-0.7.24 \
phf_shared-0.7.24 \
pkg-config-0.3.9 \
plain-0.2.3 \
plane-split-0.15.0 \
plist-0.5.1 \
png-0.15.0 \
podio-0.1.5 \
precomputed-hash-0.1.1 \
proc-macro2-0.4.27 \
proc-macro2-1.0.5 \
procedural-masquerade-0.1.1 \
pulse-0.2.0 \
quick-error-1.2.1 \
quote-0.6.11 \
quote-1.0.2 \
rand-0.6.5 \
rand_chacha-0.1.1 \
rand_core-0.3.1 \
rand_core-0.4.0 \
rand_hc-0.1.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rayon-1.2.0 \
rayon-core-1.6.0 \
rdrand-0.4.0 \
redox_syscall-0.1.56 \
redox_termios-0.1.1 \
redox_users-0.3.1 \
regex-1.1.9 \
regex-syntax-0.6.12 \
remove_dir_all-0.5.2 \
ringbuf-0.1.4 \
rkv-0.10.2 \
ron-0.1.7 \
runloop-0.1.0 \
rust-argon2-0.5.1 \
rust-ini-0.10.3 \
rust_cascade-0.3.4 \
rustc-demangle-0.1.8 \
rustc-hash-1.0.1 \
rustc_version-0.2.3 \
ryu-0.2.4 \
safemem-0.3.0 \
same-file-1.0.2 \
scoped-tls-0.1.0 \
scoped-tls-1.0.0 \
scopeguard-1.0.0 \
scroll-0.9.2 \
scroll_derive-0.9.5 \
selectors-0.21.0 \
semver-0.6.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.88 \
serde_bytes-0.11.2 \
serde_derive-1.0.88 \
serde_json-1.0.26 \
serde_repr-0.1.4 \
serde_urlencoded-0.6.1 \
serde_yaml-0.8.9 \
servo_arc-0.1.1 \
sha-1-0.8.1 \
sha2-0.8.0 \
shift_or_euc-0.1.0 \
shift_or_euc_c-0.1.0 \
shlex-0.1.1 \
siphasher-0.2.1 \
slab-0.3.0 \
slab-0.4.1 \
smallbitvec-2.3.0 \
smallvec-0.6.10 \
socket2-0.3.10 \
stable_deref_trait-1.0.0 \
storage-0.1.0 \
string-0.1.1 \
strsim-0.7.0 \
svg_fmt-0.4.0 \
syn-0.15.30 \
syn-1.0.5 \
synstructure-0.10.1 \
synstructure-0.12.1 \
target-lexicon-0.8.1 \
tempfile-3.0.5 \
term_size-0.3.0 \
termcolor-1.0.5 \
termion-1.5.1 \
textwrap-0.9.0 \
thin-slice-0.1.1 \
thin-vec-0.1.0 \
thread_local-0.3.6 \
thread_profiler-0.1.1 \
threadbound-0.1.0 \
time-0.1.40 \
tokio-0.1.11 \
tokio-codec-0.1.0 \
tokio-current-thread-0.1.6 \
tokio-executor-0.1.7 \
tokio-fs-0.1.3 \
tokio-io-0.1.7 \
tokio-named-pipes-0.1.0 \
tokio-reactor-0.1.3 \
tokio-tcp-0.1.1 \
tokio-threadpool-0.1.14 \
tokio-timer-0.2.11 \
tokio-udp-0.1.1 \
tokio-uds-0.2.5 \
toml-0.4.5 \
try-lock-0.2.2 \
typenum-1.10.0 \
uluru-0.3.0 \
unicase-2.4.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.5 \
unicode-segmentation-1.2.1 \
unicode-width-0.1.4 \
unicode-xid-0.1.0 \
unicode-xid-0.2.0 \
unreachable-1.0.0 \
url-2.1.0 \
urlencoding-1.0.0 \
utf8-ranges-1.0.4 \
uuid-0.7.4 \
vcpkg-0.2.2 \
vec_map-0.8.0 \
version_check-0.1.5 \
void-1.0.2 \
walkdir-2.1.4 \
want-0.0.6 \
warp-0.1.19 \
wasmparser-0.39.1 \
webdriver-0.40.2 \
webrender-0.60.0 \
webrender_api-0.60.0 \
webrender_build-0.0.1 \
webrtc-sdp-0.3.1 \
weedle-0.8.0 \
winapi-0.2.8 \
winapi-0.3.6 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.2 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
wincolor-1.0.2 \
winreg-0.5.1 \
wr_malloc_size_of-0.0.1 \
ws-0.9.0 \
ws2_32-sys-0.2.1 \
xml-rs-0.8.0 \
yaml-rust-0.4.2 \
zip-0.4.2
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/geckodriver
.include <bsd.port.mk>
Index: head/www/jwt-cli/Makefile
===================================================================
--- head/www/jwt-cli/Makefile (revision 552220)
+++ head/www/jwt-cli/Makefile (revision 552221)
@@ -1,88 +1,89 @@
# Created by: Sergey A. Osokin <osa@FreeBSD.org>
# $FreeBSD$
PORTNAME= jwt-cli
PORTVERSION= 3.2.1
+PORTREVISION= 1
CATEGORIES= www
MAINTAINER= osa@FreeBSD.org
COMMENT= Super fast CLI tool to decode and encode JWTs
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE.md
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= mike-engel
CARGO_CRATES= aho-corasick-0.7.13 \
ansi_term-0.11.0 \
atty-0.2.14 \
autocfg-1.0.1 \
base64-0.12.3 \
bitflags-1.2.1 \
bumpalo-3.4.0 \
cc-1.0.59 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clap-2.33.3 \
hermit-abi-0.1.15 \
itoa-0.4.6 \
js-sys-0.3.45 \
jsonwebtoken-7.2.0 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.77 \
log-0.4.11 \
memchr-2.3.3 \
num-0.2.1 \
num-bigint-0.2.6 \
num-complex-0.2.4 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.2.4 \
num-traits-0.2.12 \
once_cell-1.4.1 \
parse_duration-2.1.0 \
pem-0.8.1 \
proc-macro2-1.0.21 \
quote-1.0.7 \
regex-1.3.9 \
regex-syntax-0.6.18 \
ring-0.16.15 \
ryu-1.0.5 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
simple_asn1-0.4.1 \
spin-0.5.2 \
strsim-0.8.0 \
syn-1.0.40 \
term-0.4.6 \
term-painter-0.2.4 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.44 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
vec_map-0.8.2 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.68 \
wasm-bindgen-backend-0.2.68 \
wasm-bindgen-macro-0.2.68 \
wasm-bindgen-macro-support-0.2.68 \
wasm-bindgen-shared-0.2.68 \
web-sys-0.3.45 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0
PLIST_FILES= bin/jwt
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/jwt
.include <bsd.port.mk>
Index: head/www/miniserve/Makefile
===================================================================
--- head/www/miniserve/Makefile (revision 552220)
+++ head/www/miniserve/Makefile (revision 552221)
@@ -1,365 +1,366 @@
# $FreeBSD$
PORTNAME= miniserve
DISTVERSIONPREFIX= v
DISTVERSION= 0.9.0
+PORTREVISION= 1
CATEGORIES= www
MAINTAINER= ports@FreeBSD.org
COMMENT= Ad-hoc HTTP server for file sharing
LICENSE= APACHE20 BSD3CLAUSE CC0-1.0 ISCL MIT MPL20 UNLICENSE
LICENSE_COMB= multi
LICENSE_FILE_MIT= ${WRKSRC}/LICENSE
ONLY_FOR_ARCHS= amd64 i386
ONLY_FOR_ARCHS_REASON= ring crate not ported to other architectures
USES= cargo ssl:build
USE_GITHUB= yes
GH_ACCOUNT= svenstaro
CARGO_CRATES= actix-codec-0.2.0 \
actix-codec-0.3.0 \
actix-connect-1.0.2 \
actix-connect-2.0.0 \
actix-files-0.3.0 \
actix-http-1.0.1 \
actix-http-2.0.0 \
actix-macros-0.1.2 \
actix-multipart-0.3.0 \
actix-router-0.2.4 \
actix-rt-1.1.1 \
actix-server-1.0.3 \
actix-service-1.0.6 \
actix-testing-1.0.1 \
actix-threadpool-0.3.3 \
actix-tls-1.0.0 \
actix-tls-2.0.0 \
actix-utils-1.0.6 \
actix-utils-2.0.0 \
actix-web-2.0.0 \
actix-web-3.0.2 \
actix-web-codegen-0.2.2 \
actix-web-codegen-0.3.0 \
actix-web-httpauth-0.5.0 \
addr2line-0.13.0 \
adler-0.2.3 \
adler32-1.2.0 \
aho-corasick-0.7.13 \
alphanumeric-sort-1.4.0 \
ansi_term-0.11.0 \
arc-swap-0.4.7 \
assert_cmd-1.0.1 \
assert_fs-1.0.0 \
async-trait-0.1.40 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.1 \
awc-1.0.1 \
awc-2.0.0 \
backtrace-0.3.50 \
base-x-0.2.6 \
base64-0.11.0 \
base64-0.12.3 \
bit-set-0.5.2 \
bit-vec-0.6.2 \
bitflags-1.2.1 \
block-buffer-0.9.0 \
brotli-sys-0.3.2 \
brotli2-0.3.2 \
bstr-0.2.13 \
buf-min-0.1.1 \
bumpalo-3.4.0 \
byteorder-1.3.4 \
bytes-0.5.6 \
bytesize-1.0.1 \
bytestring-0.1.5 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
cc-1.0.59 \
cfg-if-0.1.10 \
chrono-0.4.15 \
chrono-humanize-0.1.1 \
clap-2.33.3 \
cloudabi-0.0.3 \
cloudabi-0.1.0 \
const_fn-0.4.2 \
cookie-0.14.2 \
copyless-0.1.5 \
cpuid-bool-0.1.2 \
crc32fast-1.2.0 \
crossbeam-utils-0.7.2 \
ctor-0.1.15 \
derive_more-0.99.10 \
difference-2.0.0 \
digest-0.9.0 \
discard-1.0.4 \
doc-comment-0.3.3 \
dtoa-0.4.6 \
either-1.6.0 \
encoding_rs-0.8.24 \
enum-as-inner-0.3.3 \
failure-0.1.8 \
failure_derive-0.1.8 \
filetime-0.2.12 \
flate2-1.0.14 \
float-cmp-0.8.0 \
fnv-1.0.7 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futf-0.1.4 \
futures-0.3.5 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-executor-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
fxhash-0.2.1 \
generic-array-0.14.4 \
getrandom-0.1.15 \
gimli-0.22.0 \
globset-0.4.5 \
globwalk-0.7.3 \
h2-0.2.6 \
hashbrown-0.9.0 \
heck-0.3.1 \
hermit-abi-0.1.15 \
hex-0.4.2 \
hostname-0.3.1 \
html5ever-0.25.1 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
hyper-0.13.7 \
hyper-rustls-0.21.0 \
idna-0.2.0 \
ignore-0.4.16 \
indexmap-1.6.0 \
instant-0.1.6 \
iovec-0.1.4 \
ipconfig-0.2.2 \
ipnet-2.3.0 \
itoa-0.4.6 \
js-sys-0.3.45 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.77 \
libflate-1.0.2 \
libflate_lz77-1.0.0 \
linked-hash-map-0.5.3 \
lock_api-0.4.1 \
log-0.4.11 \
lru-cache-0.1.2 \
mac-0.1.1 \
markup5ever-0.10.0 \
markup5ever_rcdom-0.1.0 \
match_cfg-0.1.0 \
matches-0.1.8 \
maud-0.22.0 \
maud_htmlescape-0.17.0 \
maud_macros-0.22.0 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.1 \
mio-0.6.22 \
mio-uds-0.6.8 \
miow-0.2.1 \
nanoid-0.3.0 \
net2-0.2.35 \
new_debug_unreachable-1.0.4 \
nom-4.2.3 \
normalize-line-endings-0.3.0 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
object-0.20.0 \
once_cell-1.4.1 \
opaque-debug-0.3.0 \
output_vt100-0.1.2 \
parking_lot-0.11.0 \
parking_lot_core-0.8.0 \
percent-encoding-2.1.0 \
phf-0.8.0 \
phf_codegen-0.8.0 \
phf_generator-0.8.0 \
phf_shared-0.8.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
port_check-0.1.5 \
ppv-lite86-0.2.9 \
precomputed-hash-0.1.1 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
pretty_assertions-0.6.1 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-1.0.21 \
qrcodegen-1.6.0 \
quick-error-1.2.3 \
quote-1.0.7 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_pcg-0.2.1 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
reqwest-0.10.8 \
resolv-conf-0.6.3 \
ring-0.16.15 \
rle-decode-fast-1.0.1 \
rstest-0.6.4 \
rustc-demangle-0.1.16 \
rustc_version-0.2.3 \
rustls-0.18.1 \
ryu-1.0.5 \
same-file-1.0.6 \
scopeguard-1.1.0 \
sct-0.6.0 \
select-0.5.0 \
semver-0.9.0 \
semver-parser-0.7.0 \
serde-1.0.116 \
serde_derive-1.0.116 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
sha-1-0.9.1 \
sha1-0.6.0 \
sha2-0.9.1 \
signal-hook-registry-1.2.1 \
simplelog-0.8.0 \
siphasher-0.3.3 \
slab-0.4.2 \
smallvec-1.4.2 \
socket2-0.3.15 \
spin-0.5.2 \
standback-0.2.10 \
stdweb-0.4.20 \
stdweb-derive-0.5.3 \
stdweb-internal-macros-0.2.9 \
stdweb-internal-runtime-0.1.5 \
string_cache-0.8.0 \
string_cache_codegen-0.5.1 \
strsim-0.8.0 \
structopt-0.3.17 \
structopt-derive-0.4.10 \
strum-0.19.2 \
strum_macros-0.19.2 \
syn-1.0.40 \
synstructure-0.12.4 \
tar-0.4.30 \
tempfile-3.1.0 \
tendril-0.4.1 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thiserror-1.0.20 \
thiserror-impl-1.0.20 \
thread_local-1.0.1 \
threadpool-1.8.1 \
time-0.1.44 \
time-0.2.18 \
time-macros-0.1.0 \
time-macros-impl-0.1.1 \
tinyvec-0.3.4 \
tokio-0.2.22 \
tokio-rustls-0.14.1 \
tokio-util-0.2.0 \
tokio-util-0.3.1 \
tower-service-0.3.0 \
tracing-0.1.19 \
tracing-core-0.1.16 \
treeline-0.1.0 \
trust-dns-proto-0.18.0-alpha.2 \
trust-dns-proto-0.19.5 \
trust-dns-resolver-0.18.0-alpha.2 \
trust-dns-resolver-0.19.5 \
try-lock-0.2.3 \
twoway-0.2.1 \
typenum-1.12.0 \
unchecked-index-0.2.2 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
url-2.1.1 \
utf-8-0.7.5 \
v_escape-0.12.1 \
v_escape_derive-0.8.1 \
v_htmlescape-0.10.0 \
vec_map-0.8.2 \
version_check-0.1.5 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
walkdir-2.3.1 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.68 \
wasm-bindgen-backend-0.2.68 \
wasm-bindgen-futures-0.4.18 \
wasm-bindgen-macro-0.2.68 \
wasm-bindgen-macro-support-0.2.68 \
wasm-bindgen-shared-0.2.68 \
web-sys-0.3.45 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
widestring-0.4.2 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.6.2 \
winreg-0.7.0 \
ws2_32-sys-0.2.1 \
xattr-0.2.2 \
xml5ever-0.16.1 \
yansi-0.5.0 \
zip-0.5.8
# maud_macros uses feature(proc_macro)
MAKE_ENV= RUSTC_BOOTSTRAP=1
PLIST_FILES= bin/miniserve
PORTDOCS= README.md
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/miniserve
post-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_MAN} ${WRKSRC}/README.md ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/www/monolith/Makefile
===================================================================
--- head/www/monolith/Makefile (revision 552220)
+++ head/www/monolith/Makefile (revision 552221)
@@ -1,210 +1,210 @@
# $FreeBSD$
PORTNAME= monolith
DISTVERSIONPREFIX= v
DISTVERSION= 2.3.1
-PORTREVISION= 1
+PORTREVISION= 2
CATEGORIES= www
MAINTAINER= vulcan@wired.sh
COMMENT= CLI tool for saving complete web pages as a single HTML file
LICENSE= UNLICENSE
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= Y2Z
CARGO_CRATES= adler-0.2.3 \
ansi_term-0.11.0 \
assert_cmd-1.0.1 \
async-compression-0.3.5 \
atty-0.2.14 \
autocfg-0.1.7 \
autocfg-1.0.0 \
base64-0.12.3 \
bitflags-1.2.1 \
block-buffer-0.9.0 \
bumpalo-3.4.0 \
bytes-0.5.6 \
cc-1.0.58 \
cfg-if-0.1.10 \
chrono-0.4.13 \
clap-2.33.1 \
cloudabi-0.0.3 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
cpuid-bool-0.1.2 \
crc32fast-1.2.0 \
cssparser-0.27.2 \
cssparser-macros-0.6.0 \
difference-2.0.0 \
digest-0.9.0 \
doc-comment-0.3.3 \
dtoa-0.4.6 \
dtoa-short-0.3.2 \
encoding_rs-0.8.23 \
flate2-1.0.16 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futf-0.1.4 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-io-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
generic-array-0.14.3 \
getrandom-0.1.14 \
h2-0.2.6 \
hashbrown-0.8.1 \
hermit-abi-0.1.15 \
html5ever-0.24.1 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
hyper-0.13.7 \
hyper-tls-0.4.3 \
idna-0.2.0 \
indexmap-1.5.0 \
iovec-0.1.4 \
ipnet-2.3.0 \
itoa-0.4.6 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.74 \
log-0.4.11 \
mac-0.1.1 \
markup5ever-0.9.0 \
matches-0.1.8 \
memchr-2.3.3 \
mime-0.3.16 \
mime_guess-2.0.3 \
miniz_oxide-0.4.0 \
mio-0.6.22 \
miow-0.2.1 \
native-tls-0.2.4 \
net2-0.2.34 \
new_debug_unreachable-1.0.4 \
num-integer-0.1.43 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.0 \
opaque-debug-0.3.0 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
percent-encoding-2.1.0 \
phf-0.7.24 \
phf-0.8.0 \
phf_codegen-0.7.24 \
phf_generator-0.7.24 \
phf_generator-0.8.0 \
phf_macros-0.8.0 \
phf_shared-0.7.24 \
phf_shared-0.8.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
ppv-lite86-0.2.8 \
precomputed-hash-0.1.1 \
predicates-1.0.5 \
predicates-core-1.0.0 \
predicates-tree-1.0.0 \
proc-macro-hack-0.5.18 \
proc-macro2-1.0.19 \
quote-1.0.7 \
rand-0.6.5 \
rand-0.7.3 \
rand_chacha-0.1.1 \
rand_chacha-0.2.2 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_pcg-0.2.1 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
redox_syscall-0.1.57 \
remove_dir_all-0.5.3 \
reqwest-0.10.7 \
ryu-1.0.5 \
schannel-0.1.19 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
sha2-0.9.1 \
siphasher-0.2.3 \
siphasher-0.3.3 \
slab-0.4.2 \
smallvec-1.4.1 \
socket2-0.3.12 \
string_cache-0.7.5 \
string_cache_codegen-0.4.4 \
string_cache_shared-0.3.0 \
strsim-0.8.0 \
syn-1.0.36 \
tempfile-3.1.0 \
tendril-0.4.1 \
textwrap-0.11.0 \
time-0.1.43 \
tinyvec-0.3.3 \
tokio-0.2.22 \
tokio-tls-0.3.1 \
tokio-util-0.3.1 \
tower-service-0.3.0 \
tracing-0.1.18 \
tracing-core-0.1.12 \
treeline-0.1.0 \
try-lock-0.2.3 \
typenum-1.12.0 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
url-2.1.1 \
utf-8-0.7.5 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.4.17 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws2_32-sys-0.2.1
PLIST_FILES= bin/${PORTNAME}
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.include <bsd.port.mk>
Index: head/www/newsboat/Makefile
===================================================================
--- head/www/newsboat/Makefile (revision 552220)
+++ head/www/newsboat/Makefile (revision 552221)
@@ -1,125 +1,126 @@
# Created by: arved
# $FreeBSD$
PORTNAME= newsboat
PORTVERSION= 2.21
+PORTREVISION= 1
CATEGORIES= www
MASTER_SITES= https://newsboat.org/releases/${PORTVERSION}/
DISTFILES= ${DISTNAME}${EXTRACT_SUFX}
MAINTAINER= mt@markoturk.info
COMMENT= RSS feed reader for the text console
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS= asciidoctor:textproc/rubygem-asciidoctor
LIB_DEPENDS= libstfl.so:devel/stfl \
libcurl.so:ftp/curl \
libjson-c.so:devel/json-c
USES= cargo compiler:c++11-lang gettext gmake gnome iconv:translit \
ncurses pkgconfig python:env shebangfix sqlite ssl tar:xz
USE_GNOME= libxml2
SHEBANG_FILES= contrib/bookmark-evernote.sh contrib/feedgrabber.rb \
contrib/getpocket.com/send-to-pocket.sh contrib/heise.rb \
contrib/slashdot.rb doc/examples/example-exec-script.py
MAKE_ARGS= CARGO=${CARGO_CARGO_BIN} prefix=${PREFIX}
MAKE_ENV= ${CARGO_ENV}
CARGO_BUILD= no
CARGO_INSTALL= no
CARGO_TARGET_DIR= ${WRKSRC}/target
CARGO_CRATES= addr2line-0.12.2 \
adler32-1.1.0 \
aho-corasick-0.7.13 \
arrayvec-0.5.1 \
autocfg-1.0.0 \
backtrace-0.3.49 \
bit-set-0.5.2 \
bit-vec-0.6.2 \
bitflags-1.2.1 \
block-0.1.6 \
byteorder-1.3.4 \
cc-1.0.57 \
cfg-if-0.1.10 \
chrono-0.4.15 \
clap-2.33.3 \
curl-sys-0.4.36+curl-7.71.1 \
fnv-1.0.7 \
getrandom-0.1.14 \
gettext-rs-0.5.0 \
gettext-sys-0.19.9 \
gimli-0.21.0 \
idna-0.2.0 \
lazy_static-1.4.0 \
lexical-core-0.7.4 \
libc-0.2.77 \
libz-sys-1.1.0 \
locale_config-0.3.0 \
malloc_buf-0.0.6 \
matches-0.1.8 \
memchr-2.3.3 \
miniz_oxide-0.3.7 \
natord-1.0.9 \
nom-5.1.2 \
num-integer-0.1.43 \
num-traits-0.2.12 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
object-0.20.0 \
once_cell-1.4.1 \
openssl-sys-0.9.58 \
percent-encoding-2.1.0 \
pkg-config-0.3.17 \
ppv-lite86-0.2.8 \
proptest-0.10.1 \
quick-error-1.2.3 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_xorshift-0.2.0 \
redox_syscall-0.1.56 \
regex-1.3.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rustc-demangle-0.1.16 \
rusty-fork-0.3.0 \
ryu-1.0.5 \
section_testing-0.0.4 \
static_assertions-1.1.0 \
tempfile-3.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
tinyvec-0.3.3 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-width-0.1.8 \
url-2.1.1 \
vcpkg-0.2.10 \
version_check-0.9.2 \
wait-timeout-0.2.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
winapi-0.3.9 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xdg-2.2.0
OPTIONS_DEFINE= DOCS
post-patch:
@${REINPLACE_CMD} -e 's,fail "ncursesw",true,; s,fail "libcrypto",true,' \
-e 's,fail "libssl",true,' \
${WRKSRC}/config.sh
@${REINPLACE_CMD} -e 's|-liconv|${ICONV_LIB} -lcrypto -l${NCURSES_IMPL}|;s|-Werror||' \
${WRKSRC}/Makefile
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/*boat
.include <bsd.port.mk>
Index: head/www/websocat/Makefile
===================================================================
--- head/www/websocat/Makefile (revision 552220)
+++ head/www/websocat/Makefile (revision 552221)
@@ -1,186 +1,186 @@
# Created by: Sergey A. Osokin <osa@FreeBSD.org>
# $FreeBSD$
PORTNAME= websocat
DISTVERSIONPREFIX= v
DISTVERSION= 1.6.0
-PORTREVISION= 4
+PORTREVISION= 5
CATEGORIES= www
MAINTAINER= osa@FreeBSD.org
COMMENT= WebSockets tools
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= vi
CARGO_FEATURES= ssl seqpacket
CARGO_CRATES= anymap-0.12.1 \
arc-swap-0.4.3 \
autocfg-0.1.7 \
base64-0.9.3 \
base64-0.10.1 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-padding-0.1.5 \
byte-tools-0.3.1 \
byteorder-1.3.2 \
bytes-0.4.12 \
c2-chacha-0.2.3 \
cc-1.0.41 \
cfg-if-0.1.9 \
clap-2.33.0 \
cloudabi-0.0.3 \
core-foundation-0.6.4 \
core-foundation-sys-0.6.2 \
crossbeam-deque-0.7.2 \
crossbeam-epoch-0.8.0 \
crossbeam-queue-0.1.2 \
crossbeam-utils-0.6.6 \
crossbeam-utils-0.7.0 \
derivative-1.0.3 \
digest-0.8.1 \
env_logger-0.6.2 \
fake-simd-0.1.2 \
fnv-1.0.6 \
foreign-types-0.3.2 \
foreign-types-shared-0.1.1 \
fuchsia-cprng-0.1.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futures-0.1.29 \
generic-array-0.12.3 \
getrandom-0.1.13 \
heck-0.3.1 \
hermit-abi-0.1.3 \
http-0.1.19 \
http-bytes-0.1.0 \
httparse-1.3.4 \
hyper-0.10.16 \
idna-0.1.5 \
iovec-0.1.4 \
itoa-0.4.4 \
kernel32-sys-0.2.2 \
language-tags-0.2.2 \
lazy_static-1.4.0 \
libc-0.2.65 \
lock_api-0.1.5 \
log-0.3.9 \
log-0.4.8 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memoffset-0.5.3 \
mime-0.2.6 \
mio-0.6.19 \
mio-named-pipes-0.1.6 \
mio-uds-0.6.7 \
miow-0.2.1 \
miow-0.3.3 \
native-tls-0.2.3 \
net2-0.2.33 \
num_cpus-1.11.0 \
opaque-debug-0.2.3 \
openssl-0.10.25 \
openssl-probe-0.1.2 \
openssl-sys-0.9.52 \
owning_ref-0.4.0 \
parking_lot-0.7.1 \
parking_lot_core-0.4.0 \
percent-encoding-1.0.1 \
pkg-config-0.3.17 \
ppv-lite86-0.2.6 \
proc-macro2-0.4.30 \
quote-0.6.13 \
rand-0.6.5 \
rand-0.7.2 \
rand_chacha-0.1.1 \
rand_chacha-0.2.1 \
rand_core-0.3.1 \
rand_core-0.4.2 \
rand_core-0.5.1 \
rand_hc-0.1.0 \
rand_hc-0.2.0 \
rand_isaac-0.1.1 \
rand_jitter-0.1.4 \
rand_os-0.1.3 \
rand_pcg-0.1.2 \
rand_xorshift-0.1.1 \
rdrand-0.4.0 \
readwrite-0.1.1 \
redox_syscall-0.1.56 \
remove_dir_all-0.5.2 \
rustc_version-0.2.3 \
safemem-0.3.3 \
schannel-0.1.16 \
scopeguard-0.3.3 \
scopeguard-1.0.0 \
security-framework-0.3.3 \
security-framework-sys-0.3.3 \
semver-0.9.0 \
semver-parser-0.7.0 \
sha-1-0.8.2 \
signal-hook-0.1.11 \
signal-hook-registry-1.1.1 \
slab-0.4.2 \
slab_typesafe-0.1.3 \
smallvec-0.6.13 \
smart-default-0.3.0 \
socket2-0.3.11 \
stable_deref_trait-1.1.1 \
structopt-0.2.16 \
structopt-derive-0.2.16 \
syn-0.15.44 \
tempfile-3.1.0 \
textwrap-0.11.0 \
time-0.1.42 \
tk-listen-0.2.1 \
tokio-0.1.22 \
tokio-codec-0.1.1 \
tokio-current-thread-0.1.6 \
tokio-executor-0.1.8 \
tokio-file-unix-0.5.1 \
tokio-fs-0.1.6 \
tokio-io-0.1.12 \
tokio-process-0.2.4 \
tokio-reactor-0.1.9 \
tokio-signal-0.2.7 \
tokio-stdin-stdout-0.1.5 \
tokio-sync-0.1.7 \
tokio-tcp-0.1.3 \
tokio-threadpool-0.1.16 \
tokio-timer-0.2.11 \
tokio-tls-0.2.1 \
tokio-udp-0.1.5 \
tokio-uds-0.2.5 \
traitobject-0.1.0 \
typeable-0.1.2 \
typenum-1.12.0 \
unicase-1.4.2 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.9 \
unicode-segmentation-1.5.0 \
unicode-width-0.1.5 \
unicode-xid-0.1.0 \
url-1.7.2 \
vcpkg-0.2.7 \
version_check-0.1.5 \
wasi-0.7.0 \
websocket-0.26.2 \
websocket-base-0.26.2 \
winapi-0.2.8 \
winapi-0.3.8 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
ws2_32-sys-0.2.1
PLIST_FILES= bin/websocat
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/websocat
.include <bsd.port.mk>
Index: head/www/zola/Makefile
===================================================================
--- head/www/zola/Makefile (revision 552220)
+++ head/www/zola/Makefile (revision 552221)
@@ -1,336 +1,337 @@
# $FreeBSD$
PORTNAME= zola
DISTVERSIONPREFIX= v
DISTVERSION= 0.12.2
+PORTREVISION= 1
CATEGORIES= www
MAINTAINER= ports@FreeBSD.org
COMMENT= Fast static site generator
LICENSE= MIT
LICENSE_FILE= ${WRKSRC}/LICENSE
LIB_DEPENDS= libonig.so:devel/oniguruma \
libsass.so:textproc/libsass
USES= cargo ssl
USE_GITHUB= yes
GH_ACCOUNT= getzola
CARGO_CRATES= adler-0.2.3 \
adler32-1.2.0 \
ahash-0.3.8 \
aho-corasick-0.7.13 \
ammonia-3.1.0 \
ansi_term-0.11.0 \
assert-json-diff-1.1.0 \
atty-0.2.14 \
autocfg-1.0.1 \
base64-0.12.3 \
bincode-1.3.1 \
bitflags-1.2.1 \
block-buffer-0.7.3 \
block-buffer-0.9.0 \
block-padding-0.1.5 \
bstr-0.2.13 \
bumpalo-3.4.0 \
byte-tools-0.3.1 \
bytemuck-1.4.0 \
byteorder-1.3.4 \
bytes-0.4.12 \
bytes-0.5.6 \
cc-1.0.59 \
cedarwood-0.4.4 \
cfg-if-0.1.10 \
chrono-0.4.15 \
chrono-tz-0.5.3 \
clap-2.33.3 \
color_quant-1.0.1 \
colored-1.9.3 \
cpuid-bool-0.1.2 \
crc32fast-1.2.0 \
crossbeam-channel-0.4.3 \
crossbeam-deque-0.7.3 \
crossbeam-epoch-0.8.2 \
crossbeam-utils-0.7.2 \
csv-1.1.3 \
csv-core-0.1.10 \
ctrlc-3.1.6 \
deflate-0.8.6 \
deunicode-0.4.3 \
difference-2.0.0 \
digest-0.8.1 \
digest-0.9.0 \
doc-comment-0.3.3 \
dtoa-0.4.6 \
either-1.6.0 \
elasticlunr-rs-2.3.9 \
encoding-0.2.33 \
encoding-index-japanese-1.20141219.5 \
encoding-index-korean-1.20141219.5 \
encoding-index-simpchinese-1.20141219.5 \
encoding-index-singlebyte-1.20141219.5 \
encoding-index-tradchinese-1.20141219.5 \
encoding_index_tests-0.1.4 \
encoding_rs-0.8.24 \
extend-0.1.2 \
fake-simd-0.1.2 \
filetime-0.2.12 \
flate2-1.0.17 \
fnv-1.0.7 \
fsevent-0.4.0 \
fsevent-sys-2.0.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
futf-0.1.4 \
futures-channel-0.3.5 \
futures-core-0.3.5 \
futures-io-0.3.5 \
futures-macro-0.3.5 \
futures-sink-0.3.5 \
futures-task-0.3.5 \
futures-util-0.3.5 \
generic-array-0.12.3 \
generic-array-0.14.4 \
getrandom-0.1.14 \
gif-0.10.3 \
glob-0.3.0 \
globset-0.4.5 \
globwalk-0.8.0 \
h2-0.2.6 \
hashbrown-0.8.2 \
heck-0.3.1 \
hermit-abi-0.1.15 \
html5ever-0.25.1 \
http-0.2.1 \
http-body-0.3.1 \
httparse-1.3.4 \
humansize-1.1.0 \
hyper-0.13.7 \
hyper-rustls-0.21.0 \
hyper-staticfile-0.5.3 \
idna-0.2.0 \
ignore-0.4.16 \
image-0.23.9 \
indexmap-1.5.2 \
inotify-0.7.1 \
inotify-sys-0.1.3 \
iovec-0.1.4 \
ipnet-2.3.0 \
itoa-0.4.6 \
jieba-rs-0.5.1 \
jpeg-decoder-0.1.20 \
js-sys-0.3.44 \
kernel32-sys-0.2.2 \
lazy_static-1.4.0 \
lazycell-1.3.0 \
levenshtein_automata-0.1.1 \
libc-0.2.76 \
lindera-0.3.5 \
lindera-core-0.3.3 \
lindera-dictionary-0.3.3 \
lindera-fst-0.1.1 \
lindera-ipadic-0.3.3 \
line-wrap-0.1.1 \
linked-hash-map-0.5.3 \
log-0.4.11 \
lzw-0.10.0 \
mac-0.1.1 \
maplit-1.0.2 \
markup5ever-0.10.0 \
markup5ever_rcdom-0.1.0 \
matches-0.1.8 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memoffset-0.5.5 \
mime-0.3.16 \
mime_guess-2.0.3 \
minify-html-0.3.8 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.1 \
mio-0.6.22 \
mio-extras-2.0.6 \
miow-0.2.1 \
mockito-0.27.0 \
net2-0.2.34 \
new_debug_unreachable-1.0.4 \
nix-0.17.0 \
notify-4.0.15 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_cpus-1.13.0 \
once_cell-1.4.1 \
onig-6.0.0 \
onig_sys-69.5.0 \
opaque-debug-0.2.3 \
opaque-debug-0.3.0 \
open-1.4.0 \
parse-zoneinfo-0.3.0 \
percent-encoding-2.1.0 \
pest-2.1.3 \
pest_derive-2.1.0 \
pest_generator-2.1.3 \
pest_meta-2.1.3 \
phf-0.8.0 \
phf_codegen-0.8.0 \
phf_generator-0.8.0 \
phf_shared-0.8.0 \
pin-project-0.4.23 \
pin-project-internal-0.4.23 \
pin-project-lite-0.1.7 \
pin-utils-0.1.0 \
pkg-config-0.3.18 \
plist-1.0.0 \
png-0.16.7 \
ppv-lite86-0.2.9 \
precomputed-hash-0.1.1 \
proc-macro-error-1.0.4 \
proc-macro-error-attr-1.0.4 \
proc-macro-hack-0.5.18 \
proc-macro-nested-0.1.6 \
proc-macro2-1.0.20 \
pulldown-cmark-0.8.0 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_pcg-0.2.1 \
rayon-1.4.0 \
rayon-core-1.8.0 \
redox_syscall-0.1.57 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.4.2 \
regex-syntax-0.6.18 \
relative-path-1.3.2 \
remove_dir_all-0.5.3 \
reqwest-0.10.8 \
ring-0.16.15 \
roxmltree-0.11.0 \
rust-stemmers-1.2.0 \
rustls-0.18.1 \
ryu-1.0.5 \
safemem-0.3.3 \
same-file-1.0.6 \
sass-rs-0.2.2 \
sass-sys-0.4.21 \
scoped_threadpool-0.1.9 \
scopeguard-1.1.0 \
sct-0.6.0 \
serde-1.0.115 \
serde_derive-1.0.115 \
serde_json-1.0.57 \
serde_urlencoded-0.6.1 \
sha-1-0.8.2 \
sha2-0.9.1 \
siphasher-0.3.3 \
slab-0.4.2 \
slotmap-0.4.0 \
slug-0.1.4 \
smallvec-1.4.2 \
socket2-0.3.12 \
spin-0.5.2 \
string_cache-0.8.0 \
string_cache_codegen-0.5.1 \
strsim-0.8.0 \
strum-0.18.0 \
strum_macros-0.18.0 \
svg_metadata-0.4.1 \
syn-1.0.39 \
syntect-4.4.0 \
tempfile-3.1.0 \
tendril-0.4.1 \
tera-1.5.0 \
termcolor-1.1.0 \
textwrap-0.11.0 \
thread_local-1.0.1 \
tiff-0.5.0 \
time-0.1.44 \
tinyvec-0.3.4 \
tokio-0.2.22 \
tokio-macros-0.2.5 \
tokio-rustls-0.14.1 \
tokio-util-0.3.1 \
toml-0.5.6 \
tower-service-0.3.0 \
tracing-0.1.19 \
tracing-core-0.1.15 \
try-lock-0.2.3 \
typenum-1.12.0 \
ucd-trie-0.1.3 \
unic-char-property-0.9.0 \
unic-char-range-0.9.0 \
unic-common-0.9.0 \
unic-segment-0.9.0 \
unic-ucd-segment-0.9.0 \
unic-ucd-version-0.9.0 \
unicase-2.6.0 \
unicode-bidi-0.3.4 \
unicode-normalization-0.1.13 \
unicode-segmentation-1.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.2.1 \
untrusted-0.7.1 \
url-2.1.1 \
utf-8-0.7.5 \
utf8-ranges-1.0.4 \
vec_map-0.8.2 \
version_check-0.9.2 \
void-1.0.2 \
walkdir-2.3.1 \
want-0.3.0 \
wasi-0.9.0+wasi-snapshot-preview1 \
wasi-0.10.0+wasi-snapshot-preview1 \
wasm-bindgen-0.2.67 \
wasm-bindgen-backend-0.2.67 \
wasm-bindgen-futures-0.4.17 \
wasm-bindgen-macro-0.2.67 \
wasm-bindgen-macro-support-0.2.67 \
wasm-bindgen-shared-0.2.67 \
web-sys-0.3.44 \
webpki-0.21.3 \
webpki-roots-0.19.0 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winreg-0.7.0 \
ws-0.9.1 \
ws2_32-sys-0.2.1 \
xml-rs-0.8.3 \
xml5ever-0.16.1 \
xmlparser-0.13.3 \
yaml-rust-0.4.4
MAKE_ENV= CARGO_PROFILE_RELEASE_LTO=off
PLIST_FILES= bin/zola \
etc/bash_completion.d/zola.bash \
share/fish/completions/zola.fish \
share/zsh/site-functions/_zola
PORTDOCS= *
OPTIONS_DEFINE= DOCS
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/zola
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d \
${STAGEDIR}${PREFIX}/share/fish/completions \
${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${WRKSRC}/completions/zola.bash \
${STAGEDIR}${PREFIX}/etc/bash_completion.d
${INSTALL_DATA} ${WRKSRC}/completions/zola.fish \
${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${WRKSRC}/completions/_zola \
${STAGEDIR}${PREFIX}/share/zsh/site-functions
post-install-DOCS-on:
@cd ${WRKSRC}/docs && \
${COPYTREE_SHARE} . ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>
Index: head/x11/admiral/Makefile
===================================================================
--- head/x11/admiral/Makefile (revision 552220)
+++ head/x11/admiral/Makefile (revision 552221)
@@ -1,59 +1,59 @@
# $FreeBSD$
PORTNAME= admiral
DISTVERSIONPREFIX= v
DISTVERSION= 1.0.0-7
DISTVERSIONSUFFIX= -g5a9f33b
-PORTREVISION= 24
+PORTREVISION= 25
CATEGORIES= x11
MAINTAINER= rigoletto@FreeBSD.org
COMMENT= Merge concurrent outputs for a status bar
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= sector-f
CARGO_CRATES= ansi_term-0.10.2 \
atty-0.2.6 \
bitflags-1.0.1 \
clap-2.29.2 \
libc-0.2.49 \
num_cpus-1.8.0 \
quote-0.3.15 \
redox_syscall-0.1.37 \
redox_termios-0.1.1 \
serde-1.0.27 \
serde_derive-1.0.27 \
serde_derive_internals-0.19.0 \
strsim-0.6.0 \
syn-0.11.11 \
synom-0.11.3 \
termion-1.5.1 \
textwrap-0.9.0 \
threadpool-1.7.1 \
toml-0.4.5 \
unicode-width-0.1.4 \
unicode-xid-0.0.4 \
vec_map-0.8.0 \
winapi-0.3.3 \
winapi-i686-pc-windows-gnu-0.3.2 \
winapi-x86_64-pc-windows-gnu-0.3.2
OPTIONS_DEFINE= EXAMPLES
PORTEXAMPLES= *
PLIST_FILES= bin/admiral
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/admiral
post-install-EXAMPLES-on:
${MKDIR} ${STAGEDIR}${EXAMPLESDIR}
(cd ${WRKSRC} && ${COPYTREE_SHARE} admiral.d ${STAGEDIR}${EXAMPLESDIR}/admiral.d)
.include <bsd.port.mk>
Index: head/x11/alacritty/Makefile
===================================================================
--- head/x11/alacritty/Makefile (revision 552220)
+++ head/x11/alacritty/Makefile (revision 552221)
@@ -1,318 +1,318 @@
# $FreeBSD$
PORTNAME= alacritty
DISTVERSIONPREFIX= v
DISTVERSION= 0.5.0
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= x11
MAINTAINER= ports@FreeBSD.org
COMMENT= GPU-accelerated terminal emulator
LICENSE= APACHE20
LICENSE_FILE= ${WRKSRC}/LICENSE-APACHE
LIB_DEPENDS= libfontconfig.so:x11-fonts/fontconfig \
libfreetype.so:print/freetype2
TEST_DEPENDS= dejavu>0:x11-fonts/dejavu
USES= cargo localbase:ldflags python:3.6+,build ssl:build xorg
USE_GITHUB= yes
USE_XORG= xcb
CARGO_CRATES= adler-0.2.2 \
adler32-1.1.0 \
aho-corasick-0.7.13 \
andrew-0.2.1 \
android_glue-0.2.3 \
android_log-sys-0.1.2 \
ansi_term-0.11.0 \
approx-0.3.2 \
arc-swap-0.4.7 \
arrayref-0.3.6 \
arrayvec-0.5.1 \
atty-0.2.14 \
autocfg-1.0.0 \
base64-0.11.0 \
base64-0.12.3 \
bindgen-0.53.3 \
bitflags-1.2.1 \
blake2b_simd-0.5.10 \
block-0.1.6 \
bytemuck-1.2.0 \
byteorder-1.3.4 \
bzip2-0.3.3 \
bzip2-sys-0.1.9+1.0.8 \
calloop-0.4.4 \
cc-1.0.58 \
cexpr-0.4.0 \
cfg-if-0.1.10 \
cgl-0.3.2 \
clang-sys-0.29.3 \
clap-2.33.1 \
clipboard-win-2.2.0 \
cloudabi-0.0.3 \
cmake-0.1.44 \
cocoa-0.20.2 \
constant_time_eq-0.1.5 \
copypasta-0.7.0 \
core-foundation-0.7.0 \
core-foundation-sys-0.7.0 \
core-graphics-0.19.2 \
core-text-15.0.0 \
core-video-sys-0.1.4 \
crc32fast-1.2.0 \
crossbeam-utils-0.7.2 \
crossfont-0.1.0 \
deflate-0.8.6 \
derivative-2.1.1 \
dirs-2.0.2 \
dirs-sys-0.3.5 \
dispatch-0.2.0 \
dlib-0.4.2 \
downcast-rs-1.2.0 \
dtoa-0.4.6 \
dwrote-0.11.0 \
embed-resource-1.3.3 \
env_logger-0.7.1 \
euclid-0.20.14 \
expat-sys-2.1.6 \
filetime-0.2.10 \
flate2-1.0.16 \
fnv-1.0.7 \
foreign-types-0.3.2 \
foreign-types-0.5.0 \
foreign-types-macros-0.2.1 \
foreign-types-shared-0.1.1 \
foreign-types-shared-0.3.0 \
freetype-rs-0.26.0 \
freetype-sys-0.13.1 \
fsevent-0.4.0 \
fsevent-sys-2.0.1 \
fuchsia-zircon-0.3.3 \
fuchsia-zircon-sys-0.3.3 \
getrandom-0.1.14 \
gl_generator-0.13.1 \
gl_generator-0.14.0 \
glob-0.3.0 \
glutin-0.24.1 \
glutin_egl_sys-0.1.4 \
glutin_emscripten_sys-0.1.1 \
glutin_gles2_sys-0.1.4 \
glutin_glx_sys-0.1.6 \
glutin_wgl_sys-0.1.4 \
hermit-abi-0.1.15 \
http_req-0.5.5 \
humantime-1.3.0 \
image-0.23.6 \
inotify-0.7.1 \
inotify-sys-0.1.3 \
instant-0.1.6 \
iovec-0.1.4 \
itoa-0.4.6 \
jni-sys-0.3.0 \
jobserver-0.1.21 \
kernel32-sys-0.2.2 \
khronos_api-3.1.0 \
lazy_static-1.4.0 \
lazycell-1.2.1 \
libc-0.2.72 \
libloading-0.5.2 \
libloading-0.6.2 \
line_drawing-0.7.0 \
linked-hash-map-0.5.3 \
lock_api-0.3.4 \
log-0.4.8 \
malloc_buf-0.0.6 \
maybe-uninit-2.0.0 \
memchr-2.3.3 \
memmap-0.7.0 \
miniz_oxide-0.3.7 \
miniz_oxide-0.4.0 \
mio-0.6.22 \
mio-anonymous-pipes-0.1.0 \
mio-extras-2.0.6 \
mio-named-pipes-0.1.7 \
miow-0.2.1 \
miow-0.3.5 \
native-tls-0.2.4 \
ndk-0.1.0 \
ndk-glue-0.1.0 \
ndk-sys-0.1.0 \
net2-0.2.34 \
nix-0.14.1 \
nix-0.17.0 \
nom-5.1.2 \
notify-4.0.15 \
num-integer-0.1.43 \
num-iter-0.1.41 \
num-rational-0.3.0 \
num-traits-0.2.12 \
num_enum-0.4.3 \
num_enum_derive-0.4.3 \
objc-0.2.7 \
objc-foundation-0.1.1 \
objc_id-0.1.1 \
once_cell-1.4.0 \
openssl-0.10.30 \
openssl-probe-0.1.2 \
openssl-sys-0.9.58 \
ordered-float-1.1.0 \
osmesa-sys-0.1.2 \
parking_lot-0.10.2 \
parking_lot_core-0.7.2 \
peeking_take_while-0.1.2 \
percent-encoding-2.1.0 \
phf-0.8.0 \
phf_codegen-0.8.0 \
phf_generator-0.8.0 \
phf_shared-0.8.0 \
pkg-config-0.3.17 \
png-0.16.6 \
podio-0.1.7 \
ppv-lite86-0.2.8 \
proc-macro-crate-0.1.4 \
proc-macro2-0.4.30 \
proc-macro2-1.0.18 \
quick-error-1.2.3 \
quote-0.6.13 \
quote-1.0.7 \
rand-0.7.3 \
rand_chacha-0.2.2 \
rand_core-0.5.1 \
rand_hc-0.2.0 \
rand_pcg-0.2.1 \
raw-window-handle-0.3.3 \
redox_syscall-0.1.56 \
redox_users-0.3.4 \
regex-1.3.9 \
regex-automata-0.1.9 \
regex-syntax-0.6.18 \
remove_dir_all-0.5.3 \
rust-argon2-0.7.0 \
rustc-hash-1.1.0 \
rustc_tools_util-0.2.0 \
rusttype-0.7.9 \
rusttype-0.8.3 \
ryu-1.0.5 \
same-file-1.0.6 \
schannel-0.1.19 \
scoped-tls-1.0.0 \
scopeguard-1.1.0 \
security-framework-0.4.4 \
security-framework-sys-0.4.3 \
serde-1.0.114 \
serde_derive-1.0.114 \
serde_json-1.0.56 \
serde_yaml-0.8.13 \
servo-fontconfig-0.5.1 \
servo-fontconfig-sys-5.1.0 \
shared_library-0.1.9 \
shlex-0.1.1 \
signal-hook-0.1.16 \
signal-hook-registry-1.2.0 \
siphasher-0.3.3 \
slab-0.4.2 \
smallvec-1.4.1 \
smithay-client-toolkit-0.6.6 \
smithay-client-toolkit-0.10.0 \
smithay-clipboard-0.5.1 \
socket2-0.3.12 \
spsc-buffer-0.1.1 \
stb_truetype-0.3.1 \
strsim-0.8.0 \
syn-1.0.33 \
tempfile-3.1.0 \
termcolor-1.1.0 \
terminfo-0.7.3 \
textwrap-0.11.0 \
thread_local-1.0.1 \
time-0.1.43 \
toml-0.5.6 \
unicase-2.6.0 \
unicode-width-0.1.8 \
unicode-xid-0.1.0 \
unicode-xid-0.2.1 \
urlocator-0.1.4 \
utf8parse-0.2.0 \
vcpkg-0.2.10 \
vec_map-0.8.2 \
version_check-0.9.2 \
void-1.0.2 \
vswhom-0.1.0 \
vswhom-sys-0.1.0 \
vte-0.8.0 \
vte_generate_state_changes-0.1.1 \
walkdir-2.3.1 \
wasi-0.9.0+wasi-snapshot-preview1 \
wayland-client-0.23.6 \
wayland-client-0.27.0 \
wayland-commons-0.23.6 \
wayland-commons-0.27.0 \
wayland-cursor-0.27.0 \
wayland-protocols-0.23.6 \
wayland-protocols-0.27.0 \
wayland-scanner-0.23.6 \
wayland-scanner-0.27.0 \
wayland-sys-0.23.6 \
wayland-sys-0.27.0 \
which-3.1.1 \
winapi-0.2.8 \
winapi-0.3.9 \
winapi-build-0.1.1 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-util-0.1.5 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
winit-0.22.2 \
winpty-0.2.0 \
winpty-sys-0.5.0 \
winreg-0.6.2 \
wio-0.2.2 \
ws2_32-sys-0.2.1 \
x11-clipboard-0.5.1 \
x11-dl-2.18.5 \
xcb-0.9.0 \
xcursor-0.3.2 \
xdg-2.2.0 \
xml-rs-0.8.3 \
yaml-rust-0.4.4 \
zip-0.5.6
CARGO_INSTALL_PATH= alacritty
# For xcb crate
BINARY_ALIAS= python3=${PYTHON_CMD}
PLIST_FILES= bin/alacritty \
etc/bash_completion.d/alacritty.bash \
share/fish/completions/alacritty.fish \
share/man/man1/alacritty.1.gz \
share/pixmaps/Alacritty.svg \
share/zsh/site-functions/_alacritty \
${DESKTOPDIR}/Alacritty.desktop
PORTEXAMPLES= alacritty.yml
OPTIONS_DEFINE= EXAMPLES
post-install:
${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/alacritty
${INSTALL_DATA} ${WRKSRC}/extra/linux/Alacritty.desktop \
${STAGEDIR}${DESKTOPDIR}
${INSTALL_DATA} ${WRKSRC}/extra/logo/alacritty-term.svg \
${STAGEDIR}${PREFIX}/share/pixmaps/Alacritty.svg
${INSTALL_MAN} ${WRKSRC}/extra/alacritty.man \
${STAGEDIR}${PREFIX}/share/man/man1/alacritty.1
@${MKDIR} ${STAGEDIR}${PREFIX}/etc/bash_completion.d
${INSTALL_DATA} ${WRKSRC}/extra/completions/alacritty.bash \
${STAGEDIR}${PREFIX}/etc/bash_completion.d
@${MKDIR} ${STAGEDIR}${PREFIX}/share/fish/completions
${INSTALL_DATA} ${WRKSRC}/extra/completions/alacritty.fish \
${STAGEDIR}${PREFIX}/share/fish/completions
@${MKDIR} ${STAGEDIR}${PREFIX}/share/zsh/site-functions
${INSTALL_DATA} ${WRKSRC}/extra/completions/_alacritty \
${STAGEDIR}${PREFIX}/share/zsh/site-functions
post-install-EXAMPLES-on:
@${MKDIR} ${STAGEDIR}${EXAMPLESDIR}
${INSTALL_DATA} ${WRKSRC}/${PORTEXAMPLES} ${STAGEDIR}${EXAMPLESDIR}
.include <bsd.port.mk>
Index: head/x11/squeekboard/Makefile
===================================================================
--- head/x11/squeekboard/Makefile (revision 552220)
+++ head/x11/squeekboard/Makefile (revision 552221)
@@ -1,101 +1,101 @@
# $FreeBSD$
PORTNAME= squeekboard
DISTVERSIONPREFIX= v
DISTVERSION= 1.9.3
-PORTREVISION= 2
+PORTREVISION= 3
CATEGORIES= x11
MAINTAINER= jbeich@FreeBSD.org
COMMENT= On-screen keyboard for Wayland
LICENSE= GPLv3
LICENSE_FILE= ${WRKSRC}/COPYING
BROKEN_FreeBSD_11= requires getrandom()
BROKEN_FreeBSD_12_powerpc64= fails to compile: rs.e2mg9us3-cgu.14:(.text.imservice_handle_commit_state+0x228): undefined reference to 'eekboard_context_service_show_keyboard'
BROKEN_FreeBSD_13_powerpc64= fails to link: ld: error: undefined symbol: eekboard_context_service_get_overlay
BUILD_DEPENDS= wayland-protocols>=1.12:graphics/wayland-protocols
LIB_DEPENDS= libfeedback-0.0.so:accessibility/feedbackd \
libwayland-client.so:graphics/wayland \
libcroco-0.6.so:textproc/libcroco \
libxkbcommon.so:x11/libxkbcommon
RUN_DEPENDS= gsettings-desktop-schemas>0:devel/gsettings-desktop-schemas
USES= cargo compiler:c11 gettext-tools gnome meson pkgconfig
USE_GITLAB= yes
USE_GNOME= cairo gdkpixbuf2 gnomedesktop3 gtk30
GL_SITE= https://source.puri.sm
GL_ACCOUNT= Librem5
GL_COMMIT= 1ee58ce7a0d8513b5381190bead7c27a4e899f75
MAKE_ENV= ${CARGO_ENV}
PLIST_FILES= bin/${PORTNAME} \
bin/${PORTNAME}-entry \
bin/${PORTNAME}-test-layout \
share/applications/sm.puri.Squeekboard.desktop
CARGO_CRATES= atk-0.7.0 \
atk-sys-0.9.1 \
bitflags-1.2.1 \
cairo-rs-0.7.1 \
cairo-sys-rs-0.9.2 \
cc-1.0.54 \
clap-2.33.1 \
dtoa-0.4.5 \
fragile-0.3.0 \
gdk-0.11.0 \
gdk-pixbuf-0.7.0 \
gdk-pixbuf-sys-0.9.1 \
gdk-sys-0.9.1 \
gio-0.7.0 \
gio-sys-0.9.1 \
glib-0.8.2 \
glib-sys-0.9.1 \
gobject-sys-0.9.1 \
gtk-0.7.0 \
gtk-sys-0.9.2 \
lazy_static-1.4.0 \
libc-0.2.71 \
linked-hash-map-0.5.3 \
maplit-1.0.2 \
memmap-0.7.0 \
pango-0.7.0 \
pango-sys-0.9.1 \
pkg-config-0.3.17 \
proc-macro2-1.0.18 \
quote-1.0.7 \
regex-1.3.9 \
regex-syntax-0.6.18 \
serde-1.0.111 \
serde_derive-1.0.111 \
serde_yaml-0.8.13 \
syn-1.0.31 \
textwrap-0.11.0 \
unicode-width-0.1.7 \
unicode-xid-0.2.0 \
winapi-0.3.8 \
winapi-i686-pc-windows-gnu-0.4.0 \
winapi-x86_64-pc-windows-gnu-0.4.0 \
xkbcommon-0.4.0 \
yaml-rust-0.4.4
CARGO_BUILD= no
CARGO_INSTALL= no
CARGO_TEST= no
TARGET_ORDER_OVERRIDE= 750:cargo-configure
CARGO_CARGOTOML= ${BUILD_WRKSRC}/Cargo.toml
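# (Note: cargo-configure is reordered so it runs after the post-configure
# step below, which is expected to generate the preprocessed Cargo.toml
# that CARGO_CARGOTOML points at.)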
post-patch:
# Add --verbose after each cargo subcommand
# Recognize --release even if not passed as 1st argument
# Don't use BSD realpath(1) against non-existing files
@${REINPLACE_CMD} -e '/^cargo/s/"$$@"/--verbose &/' \
${"${CARGO_BUILD_ARGS:M--release}":?-e '/^BINARY_DIR/s/debug/release/':} \
-e '/OUT_PATH=/s,=.*,=$$PWD/$$1,' \
${WRKSRC}/cargo*.sh
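# Illustrative effect of the first edit above on a hypothetical upstream
# line (derived from the sed expression, not taken from the real script):
#   before: cargo build "$@"
#   after:  cargo build --verbose "$@"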
post-configure:
# Provide preprocessed Cargo.toml for USES=cargo
@${DO_MAKE_BUILD} -C ${BUILD_WRKSRC} Cargo.toml
.include <bsd.port.mk>
